From 4738d15b1d0bfebcd5f3adf7e06f6fb59ee7e2b1 Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 19 Dec 2024 14:15:27 -0800 Subject: [PATCH 001/133] Replaced Minio with Document Record Service --- .../src/legal_api/resources/v2/document.py | 17 +++ legal-api/src/legal_api/services/__init__.py | 1 + .../src/legal_api/services/document_record.py | 126 ++++++++++++++++++ .../filings/validations/continuation_in.py | 6 +- 4 files changed, 145 insertions(+), 5 deletions(-) create mode 100644 legal-api/src/legal_api/services/document_record.py diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index 8d08f9af91..96ff6cda82 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -20,6 +20,7 @@ from legal_api.models import Document, Filing from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService from legal_api.utils.auth import jwt @@ -77,3 +78,19 @@ def get_minio_document(document_key: str): return jsonify( message=f'Error getting file {document_key}.' ), HTTPStatus.INTERNAL_SERVER_ERROR + +@bp.route('//', methods=['POST', 'OPTIONS']) +@cross_origin(origin='*') +@jwt.requires_auth +def upload_document(document_class: str, document_type: str): + """Upload document file to Document Record Service.""" + + return DocumentRecordService.upload_document(document_class, document_type), HTTPStatus.OK + +@bp.route('/drs/', methods=['DELETE']) +@cross_origin(origin='*') +@jwt.requires_auth +def delete_document(document_service_id: str): + """Delete document file from Document Record Service.""" + + return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK \ No newline at end of file diff --git a/legal-api/src/legal_api/services/__init__.py b/legal-api/src/legal_api/services/__init__.py index c2a44e2dc0..7f895c28a1 100644 --- a/legal-api/src/legal_api/services/__init__.py +++ b/legal-api/src/legal_api/services/__init__.py @@ -29,6 +29,7 @@ from .furnishing_documents_service import FurnishingDocumentsService from .involuntary_dissolution import InvoluntaryDissolutionService from .minio import MinioService +from .document_record import DocumentRecordService from .mras_service import MrasService from .naics import NaicsService from .namex import NameXService diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py new file mode 100644 index 0000000000..25a7d30b32 --- /dev/null +++ b/legal-api/src/legal_api/services/document_record.py @@ -0,0 +1,126 @@ +# Copyright © 2021 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
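# Note on configuration (editorial annotation): as created in this patch, the module below
# reads current_app.config at import time for BASE_URL / BASE_HEADERS, which only works
# inside an active Flask application context; the follow-up patch ("fixed issue on getting
# env variables") moves those config reads into the individual methods.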
+"""This module is a wrapper for Document Record Service.""" + +import base64 +from typing import Optional +import requests +from flask import current_app, request +from flask_babel import _ + +import PyPDF2 + + +BASE_URL = current_app.config.get('DRS_BASE_URL') +BASE_HEADERS = { + 'x-apikey': current_app.config.get('DRS_X_API_KEY'), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID'), +} + +class DocumentRecordService: + """Document Storage class.""" + + + @staticmethod + def upload_document(document_class: str, document_type: str) -> dict: + """Upload document to Docuemtn Record Service.""" + query_params = request.args.to_dict() + file = request.files.get('file') + # Ensure file exists + if not file: + current_app.logger.debug('No file found in request.') + return {'data': 'File not provided'} + current_app.logger.debug(f'Upload file to document record service {file.filename}') + url = f'{BASE_URL}documents/{document_class}/{document_type}' + + # Validate file size and encryption status before submitting to DRS. + validation_error = DocumentRecordService.validate_pdf(file, request.content_length) + if validation_error: + return { + 'error': validation_error + } + + file_content = file.read() + + try: + # Read and encode the file content as base64 + file_content = file.read() + file_base64 = base64.b64encode(file_content).decode('utf-8') + + response_body = requests.post( + url, + params=query_params, + json={ + 'filename': file.filename, + 'content': file_base64, + 'content_type': file.content_type, + }, + headers={ + **BASE_HEADERS, + 'Content-Type': 'application/pdf' + } + ).json() + + current_app.logger.debug(f'Upload file to document record service {response_body}') + return { + 'documentServiceId': response_body['documentServiceId'], + 'consumerDocumentId': response_body['consumerDocumentId'], + 'consumerFilename': response_body['consumerFilename'] + } + except Exception as e: + current_app.logger.debug(f"Error on uploading document {e}") + return {} + + @staticmethod + def delete_document(document_service_id: str) -> dict: + """Delete document from Document Record Service.""" + url = f'{BASE_URL}documents/{document_service_id}' + + try: + response = requests.patch( + url, json={ 'removed': True }, + headers=BASE_HEADERS + ).json() + current_app.logger.debug(f'Delete document from document record service {response}') + return response + except Exception as e: + current_app.logger.debug(f'Error on deleting document {e}') + return {} + + @staticmethod + def validate_pdf(file, content_length) -> Optional[list]: + """Validate the PDF file.""" + msg = [] + try: + pdf_reader = PyPDF2.PdfFileReader(file) + + # Check that all pages in the pdf are letter size and able to be processed. 
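            # (In PDF user space 72 points = 1 inch, so 612 x 792 points is 8.5" x 11" letter size.)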
+ if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): + msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), + 'path': file.filename}) + + if content_length > 30000000: + msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) + + if pdf_reader.isEncrypted: + msg.append({'error': _('File must be unencrypted.'), 'path': file.filename}) + + except Exception as e: + msg.append({'error': _('Invalid file.'), 'path': file.filename}) + current_app.logger.debug(e) + + if msg: + return msg + + return None diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 08ec8e0286..68925ede5d 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -51,7 +51,6 @@ def validate(filing_json: dict) -> Optional[Error]: # pylint: disable=too-many- return msg # Cannot continue validation without legal_type msg.extend(validate_business_in_colin(filing_json, filing_type)) - msg.extend(validate_continuation_in_authorization(filing_json, filing_type)) msg.extend(_validate_foreign_jurisdiction(filing_json, filing_type, legal_type)) msg.extend(validate_name_request(filing_json, legal_type, filing_type)) @@ -126,10 +125,7 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty foreign_jurisdiction['country'] == 'CA' and ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' - if file_key := foreign_jurisdiction.get('affidavitFileKey'): - if err := validate_pdf(file_key, affidavit_file_key_path, False): - msg.extend(err) - else: + if not foreign_jurisdiction.get('affidavitFileKey'): msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) try: # Check the incorporation date is in valid format From 22093c8ce0e4902939f883511be83cc1c2cdc41f Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 19 Dec 2024 14:40:11 -0800 Subject: [PATCH 002/133] fixed issue on getting env variables --- legal-api/src/legal_api/config.py | 5 +++++ .../src/legal_api/services/document_record.py | 21 +++++++++---------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/legal-api/src/legal_api/config.py b/legal-api/src/legal_api/config.py index 0b35323e3a..2bde6f1aa7 100644 --- a/legal-api/src/legal_api/config.py +++ b/legal-api/src/legal_api/config.py @@ -182,6 +182,11 @@ class _Config(): # pylint: disable=too-few-public-methods STAGE_1_DELAY = int(os.getenv('STAGE_1_DELAY', '42')) STAGE_2_DELAY = int(os.getenv('STAGE_2_DELAY', '30')) + # Document Record Service Settings + DRS_BASE_URL = os.getenv('DRS_BASE_URL', '') + DRS_ACCOUNT_ID = os.getenv('DRS_ACCOUNT_ID', '') + DRS_X_API_KEY = os.getenv('DRS_X_API_KEY', '') + TESTING = False DEBUG = False diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 25a7d30b32..7c3872b694 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -21,13 +21,6 @@ import PyPDF2 - -BASE_URL = current_app.config.get('DRS_BASE_URL') -BASE_HEADERS = { - 'x-apikey': current_app.config.get('DRS_X_API_KEY'), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID'), -} - class DocumentRecordService: 
"""Document Storage class.""" @@ -42,7 +35,8 @@ def upload_document(document_class: str, document_type: str) -> dict: current_app.logger.debug('No file found in request.') return {'data': 'File not provided'} current_app.logger.debug(f'Upload file to document record service {file.filename}') - url = f'{BASE_URL}documents/{document_class}/{document_type}' + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. validation_error = DocumentRecordService.validate_pdf(file, request.content_length) @@ -67,7 +61,8 @@ def upload_document(document_class: str, document_type: str) -> dict: 'content_type': file.content_type, }, headers={ - **BASE_HEADERS, + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), 'Content-Type': 'application/pdf' } ).json() @@ -85,12 +80,16 @@ def upload_document(document_class: str, document_type: str) -> dict: @staticmethod def delete_document(document_service_id: str) -> dict: """Delete document from Document Record Service.""" - url = f'{BASE_URL}documents/{document_service_id}' + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}documents/{document_service_id}' try: response = requests.patch( url, json={ 'removed': True }, - headers=BASE_HEADERS + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } ).json() current_app.logger.debug(f'Delete document from document record service {response}') return response From 9edd49d152edab74fe84223d286b4a2b2c35e565 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Fri, 20 Dec 2024 09:36:01 -0800 Subject: [PATCH 003/133] 24952 - Fix not allow to update NoW draft issue (#3139) * update logic to allow update NoW draft --- legal-api/src/legal_api/services/authz.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index 087236e898..0980e23598 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -504,7 +504,7 @@ def is_allowed(business: Business, else: is_ignore_draft_blockers = True - # Special case: handiling authorization for amalgamation application + # Special case: handling authorization for amalgamation application # this check is to make sure that amalgamation application is not allowed/authorized with continue in corps if filing_type == 'amalgamationApplication' and legal_type in ['C', 'CBEN', 'CUL', 'CCC']: return False @@ -746,7 +746,7 @@ def business_blocker_check(business: Business, is_ignore_draft_blockers: bool = if business.in_dissolution: business_blocker_checks[BusinessBlocker.IN_DISSOLUTION] = True - if has_notice_of_withdrawal_filing_blocker(business): + if has_notice_of_withdrawal_filing_blocker(business, is_ignore_draft_blockers): business_blocker_checks[BusinessBlocker.FILING_WITHDRAWAL] = True return business_blocker_checks @@ -872,15 +872,16 @@ def has_blocker_warning_filing(warnings: List, blocker_checks: dict): return warning_matches -def has_notice_of_withdrawal_filing_blocker(business: Business): +def has_notice_of_withdrawal_filing_blocker(business: Business, is_ignore_draft_blockers: bool = False): """Check if there are any blockers specific to Notice of Withdrawal.""" 
if business.admin_freeze: return True - filing_statuses = [Filing.Status.DRAFT.value, - Filing.Status.PENDING.value, + filing_statuses = [Filing.Status.PENDING.value, Filing.Status.PENDING_CORRECTION.value, Filing.Status.ERROR.value] + if not is_ignore_draft_blockers: + filing_statuses.append(Filing.Status.DRAFT.value) blocker_filing_matches = Filing.get_filings_by_status(business.id, filing_statuses) if any(blocker_filing_matches): return True From d69c7dc97929aacdadebceb2673c7a88c2cba1b7 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Mon, 23 Dec 2024 09:28:10 -0800 Subject: [PATCH 004/133] 22169 - api specs notice of withdrawal (#3136) * add notice of withdrawal to api specs * update filing_header * add request examples including options --- docs/business.yaml | 113 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 112 insertions(+), 1 deletion(-) diff --git a/docs/business.yaml b/docs/business.yaml index 423df3f80c..b6e30f18ba 100644 --- a/docs/business.yaml +++ b/docs/business.yaml @@ -22,6 +22,7 @@ info: - Consent Continuation Out - Conversion - Dissolution + - Notice of Withdrawal - Registration (Sole Proprietorship, General Partnership) - Special Resolution @@ -696,6 +697,35 @@ paths: paymentToken: '12345' status: 'PENDING' submitter: 'mocked submitter' + notice-of-withdrawal-success-response: + summary: Notice of Withdrawal Response + value: + filing: + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. + legalType: BC + header: + affectedFilings: [] + availableOnPaperOnly: false + certifiedBy: Sample Certified Person + colinIds: [] + comments: [] + data: '2024-12-18T00:10:47.042797+00:00' + email: api.specs@example.com + filingId: 654321 + inColinOnly: false + isCorrected: false + isCorrectionPending: false + isPaymentActionRequired: false + name: noticeOfWithdrawal + paymentStatusCode: 'APPROVED' + paymentToken: '12345' + status: 'PENDING' + submitter: 'mocked submitter' + noticeOfWithdrawal: + filingId: 123456 voluntary-dissolution-success-response: summary: Voluntary Dissolution Response value: @@ -870,6 +900,12 @@ paths: value: errorMessage: API backend third party service error. rootCause: errors:[error:Can't have new consent for same jurisdiction if an unexpired one already exists,path:/filing/consentContinuationOut/foreignJurisdiction],filing:business:foundingDate:2024-07-08T15:34:57.844764+00:00,identifier:BC0882848,legalName:0882848 B.C. LTD.,legalType:BEN,consentContinuationOut:courtOrder:effectOfOrder:planOfArrangement,fileNumber:12345,foreignJurisdiction:country:CA,region:AB,header:availableOnPaperOnly:false,certifiedBy:Api specs,date:2024-07-10,documentOptionalEmail:Apispecs@email.com,email:Apispecs@gov.bc.ca,inColinOnly:false,name:consentContinuationOut + notice-of-withdrawal-failed-withdrawn-filing-issues-response: + summary: Notice of Withdrawal - invalid withdrawn filing + value: + errors: + - error: Only filings with a future effective date can be withdrawn. + - error: Only paid filings with a future effective date can be withdrawn. voluntary-dissolution-failed-missing-filing-name-response: summary: Voluntary Dissolution - Missing Filing Name Response value: @@ -884,7 +920,21 @@ paths: summary: Consent Continuation Out - Unauthorized Response value: errorMessage: API backend third party service error. - rootCause: message:You are not authorized to submit a filing for BC1218840. + rootCause: message:You are not authorized to submit a filing for BC1218840. 
+ notice-of-withdrawal-failed-not-staff-response: + summary: Notice of Withdrawal - Not a staff + value: + message: You are not authorized to submit a filing for BC1234567. + '404': + description: Cannot found, when a value cannot be found in the records + content: + application/json: + examples: + notice-of-withdrawal-failed-invalid-filing-id-response: + summary: Notice of Withdrawal - withdrawn filing cannot be found + value: + errors: + - error: The filing to be withdrawn cannot be found. '422': description: UNPROCESSABLE ENTITY, in many cases caused by missing one or more required field(s) content: @@ -1436,6 +1486,43 @@ paths: courtOrder: fileNumber: '12345' effectOfOrder: planOfArrangement + notice-of-withdrawal-request: + summary: Notice of Withdrawal Request + value: + filing: + header: + name: noticeOfWithdrawal + certifiedBy: Sample Certified Person + email: api.specs@example.com + date: '2024-12-18' + priority: false + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. + legalType: BC + noticeOfWithdrawal: + filingId: 123456 + notice-of-withdrawal-with-options-request: + summary: Notice of Withdrawal Request with Options + value: + filing: + header: + name: noticeOfWithdrawal + certifiedBy: Sample Certified Person + email: api.specs@example.com + date: '2024-12-18' + priority: false + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. + legalType: BC + noticeOfWithdrawal: + filingId: 123456 + courtOrder: + fileNumber: "A12345" + effectOfOrder: planOfArrangement voluntary-dissolution-request: summary: Voluntary Dissolution Request value: @@ -5325,6 +5412,7 @@ components: - dissolution - dissolved - incorporationApplication + - noticeOfWithdrawal - putBackOn - registration - restoration @@ -5677,6 +5765,7 @@ components: - $ref: '#/components/schemas/Correction' - $ref: '#/components/schemas/Dissolution' - $ref: '#/components/schemas/Incorporation_application' + - $ref: '#/components/schemas/Notice_of_withdrawal' - $ref: '#/components/schemas/Registrars_notation' - $ref: '#/components/schemas/Registrars_order' - $ref: '#/components/schemas/Registration' @@ -6248,6 +6337,28 @@ components: - DBA required: - name + Notice_of_withdrawal: + type: object + title: Notice of Withdrawal Filing + description: Filing to withdraw future effective filings. + required: + - noticeOfWithdrawal + properties: + noticeOfWithdrawal: + type: object + description: This section contains all the information to withdraw a future effective filing. 
+ required: + - filingId + properties: + filingId: + type: integer + title: ID for the future effective filing + courtOrder: + $ref: '#/components/schemas/Court_order' + x-examples: + Example 1: + noticeOfWithdrawal: + filingId: 123456 Office: title: Office Schema type: object From 1a6862925350f5dcec3fe2e273766b951c383f23 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Mon, 23 Dec 2024 16:35:11 -0800 Subject: [PATCH 005/133] 25064 skip if nameTranslations prop not exist (#3149) --- colin-api/src/colin_api/models/filing.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index 9b687b164b..dbc48c097b 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -1744,6 +1744,9 @@ def _process_share_structure(cls, cursor, filing: Filing, corp_num: str): @classmethod def _process_name_translations(cls, cursor, filing: Filing, corp_num: str): """Process name translations.""" + if 'nameTranslations' not in filing.body: + return + name_translations = filing.body.get('nameTranslations', []) old_translations = CorpName.get_current_by_type( cursor=cursor, From 71abcb923addc0ec1dc246153774d62dfc83e0ed Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 24 Dec 2024 13:10:12 -0800 Subject: [PATCH 006/133] 23350 - notice of withdrawal outputs - focus on NoW for existing businesses (#3148) * create template for NoW and format it * update business details for NoW * update receipt data for NoW --- .../report-templates/noticeOfWithdrawal.html | 33 +++++++ .../common/businessDetails.html | 26 ++++- .../template-parts/common/style.html | 9 ++ .../recordToBeWithdrawn.html | 17 ++++ legal-api/src/legal_api/reports/report.py | 21 +++- .../business_filings/business_documents.py | 3 +- legal-api/tests/unit/reports/test_report.py | 97 ++++++++++++++++++- 7 files changed, 201 insertions(+), 5 deletions(-) create mode 100644 legal-api/report-templates/noticeOfWithdrawal.html create mode 100644 legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html diff --git a/legal-api/report-templates/noticeOfWithdrawal.html b/legal-api/report-templates/noticeOfWithdrawal.html new file mode 100644 index 0000000000..59aaa17815 --- /dev/null +++ b/legal-api/report-templates/noticeOfWithdrawal.html @@ -0,0 +1,33 @@ +[[macros.html]] + + + + Notice of Withdrawal + + + [[common/style.html]] + + +
+ + + + + + +
+ +
+
+ [[common/businessDetails.html]] +
NOTICE OF WITHDRAWAL
+
+ [[notice-of-withdrawal/recordToBeWithdrawn.html]] +
+ + \ No newline at end of file diff --git a/legal-api/report-templates/template-parts/common/businessDetails.html b/legal-api/report-templates/template-parts/common/businessDetails.html index f26c60eaac..03b0561663 100644 --- a/legal-api/report-templates/template-parts/common/businessDetails.html +++ b/legal-api/report-templates/template-parts/common/businessDetails.html @@ -328,6 +328,28 @@ {% endif %}
{{report_date_time}}
+ {% elif header.name == 'noticeOfWithdrawal' %} + + {% if not business or business.identifier.startswith('T') %} +
Filed Date and Time:
+ {% else %} +
Incorporation Number:
+
Filed Date and Time:
+ {% endif %} +
Recognition Date and Time:
+
Retrieved Date and Time:
+ + + {% if not business or business.identifier.startswith('T') %} +
{{ filing_date_time }}
+
{{ effective_date_time }}
+ {% else %} +
{{business.identifier}}
+
{{ filing_date_time }}
+
{{ recognition_date_time }}
+ {% endif %} +
{{ report_date_time }}
+ {% endif %} {% if reportType != 'summary' %} @@ -359,8 +381,8 @@ {% if business.legalType in ['SP', 'GP'] %} | Registration #{{business.identifier}} {% else %} - {% if header.name != 'incorporationApplication' %} - + {% if header.name not in ['incorporationApplication', 'noticeOfWithdrawal'] %} + | Incorporation # {% if not business or business.identifier.startswith('T') %} Pending diff --git a/legal-api/report-templates/template-parts/common/style.html b/legal-api/report-templates/template-parts/common/style.html index 08c373d572..26820ee0c6 100644 --- a/legal-api/report-templates/template-parts/common/style.html +++ b/legal-api/report-templates/template-parts/common/style.html @@ -329,6 +329,15 @@ text-align: center } + .record-to-be-withdrawn-table { + width: 100%; + font-family: 'BCSans-Regular', sans-serif !important; + color: #313132; + font-size: 13px; + line-height: 16px; + text-align: left; + } + .share-structure-table { width: 100%; border-collapse: collapse; diff --git a/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html new file mode 100644 index 0000000000..179f393538 --- /dev/null +++ b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html @@ -0,0 +1,17 @@ +
+
Record to be Withdrawn
+ + + + + + +
\ No newline at end of file diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index facc8af50c..690a963c6a 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -24,7 +24,7 @@ from dateutil.relativedelta import relativedelta from flask import current_app, jsonify -from legal_api.core.meta.filing import FILINGS +from legal_api.core.meta.filing import FILINGS, FilingMeta from legal_api.models import ( AmalgamatingBusiness, Amalgamation, @@ -174,6 +174,7 @@ def _substitute_template_parts(template_code): 'change-of-registration/addresses', 'change-of-registration/proprietor', 'change-of-registration/partner', + 'notice-of-withdrawal/recordToBeWithdrawn', 'incorporation-application/benefitCompanyStmt', 'incorporation-application/completingParty', 'incorporation-application/effectiveDate', @@ -306,6 +307,8 @@ def _format_filing_json(self, filing): # pylint: disable=too-many-branches, too self._format_continuation_in_data(filing) elif self._report_key == 'certificateOfContinuation': self._format_certificate_of_continuation_in_data(filing) + elif self._report_key == 'noticeOfWithdrawal': + self._format_notice_of_withdrawal_data(filing) else: # set registered office address from either the COA filing or status quo data in AR filing with suppress(KeyError): @@ -748,6 +751,18 @@ def _format_amalgamation_data(self, filing): def _format_certificate_of_amalgamation_data(self, filing): self._set_amalgamating_businesses(filing) + def _format_notice_of_withdrawal_data(self, filing): + withdrawn_filing_id = filing['noticeOfWithdrawal']['filingId'] + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + formatted_withdrawn_filing_type = FilingMeta.get_display_name( + withdrawn_filing.filing_json['filing']['business']['legalType'], + withdrawn_filing.filing_type, + withdrawn_filing.filing_sub_type + ) + filing['withdrawnFilingType'] = formatted_withdrawn_filing_type + withdrawn_filing_date = LegislationDatetime.as_legislation_timezone(withdrawn_filing.effective_date) + filing['withdrawnFilingEffectiveDate'] = LegislationDatetime.format_as_report_string(withdrawn_filing_date) + def _set_amalgamating_businesses(self, filing): amalgamating_businesses = [] business_legal_name = None @@ -1460,6 +1475,10 @@ class ReportMeta: # pylint: disable=too-few-public-methods 'certificateOfContinuation': { 'filingDescription': 'Certificate of Continuation', 'fileName': 'certificateOfContinuation' + }, + 'noticeOfWithdrawal': { + 'filingDescription': 'Notice of Withdrawal', + 'fileName': 'noticeOfWithdrawal' } } diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index 5b62e18a3e..59ce9c1937 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -110,7 +110,8 @@ def _get_receipt(business: Business, filing: Filing, token): return {}, HTTPStatus.BAD_REQUEST effective_date = None - if filing.storage.effective_date.date() != filing.storage.filing_date.date(): + if filing.storage.effective_date.date() != filing.storage.filing_date.date() \ + or filing.filing_type == 'noticeOfWithdrawal': effective_date = LegislationDatetime.format_as_report_string(filing.storage.effective_date) headers = {'Authorization': 'Bearer ' + token} diff --git 
a/legal-api/tests/unit/reports/test_report.py b/legal-api/tests/unit/reports/test_report.py index 5ac9632db2..05652094df 100644 --- a/legal-api/tests/unit/reports/test_report.py +++ b/legal-api/tests/unit/reports/test_report.py @@ -15,12 +15,14 @@ """Test-Suite to ensure that the Report class is working as expected.""" import copy from contextlib import suppress +from datetime import datetime, timedelta from pathlib import Path from unittest.mock import patch import pytest from flask import current_app from registry_schemas.example_data import ( + AGM_LOCATION_CHANGE, ALTERATION_FILING_TEMPLATE, ANNUAL_REPORT, CHANGE_OF_ADDRESS, @@ -31,6 +33,8 @@ CORRECTION_COMBINED_AR, DISSOLUTION, FILING_HEADER, + NOTICE_OF_WITHDRAWAL, + RESTORATION, INCORPORATION_FILING_TEMPLATE, SPECIAL_RESOLUTION, TRANSITION_FILING_TEMPLATE, @@ -40,7 +44,8 @@ from legal_api.models.db import versioning_manager from legal_api.reports.report import Report # noqa:I001 from legal_api.services import VersionedBusinessDetailsService # noqa:I001 -from tests.unit.models import factory_business, factory_completed_filing # noqa:E501,I001 +from legal_api.utils.legislation_datetime import LegislationDatetime +from tests.unit.models import factory_business, factory_completed_filing, factory_pending_filing # noqa:E501,I001 def create_report(identifier, entity_type, report_type, filing_type, template): @@ -293,3 +298,93 @@ def create_alteration_report(filing, business, report_type): set_registrar_info(report) set_meta_info(report) return report + + +@pytest.mark.parametrize( + 'test_name, identifier, entity_type, filing_template, filing_type, formatted_filing_type', + [ + ('BC agmLocationChange', 'BC1234567', 'BC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('BC alteration', 'BC1234567', 'BC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('BC changeOfAddress', 'BC1234567', 'BC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('BC changeOfDirectors', 'BC1234567', 'BC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('BC dissolution', 'BC1234567', 'BC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('BC restoration', 'BC1234567', 'BC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('BEN agmLocationChange', 'BC1234567', 'BEN', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('BEN alteration', 'BC1234567', 'BEN', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('BEN changeOfAddress', 'BC1234567', 'BEN', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('BEN changeOfDirectors', 'BC1234567', 'BEN', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('BEN dissolution', 'BC1234567', 'BEN', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('BEN restoration', 'BC1234567', 'BEN', RESTORATION, 'restoration', 'Full Restoration Application'), + ('ULC agmLocationChange', 'BC1234567', 'ULC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('ULC alteration', 'BC1234567', 'ULC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('ULC changeOfAddress', 'BC1234567', 'ULC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('ULC changeOfDirectors', 'BC1234567', 'ULC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('ULC dissolution', 'BC1234567', 'ULC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('ULC restoration', 'BC1234567', 'ULC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CC agmLocationChange', 'BC1234567', 
'CC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CC alteration', 'BC1234567', 'CC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CC changeOfAddress', 'BC1234567', 'CC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CC changeOfDirectors', 'BC1234567', 'CC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CC dissolution', 'BC1234567', 'CC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CC restoration', 'BC1234567', 'CC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('C agmLocationChange', 'C1234567', 'C', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('C alteration', 'C1234567', 'C', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('C changeOfAddress', 'C1234567', 'C', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('C changeOfDirectors', 'C1234567', 'C', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('C dissolution', 'C1234567', 'C', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('C restoration', 'C1234567', 'C', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CUL agmLocationChange', 'C1234567', 'CUL', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CUL alteration', 'C1234567', 'CUL', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CUL changeOfAddress', 'C1234567', 'CUL', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CUL changeOfDirectors', 'C1234567', 'CUL', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CUL dissolution', 'C1234567', 'CUL', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CUL restoration', 'C1234567', 'CUL', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CBEN agmLocationChange', 'C1234567', 'CBEN', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CBEN alteration', 'C1234567', 'CBEN', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CBEN changeOfAddress', 'C1234567', 'CBEN', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CBEN changeOfDirectors', 'C1234567', 'CBEN', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CBEN dissolution', 'C1234567', 'CBEN', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CBEN restoration', 'C1234567', 'CBEN', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CCC agmLocationChange', 'C1234567', 'CCC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CCC alteration', 'C1234567', 'CCC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CCC changeOfAddress', 'C1234567', 'CCC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CCC changeOfDirectors', 'C1234567', 'CCC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CCC dissolution', 'C1234567', 'CCC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CCC restoration', 'C1234567', 'CCC', RESTORATION, 'restoration', 'Full Restoration Application') + ] +) +def test_notice_of_withdraw_format_data(session, test_name, identifier, entity_type, filing_template, filing_type, formatted_filing_type): + """Test the data passed to NoW report template - existing business""" + # create a business + test_business = factory_business(identifier=identifier, entity_type=entity_type) + + # file a FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + withdrawn_json = 
copy.deepcopy(FILING_HEADER) + withdrawn_json['filing']['header']['name'] = filing_type + withdrawn_json['filing']['business']['legalType'] = entity_type + withdrawn_json['filing'][filing_type] = copy.deepcopy(filing_template) + withdrawn_filing = factory_pending_filing(test_business, withdrawn_json) + withdrawn_filing.effective_date = future_effective_date + withdrawn_filing.payment_completion_date = today.isoformat() + withdrawn_filing.save() + withdrawn_filing_id = withdrawn_filing.id + + # file a NoW filing + now_json = copy.deepcopy(FILING_HEADER) + now_json['filing']['header']['name'] = 'noticeOfWithdrawal' + now_json['filing']['business']['legalType'] = 'BC' + now_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + + # verify formatted NoW data for report template + formatted_now_json = copy.deepcopy(now_json['filing']) + report_instance = Report({}) + expected_withdrawn_filing_effective_date = LegislationDatetime.as_legislation_timezone(withdrawn_filing.effective_date) + expected_withdrawn_filing_effective_date = LegislationDatetime.format_as_report_string(expected_withdrawn_filing_effective_date) + report_instance._format_notice_of_withdrawal_data(formatted_now_json) + assert formatted_now_json['withdrawnFilingType'] == formatted_filing_type + assert formatted_now_json['withdrawnFilingEffectiveDate'] == expected_withdrawn_filing_effective_date + assert formatted_now_json['noticeOfWithdrawal']['filingId'] == withdrawn_filing_id From 80b86c34a59efc2ec3583ad5b5779a2b2097ddb6 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Fri, 27 Dec 2024 11:08:39 -0800 Subject: [PATCH 007/133] 25038 25039 putbackoff filing (#3151) --- .../involuntary_dissolutions.py | 11 ++-- .../versions/f99e7bda56bb_hide_in_ledger.py | 29 +++++++++++ legal-api/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 4 +- legal-api/src/legal_api/core/filing.py | 16 ++---- legal-api/src/legal_api/core/meta/filing.py | 6 +++ legal-api/src/legal_api/models/business.py | 8 +++ legal-api/src/legal_api/models/filing.py | 3 ++ .../business_filings/business_filings.py | 15 +++++- .../resources/v2/internal_services.py | 9 ++++ legal-api/src/legal_api/services/authz.py | 3 ++ .../filings/validations/put_back_off.py | 50 +++++++++++++++++++ .../filings/validations/validation.py | 4 ++ legal-api/tests/unit/models/__init__.py | 5 ++ .../tests/unit/resources/v2/test_business.py | 6 ++- .../resources/v2/test_internal_services.py | 23 +++++++++ .../filings/validations/test_put_back_off.py | 34 +++++++++++++ .../tests/unit/services/test_authorization.py | 44 ++++++++++++++-- 18 files changed, 246 insertions(+), 26 deletions(-) create mode 100644 legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py create mode 100644 legal-api/src/legal_api/services/filings/validations/put_back_off.py create mode 100644 legal-api/tests/unit/services/filings/validations/test_put_back_off.py diff --git a/jobs/involuntary-dissolutions/involuntary_dissolutions.py b/jobs/involuntary-dissolutions/involuntary_dissolutions.py index caeb5fe743..54c5bd9fee 100644 --- a/jobs/involuntary-dissolutions/involuntary_dissolutions.py +++ b/jobs/involuntary-dissolutions/involuntary_dissolutions.py @@ -106,6 +106,7 @@ def create_invountary_dissolution_filing(business_id: int): } } + filing.hide_in_ledger = True filing.save() return filing @@ -199,7 +200,7 @@ def stage_1_process(app: Flask): # pylint: disable=redefined-outer-name,too-man 
step=BatchProcessing.BatchProcessingStep.WARNING_LEVEL_1, status=BatchProcessing.BatchProcessingStatus.PROCESSING, created_date=datetime.utcnow(), - trigger_date=datetime.utcnow()+stage_1_delay, + trigger_date=datetime.utcnow() + stage_1_delay, batch_id=batch.id, business_id=business.id) @@ -222,10 +223,10 @@ def _check_stage_1_furnishing_entries(furnishings): 2. only available to send mail out, and it's processed. """ email_processed = any( - furnishing.furnishing_type == Furnishing.FurnishingType.EMAIL - and furnishing.status == Furnishing.FurnishingStatus.PROCESSED - for furnishing in furnishings - ) + furnishing.furnishing_type == Furnishing.FurnishingType.EMAIL + and furnishing.status == Furnishing.FurnishingStatus.PROCESSED + for furnishing in furnishings + ) expected_mail_status = [Furnishing.FurnishingStatus.PROCESSED] # if SFTP function is off, we expect the mail status will be QUEUED or PROCESSED diff --git a/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py b/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py new file mode 100644 index 0000000000..4c7e87381a --- /dev/null +++ b/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py @@ -0,0 +1,29 @@ +"""hide-in-ledger + +Revision ID: f99e7bda56bb +Revises: f3b30f43aa86 +Create Date: 2024-12-20 13:59:15.359911 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'f99e7bda56bb' +down_revision = 'f3b30f43aa86' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('filings', sa.Column('hide_in_ledger', sa.Boolean(), nullable=False, server_default='False')) + op.execute("UPDATE filings SET hide_in_ledger = true WHERE filing_type = 'adminFreeze'") + op.execute("UPDATE filings SET hide_in_ledger = true WHERE filing_type = 'dissolution' and filing_sub_type = 'involuntary'") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('filings', 'hide_in_ledger') + # ### end Alembic commands ### diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index 80dfadb843..7d9965f9a8 100755 --- a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 -git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/requirements/bcregistry-libraries.txt b/legal-api/requirements/bcregistry-libraries.txt index 260b5b725c..94c67a8b48 100644 --- a/legal-api/requirements/bcregistry-libraries.txt +++ b/legal-api/requirements/bcregistry-libraries.txt @@ -1,2 +1,2 @@ -git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas -git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning \ No newline at end of file +git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas +git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index ce305284ef..f0e42d81ce 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -88,6 +88,7 @@ class FilingTypes(str, Enum): DISSOLVED = 'dissolved' INCORPORATIONAPPLICATION = 'incorporationApplication' NOTICEOFWITHDRAWAL = 'noticeOfWithdrawal' + PUTBACKOFF = 'putBackOff' PUTBACKON = 'putBackOn' REGISTRARSNOTATION = 'registrarsNotation' REGISTRARSORDER = 'registrarsOrder' @@ -434,7 +435,7 @@ def common_ledger_items(business_identifier: str, filing_storage: FilingStorage) filing = Filing() filing._storage = filing_storage # pylint: disable=protected-access return { - 'displayLedger': Filing._is_display_ledger(filing_storage), + 'displayLedger': not filing_storage.hide_in_ledger, 'commentsCount': filing_storage.comments_count, 'commentsLink': f'{base_url}/{business_identifier}/filings/{filing_storage.id}/comments', 'documentsLink': f'{base_url}/{business_identifier}/filings/{filing_storage.id}/documents' if @@ -457,18 +458,6 @@ def _add_ledger_order(filing: FilingStorage, ledger_filing: dict) -> dict: ledger_filing['data'] = {} ledger_filing['data']['order'] = court_order_data - @staticmethod - def _is_display_ledger(filing: FilingStorage) -> bool: - """Return boolean that display the ledger.""" - # If filing is NOT an admin freeze or involuntary dissolution, we will display it on ledger - return not ( - filing.filing_type == Filing.FilingTypes.ADMIN_FREEZE or - ( - filing.filing_type == Filing.FilingTypes.DISSOLUTION and - filing.filing_sub_type == 'involuntary' - ) - ) - @staticmethod def get_document_list(business, # pylint: disable=too-many-locals disable=too-many-branches filing, @@ -477,6 +466,7 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m no_output_filings = [ Filing.FilingTypes.CONVERSION.value, Filing.FilingTypes.COURTORDER.value, + Filing.FilingTypes.PUTBACKOFF.value, Filing.FilingTypes.PUTBACKON.value, Filing.FilingTypes.REGISTRARSNOTATION.value, Filing.FilingTypes.REGISTRARSORDER.value, diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 20c29cdf3b..e6efdad49f 100644 --- 
a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -506,6 +506,12 @@ class FilingTitles(str, Enum): 'CCC': 'NWITH' } }, + 'putBackOff': { + 'name': 'putBackOff', + 'title': 'Put Back Off', + 'displayName': 'Correction - Put Back Off', + 'code': 'NOFEE' + }, 'putBackOn': { 'name': 'putBackOn', 'title': 'Put Back On', diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 2b3fb570a1..76ae8cf464 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -672,6 +672,14 @@ def get_all_by_no_tax_id(cls): .all()) return businesses + @classmethod + def get_expired_restoration(cls): + """Return all identifier with an expired restoration_expiry_date.""" + businesses = (db.session.query(Business.identifier) + .filter(Business.restoration_expiry_date <= datetime.utcnow()) + .all()) + return businesses + @classmethod def get_filing_by_id(cls, business_identifier: int, filing_id: str): """Return the filings for a specific business and filing_id.""" diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index bd94a0cccb..73f33c0557 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -448,6 +448,7 @@ class Source(Enum): # for all the business the fee code remain same as NOFEE (Staff) 'adminFreeze': {'name': 'adminFreeze', 'title': 'Admin Freeze', 'code': 'NOFEE'}, 'courtOrder': {'name': 'courtOrder', 'title': 'Court Order', 'code': 'NOFEE'}, + 'putBackOff': {'name': 'putBackOff', 'title': 'Put Back Off', 'code': 'NOFEE'}, 'putBackOn': {'name': 'putBackOn', 'title': 'Put Back On', 'code': 'NOFEE'}, 'registrarsNotation': {'name': 'registrarsNotation', 'title': 'Registrars Notation', 'code': 'NOFEE'}, 'registrarsOrder': {'name': 'registrarsOrder', 'title': 'Registrars Order', 'code': 'NOFEE'} @@ -492,6 +493,7 @@ class Source(Enum): 'court_order_effect_of_order', 'court_order_file_number', 'deletion_locked', + 'hide_in_ledger', 'effective_date', 'order_details', 'paper_only', @@ -535,6 +537,7 @@ class Source(Enum): application_date = db.Column('application_date', db.DateTime(timezone=True)) notice_date = db.Column('notice_date', db.DateTime(timezone=True)) resubmission_date = db.Column('resubmission_date', db.DateTime(timezone=True)) + hide_in_ledger = db.Column('hide_in_ledger', db.Boolean, unique=False, default=False) # # relationships transaction_id = db.Column('transaction_id', db.BigInteger, diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 2a9a9821d9..c5c8f9636e 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -620,12 +620,24 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s datetime.datetime.fromisoformat(filing.filing_json['filing']['header']['effectiveDate']) \ if filing.filing_json['filing']['header'].get('effectiveDate', None) else datetime.datetime.utcnow() + filing.hide_in_ledger = ListFilingResource._hide_in_ledger(filing) filing.save() except BusinessException as err: return None, None, {'error': err.error}, err.status_code return business or bootstrap, filing, None, None + @staticmethod + def _hide_in_ledger(filing: Filing) -> bool: + """Hide the 
filing in the ledger.""" + hide_in_ledger = str(request.headers.get('hide-in-ledger', None)).lower() + if (filing.filing_type == 'adminFreeze' or + (filing.filing_type == 'dissolution' and filing.filing_sub_type == 'involuntary') or + (jwt.validate_roles([SYSTEM_ROLE]) and hide_in_ledger == 'true')): + return True + + return False + @staticmethod def _save_colin_event_ids(filing: Filing, business: Union[Business, RegistrationBootstrap]): try: @@ -683,7 +695,8 @@ def get_filing_types(business: Business, filing_json: dict): # pylint: disable= legal_type, priority_flag, waive_fees_flag)) - elif filing_type in ['courtOrder', 'registrarsNotation', 'registrarsOrder', 'putBackOn', 'adminFreeze']: + elif filing_type in ('adminFreeze', 'courtOrder', 'putBackOff', 'putBackOn', + 'registrarsNotation', 'registrarsOrder'): filing_type_code = Filing.FILINGS.get(filing_type, {}).get('code') filing_types.append({ 'filingTypeCode': filing_type_code, diff --git a/legal-api/src/legal_api/resources/v2/internal_services.py b/legal-api/src/legal_api/resources/v2/internal_services.py index 00bbc46b68..7b22e1aff0 100644 --- a/legal-api/src/legal_api/resources/v2/internal_services.py +++ b/legal-api/src/legal_api/resources/v2/internal_services.py @@ -36,6 +36,15 @@ def get_future_effective_filing_ids(): return jsonify(filing_ids), HTTPStatus.OK +@bp.route('/expired_restoration', methods=['GET']) +@cross_origin(origin='*') +@jwt.has_one_of_roles([UserRoles.system]) +def get_identifiers_of_expired_restoration(): + """Return all identifiers (if limited restoration has expired).""" + businesses = Business.get_expired_restoration() + return jsonify({'identifiers': [business.identifier for business in businesses]}), HTTPStatus.OK + + @bp.route('/bnmove', methods=['POST']) @cross_origin(origin='*') @jwt.has_one_of_roles([UserRoles.system]) diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index 0980e23598..dbede093b1 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -279,6 +279,9 @@ def get_allowable_filings_dict(): # only show filing when providing allowable filings not specific to a business 'businessRequirement': BusinessRequirement.NOT_EXIST }, + 'putBackOff': { + 'legalTypes': ['BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] + }, 'registrarsNotation': { 'legalTypes': ['SP', 'GP', 'CP', 'BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] }, diff --git a/legal-api/src/legal_api/services/filings/validations/put_back_off.py b/legal-api/src/legal_api/services/filings/validations/put_back_off.py new file mode 100644 index 0000000000..a031d46b5a --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/put_back_off.py @@ -0,0 +1,50 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
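# Illustrative sketch (assumed shape, not taken from the filing schema itself): the validator
# in this module receives the full filing JSON and requires a non-empty string at
# /filing/putBackOff/details; an optional courtOrder object, when present, is checked by the
# shared validate_court_order rules. A minimal acceptable body would look roughly like:
#
#     {"filing": {"header": {...}, "business": {...},
#                 "putBackOff": {"details": "Put back off by the registrar",
#                                "courtOrder": {"fileNumber": "12345"}}}}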
+"""Validation for the Put Back Off filing.""" +from http import HTTPStatus +from typing import Dict, Final, Optional + +from flask_babel import _ as babel # noqa: N813, I004, I001; importing camelcase '_' as a name +# noqa: I004 +from legal_api.errors import Error +from legal_api.models import Business + +from .common_validations import validate_court_order +from ...utils import get_str # noqa: I003; needed as the linter gets confused from the babel override above. + + +def validate(business: Business, put_back_off: Dict) -> Optional[Error]: + """Validate the Court Order filing.""" + if not business or not put_back_off: + return Error(HTTPStatus.BAD_REQUEST, [{'error': babel('A valid business and filing are required.')}]) + msg = [] + + if not get_str(put_back_off, '/filing/putBackOff/details'): + msg.append({'error': babel('Put Back Off details are required.'), 'path': '/filing/putBackOff/details'}) + + msg.extend(_validate_court_order(put_back_off)) + + if msg: + return Error(HTTPStatus.BAD_REQUEST, msg) + return None + + +def _validate_court_order(filing): + """Validate court order.""" + if court_order := filing.get('filing', {}).get('putBackOff', {}).get('courtOrder', None): + court_order_path: Final = '/filing/putBackOff/courtOrder' + err = validate_court_order(court_order_path, court_order) + if err: + return err + return [] diff --git a/legal-api/src/legal_api/services/filings/validations/validation.py b/legal-api/src/legal_api/services/filings/validations/validation.py index cd0a85c33b..3faaab272e 100644 --- a/legal-api/src/legal_api/services/filings/validations/validation.py +++ b/legal-api/src/legal_api/services/filings/validations/validation.py @@ -41,6 +41,7 @@ from .dissolution import validate as dissolution_validate from .incorporation_application import validate as incorporation_application_validate from .notice_of_withdrawal import validate as notice_of_withdrawal_validate +from .put_back_off import validate as put_back_off_validate from .put_back_on import validate as put_back_on_validate from .registrars_notation import validate as registrars_notation_validate from .registrars_order import validate as registrars_order_validate @@ -186,6 +187,9 @@ def validate(business: Business, # pylint: disable=too-many-branches,too-many-s elif k == Filing.FILINGS['noticeOfWithdrawal'].get('name'): err = notice_of_withdrawal_validate(filing_json) + elif k == Filing.FILINGS['putBackOff'].get('name'): + err = put_back_off_validate(business, filing_json) + if err: return err diff --git a/legal-api/tests/unit/models/__init__.py b/legal-api/tests/unit/models/__init__.py index fadc6ebbb7..3853258d2a 100644 --- a/legal-api/tests/unit/models/__init__.py +++ b/legal-api/tests/unit/models/__init__.py @@ -243,6 +243,11 @@ def factory_completed_filing(business, filing._filing_type = filing_type if filing_sub_type: filing._filing_sub_type = filing_sub_type + + if (filing.filing_type == 'adminFreeze' or + (filing.filing_type == 'dissolution' and filing.filing_sub_type == 'involuntary')): + filing.hide_in_ledger = True + filing.save() uow = versioning_manager.unit_of_work(db.session) diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index 79d3ddf4db..f45481f801 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -626,6 +626,10 @@ def test_get_could_file(session, client, jwt): "displayName": "BC Limited Company Incorporation Application", "name": 
"incorporationApplication" }, + { + "displayName": "Correction - Put Back Off", + "name": "putBackOff", + }, { "displayName": "Registrar's Notation", "name": "registrarsNotation" @@ -659,4 +663,4 @@ def test_get_could_file(session, client, jwt): assert rv.json['couldFile']['filing'] assert rv.json['couldFile']['filing']['filingTypes'] assert len(rv.json['couldFile']['filing']['filingTypes']) > 0 - assert rv.json['couldFile']['filing']['filingTypes'] == expected \ No newline at end of file + assert rv.json['couldFile']['filing']['filingTypes'] == expected diff --git a/legal-api/tests/unit/resources/v2/test_internal_services.py b/legal-api/tests/unit/resources/v2/test_internal_services.py index c37e04a3b9..6676835178 100644 --- a/legal-api/tests/unit/resources/v2/test_internal_services.py +++ b/legal-api/tests/unit/resources/v2/test_internal_services.py @@ -73,6 +73,29 @@ def test_get_future_effective_filing_ids(session, client, jwt): assert len(rv.json) == 0 +@pytest.mark.parametrize( + 'test_name, expired', [ + ('LIMITED_RESTORATION', True), + ('LIMITED_RESTORATION_EXPIRED', False) + ] +) +def test_get_businesses_expired_restoration(session, client, jwt, test_name, expired): + """Assert that expired restoration can be fetched.""" + identifier = 'BC1234567' + business = factory_business(identifier=identifier, entity_type=Business.LegalTypes.COMP.value) + business.restoration_expiry_date = (datetime.now(timezone.utc) + + datedelta.datedelta(days=-1 if expired else 1)) + business.save() + rv = client.get('/api/v2/internal/expired_restoration', headers=create_header(jwt, [UserRoles.system])) + if expired: + assert rv.status_code == HTTPStatus.OK + assert len(rv.json) == 1 + assert rv.json['identifiers'][0] == identifier + else: + assert rv.status_code == HTTPStatus.OK + assert len(rv.json['identifiers']) == 0 + + def test_update_bn_move(session, client, jwt): """Assert that the endpoint updates tax_id.""" identifier = 'FM0000001' diff --git a/legal-api/tests/unit/services/filings/validations/test_put_back_off.py b/legal-api/tests/unit/services/filings/validations/test_put_back_off.py new file mode 100644 index 0000000000..35d02b4283 --- /dev/null +++ b/legal-api/tests/unit/services/filings/validations/test_put_back_off.py @@ -0,0 +1,34 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test Put back off validations.""" +import copy + +from registry_schemas.example_data import PUT_BACK_OFF, FILING_HEADER + +from legal_api.services.filings.validations.put_back_off import validate + +from tests.unit.models import factory_business + + +def test_put_back_off(session): + """Assert valid put back off.""" + identifier = 'CP1234567' + business = factory_business(identifier) + + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['putBackOff'] = copy.deepcopy(PUT_BACK_OFF) + + err = validate(business, filing_json) + assert err is None diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 5a33d92603..fa59a289fd 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -39,6 +39,7 @@ CORRECTION_AR, DISSOLUTION, FILING_TEMPLATE, + PUT_BACK_OFF, PUT_BACK_ON, RESTORATION, ) @@ -154,6 +155,7 @@ class FilingKey(str, Enum): RESTRN_LTD_EXT_LLC = 'RESTRN_LTD_EXT_LLC' RESTRN_LTD_TO_FULL_CORPS = 'RESTRN_LTD_TO_FULL_CORPS' RESTRN_LTD_TO_FULL_LLC = 'RESTRN_LTD_TO_FULL_LLC' + PUT_BACK_OFF = 'PUT_BACK_OFF' PUT_BACK_ON = 'PUT_BACK_ON' AMALGAMATION_REGULAR = 'AMALGAMATION_REGULAR' AMALGAMATION_VERTICAL = 'AMALGAMATION_VERTICAL' @@ -229,6 +231,7 @@ class FilingKey(str, Enum): 'name': 'restoration', 'type': 'limitedRestorationExtension'}, FilingKey.RESTRN_LTD_TO_FULL_LLC: {'displayName': 'Conversion to Full Restoration Application', 'feeCode': None, 'name': 'restoration', 'type': 'limitedRestorationToFull'}, + FilingKey.PUT_BACK_OFF: {'displayName': 'Correction - Put Back Off', 'feeCode': 'NOFEE', 'name': 'putBackOff'}, FilingKey.PUT_BACK_ON: {'displayName': 'Correction - Put Back On', 'feeCode': 'NOFEE', 'name': 'putBackOn'}, FilingKey.AMALGAMATION_REGULAR: {'name': 'amalgamationApplication', 'type': 'regular', 'displayName': 'Amalgamation Application (Regular)', 'feeCode': 'AMALR'}, FilingKey.AMALGAMATION_VERTICAL: {'name': 'amalgamationApplication', 'type': 'vertical', 'displayName': 'Amalgamation Application Short-form (Vertical)', 'feeCode': 'AMALV'}, @@ -304,6 +307,7 @@ class FilingKey(str, Enum): 'name': 'restoration', 'type': 'limitedRestorationExtension'}, FilingKey.RESTRN_LTD_TO_FULL_LLC: {'displayName': 'Conversion to Full Restoration Application', 'feeCode': None, 'name': 'restoration', 'type': 'limitedRestorationToFull'}, + FilingKey.PUT_BACK_OFF: {'displayName': 'Correction - Put Back Off', 'feeCode': 'NOFEE', 'name': 'putBackOff'}, FilingKey.PUT_BACK_ON: {'displayName': 'Correction - Put Back On', 'feeCode': 'NOFEE', 'name': 'putBackOn'}, FilingKey.AMALGAMATION_REGULAR: {'name': 'amalgamationApplication', 'type': 'regular', 'displayName': 'Amalgamation Application (Regular)', 'feeCode': None}, @@ -334,6 +338,9 @@ class FilingKey(str, Enum): DISSOLUTION_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) DISSOLUTION_FILING_TEMPLATE['filing']['dissolution'] = DISSOLUTION +PUT_BACK_OFF_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +PUT_BACK_OFF_FILING_TEMPLATE['filing']['putBackOff'] = PUT_BACK_OFF + PUT_BACK_ON_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) PUT_BACK_ON_FILING_TEMPLATE['filing']['putBackOn'] = PUT_BACK_ON @@ -357,6 +364,7 @@ class FilingKey(str, Enum): 'restoration.fullRestoration': RESTORATION_FILING_TEMPLATE, 'restoration.limitedRestorationExtension': RESTORATION_FILING_TEMPLATE, 'dissolution': DISSOLUTION_FILING_TEMPLATE, + 'putBackOff': 
PUT_BACK_OFF_FILING_TEMPLATE, 'putBackOn': PUT_BACK_ON_FILING_TEMPLATE, 'continuationIn': CONTINUATION_IN_TEMPLATE, 'continuationOut': CONTINUATION_OUT_TEMPLATE, @@ -541,15 +549,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', - {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', + {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', 'continuationOut', 'correction', - 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'registrarsNotation', 'registrarsOrder', - 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, + 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'putBackOff', 'registrarsNotation', + 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_llc', Business.State.ACTIVE, ['LLC'], 'staff', [STAFF_ROLE], []), ('staff_active_firms', Business.State.ACTIVE, ['SP', 'GP'], 'staff', [STAFF_ROLE], @@ -948,6 +956,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -968,6 +977,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1232,6 +1242,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1252,6 +1263,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1370,6 +1382,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1378,6 +1391,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1550,6 +1564,7 @@ def 
mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1564,6 +1579,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1674,6 +1690,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1681,6 +1698,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_STATUSES, expected_lookup_continue_in_corps([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1788,6 +1806,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1795,6 +1814,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, expected_lookup_continue_in_corps([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1906,6 +1926,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1913,6 +1934,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1997,6 +2019,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2016,6 +2039,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2155,6 +2179,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2178,6 +2203,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, 
FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2200,6 +2226,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2220,6 +2247,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2518,6 +2546,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2526,6 +2555,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.CONTINUATION_OUT, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2533,6 +2563,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me [STAFF_ROLE], ['consentContinuationOut', 'consentContinuationOut'], [None, None], [False, False], expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2553,6 +2584,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2617,6 +2649,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2725,6 +2758,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2733,6 +2767,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], 'FUTURE_EFFECTIVE', expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2743,6 +2778,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2751,6 +2787,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], 'DRAFT', expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION @@ -2767,6 +2804,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 
FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), From a53571eeb1e6244fb0f00101de1b7dd80c6262db Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Fri, 27 Dec 2024 15:24:56 -0800 Subject: [PATCH 008/133] 21751 - add put back on sync (#3153) * 21751 - add put back on sync * 21751 - fix linting issue * 21751 - fix linting issue * 21751 - update to the implementation --- colin-api/src/colin_api/models/filing.py | 32 +++++++++++++++++++-- colin-api/src/colin_api/resources/filing.py | 1 + 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index dbc48c097b..baa60068ee 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -352,6 +352,17 @@ class FilingSource(Enum): 'courtOrder': { 'type_code_list': ['COURT'], Business.TypeCodes.BC_COMP.value: 'COURT' + }, + 'putBackOn': { + 'type_code_list': ['CO_PO'], + Business.TypeCodes.BCOMP.value: 'CO_PO', + Business.TypeCodes.BC_COMP.value: 'CO_PO', + Business.TypeCodes.ULC_COMP.value: 'CO_PO', + Business.TypeCodes.CCC_COMP.value: 'CO_PO', + Business.TypeCodes.BCOMP_CONTINUE_IN.value: 'CO_PO', + Business.TypeCodes.CONTINUE_IN.value: 'CO_PO', + Business.TypeCodes.ULC_CONTINUE_IN.value: 'CO_PO', + Business.TypeCodes.CCC_CONTINUE_IN.value: 'CO_PO', } } @@ -618,7 +629,7 @@ def _insert_filing(cls, cursor, filing, # pylint: disable=too-many-statements, 'CONTB', 'CONTI', 'CONTU', 'CONTC', 'NOABE', 'NOALE', 'NOALR', 'NOALD', 'NOALA', 'NOALB', 'NOALU', 'NOALC', - 'CONTO', 'COUTI', + 'CONTO', 'COUTI', 'CO_PO', 'AGMDT', 'AGMLC', 'RESTF', 'RESTL', 'RESXL', 'RESXF', 'REGSN', 'REGSO', 'COURT']: @@ -1214,7 +1225,7 @@ def add_filing(cls, con, filing: Filing) -> int: 'amalgamationApplication', 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationIn', 'continuationOut', 'courtOrder', - 'dissolution', 'incorporationApplication', 'registrarsNotation', + 'dissolution', 'incorporationApplication', 'putBackOn', 'registrarsNotation', 'registrarsOrder', 'restoration', 'specialResolution', 'transition']: raise InvalidFilingTypeException(filing_type=filing.filing_type) @@ -1252,6 +1263,8 @@ def add_filing(cls, con, filing: Filing) -> int: cls._process_continuation_out(cursor, filing) elif filing.filing_type == 'restoration': cls._process_restoration(cursor, filing) + elif filing.filing_type == 'putBackOn': + cls._process_put_back_on(cursor, filing) elif filing.filing_type == 'alteration': # alter corp type if ( @@ -1447,6 +1460,21 @@ def _process_restoration(cls, cursor, filing): corp_state = Business.CorpStateTypes.LIMITED_RESTORATION.value Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod + def _process_put_back_on(cls, cursor, filing): + """Process Put Back On.""" + corp_num = filing.get_corp_num() + + Office.end_office(cursor=cursor, + event_id=filing.event_id, + corp_num=corp_num, + office_code=Office.OFFICE_TYPES_CODES['custodialOffice']) + + Party.end_current(cursor, filing.event_id, corp_num, 'Custodian') + + corp_state = Business.CorpStateTypes.ACTIVE.value # Active for Put Back On + Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod def _process_continuation_out(cls, cursor, filing): """Process continuation out.""" diff --git 
a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index a22337920d..0a83b3a64e 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -137,6 +137,7 @@ def post(legal_type, identifier, **kwargs): 'courtOrder': json_data.get('courtOrder', None), 'dissolution': json_data.get('dissolution', None), 'incorporationApplication': json_data.get('incorporationApplication', None), + 'putBackOn': json_data.get('putBackOn', None), 'registrarsNotation': json_data.get('registrarsNotation', None), 'registrarsOrder': json_data.get('registrarsOrder', None), 'restoration': json_data.get('restoration', None), From c830473995e852bab44a9e8d79f74941f9dd7681 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Fri, 27 Dec 2024 16:17:05 -0800 Subject: [PATCH 009/133] 24195 updating last_modified with filing completion_date (#3154) --- legal-api/src/legal_api/models/business.py | 3 ++- legal-api/tests/unit/models/test_business.py | 3 ++- queue_services/entity-filer/src/entity_filer/worker.py | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 76ae8cf464..005ed8c204 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -536,7 +536,8 @@ def _slim_json(self): 'inDissolution': self.in_dissolution, 'legalName': self.business_legal_name, 'legalType': self.legal_type, - 'state': self.state.name if self.state else Business.State.ACTIVE.name + 'state': self.state.name if self.state else Business.State.ACTIVE.name, + 'lastModified': self.last_modified.isoformat() } if self.tax_id: diff --git a/legal-api/tests/unit/models/test_business.py b/legal-api/tests/unit/models/test_business.py index 6cda1bb70d..e7e6289b3a 100644 --- a/legal-api/tests/unit/models/test_business.py +++ b/legal-api/tests/unit/models/test_business.py @@ -293,7 +293,7 @@ def test_good_standing_check_transition_filing(session, test_name, has_no_transi restoration_filing.save() elif test_name == 'TRANSITION_COMPLETED': factory_completed_filing(business, TRANSITION_FILING_TEMPLATE, filing_type='transition') - + check_result = business._has_no_transition_filed_after_restoration() assert check_result == has_no_transition_filed with patch.object(flags, 'is_on', return_value=True): @@ -330,6 +330,7 @@ def test_business_json(session): 'legalName': 'legal_name', 'legalType': Business.LegalTypes.COOP.value, 'state': Business.State.ACTIVE.name, + 'lastModified': EPOCH_DATETIME.isoformat(), 'taxId': '123456789' } diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 5e331629ea..2dbc8aca92 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -338,6 +338,7 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable business_type = business.legal_type if business else filing_submission['business']['legal_type'] filing_submission.set_processed(business_type) + business.last_modified = filing_submission.completion_date filing_submission._meta_data = json.loads( # pylint: disable=W0212 json.dumps(filing_meta.asjson, default=json_serial) From ddb0337177129d6db34c76d157c0ee84d7df97b0 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Tue, 31 Dec 2024 09:21:45 -0800 Subject: [PATCH 010/133] 24708 inserting to event_insert table for ORGBOOK sync 
(#3152) --- colin-api/src/colin_api/models/filing.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index baa60068ee..23c9ce184c 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -514,6 +514,14 @@ def _get_event_id(cls, cursor, corp_num: str, filing_dt: str, event_type: str = filing_dt=filing_dt, event_type=event_type ) + cursor.execute( + """ + INSERT INTO event_insert (event_id, corp_num, insert_date) + VALUES (:event_id, :corp_num, sysdate) + """, + event_id=event_id, + corp_num=corp_num + ) except Exception as err: current_app.logger.error('Error in filing: Failed to create new event.') raise err From 110797b026957275851ef22387f536c15caadbb4 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 31 Dec 2024 12:47:21 -0800 Subject: [PATCH 011/133] 24987 24989 - Update to enable NoW for temporary businesses (#3155) * add now for temporary businesses in basic checks * update error code for not found withdrawn filing from 400 to 404 * update code in save_filing and complete_filing to enable NoW for T businesses * add unit tests NoW filing for regular businesses and T businesses * fix 2 old unit tests to deal with the last day of a year issue --- .../business_filings/business_filings.py | 45 +++++++++- legal-api/src/legal_api/services/authz.py | 3 +- .../validations/notice_of_withdrawal.py | 16 ++-- .../unit/resources/v1/test_business_tasks.py | 4 +- .../v2/test_business_filings/test_filings.py | 87 ++++++++++++++++++- .../unit/resources/v2/test_business_tasks.py | 4 +- .../validations/test_notice_of_withdrawal.py | 2 +- .../tests/unit/services/test_authorization.py | 24 +++-- 8 files changed, 161 insertions(+), 24 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index c5c8f9636e..9169dd87df 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -479,7 +479,8 @@ def put_basic_checks(identifier, filing, client_request, business) -> Tuple[dict if not filing_type: return ({'message': 'filing/header/name is a required property'}, HTTPStatus.BAD_REQUEST) - if filing_type not in CoreFiling.NEW_BUSINESS_FILING_TYPES and business is None: + if filing_type not in CoreFiling.NEW_BUSINESS_FILING_TYPES + [CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL] \ + and business is None: return ({'message': 'A valid business is required.'}, HTTPStatus.BAD_REQUEST) if client_request.method == 'PUT' and not filing: @@ -497,9 +498,14 @@ def check_authorization(identifier, filing_json: dict, # While filing IA business object will be None. Setting default values in that case. 
state = business.state if business else Business.State.ACTIVE + if business: + legal_type = business.legal_type + # for temporary business notice of withdraw, get legalType from filing json + elif filing_type == CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL.value: + legal_type = filing_json['filing'].get('business', None).get('legalType') # for incorporationApplication and registration, get legalType from nameRequest - legal_type = business.legal_type if business else \ - filing_json['filing'][filing_type]['nameRequest'].get('legalType') + else: + legal_type = filing_json['filing'][filing_type]['nameRequest'].get('legalType') if not authorized(identifier, jwt, action=['edit']) or \ not is_allowed(business, state, filing_type, legal_type, jwt, filing_sub_type, filing): @@ -680,6 +686,9 @@ def get_filing_types(business: Business, filing_json: dict): # pylint: disable= if filing_type in CoreFiling.NEW_BUSINESS_FILING_TYPES: legal_type = filing_json['filing'][filing_type]['nameRequest']['legalType'] + elif business.identifier.startswith('T') and \ + filing_type == CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL: + legal_type = filing_json['filing'].get('business', None).get('legalType') else: legal_type = business.legal_type @@ -839,6 +848,12 @@ def create_invoice(business: Business, # pylint: disable=too-many-locals,too-ma mailing_address = business.mailing_address.one_or_none() corp_type = business.legal_type if business.legal_type else \ filing.json['filing']['business'].get('legalType') + # deal with withdrawing a new business filing + elif business.identifier.startswith('T') and \ + filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + mailing_address, corp_type, legal_name = \ + ListFilingResource._get_address_from_withdrawn_new_business_filing(business, filing) + business.legal_name = legal_name else: mailing_address = business.mailing_address.one_or_none() corp_type = business.legal_type if business.legal_type else \ @@ -1057,3 +1072,27 @@ def submit_filing_for_review(filing: Filing): {'email': {'filingId': filing.id, 'type': filing.filing_type, 'option': review.status}}, current_app.config.get('NATS_EMAILER_SUBJECT') ) + + @staticmethod + def _get_address_from_withdrawn_new_business_filing(business: Business, filing: Filing): + if filing.filing_type != CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL.value: + return None, None, None + withdrawn_filing_id = filing.filing_json['filing']['noticeOfWithdrawal']['filingId'] + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + if withdrawn_filing.filing_type in CoreFiling.NEW_BUSINESS_FILING_TYPES: + office_type = OfficeType.REGISTERED + if withdrawn_filing.filing_type == Filing.FILINGS['registration']['name']: + office_type = OfficeType.BUSINESS + + mailing_address = Address.create_address( + withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['offices'][office_type]['mailingAddress']) + corp_type = withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['nameRequest'].get( + 'legalType', Business.LegalTypes.BCOMP.value) + + try: + legal_name = withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['nameRequest']['legalName'] + except KeyError: + legal_name = business.identifier + + return mailing_address, corp_type, legal_name + return None, None, None diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index dbede093b1..a1275cf654 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -324,7 +324,8 @@ def 
get_allowable_filings_dict(): 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { 'business': [BusinessBlocker.FILING_WITHDRAWAL] - } + }, + 'businessRequirement': BusinessRequirement.NO_RESTRICTION } }, Business.State.HISTORICAL: { diff --git a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py index 443bf4bc78..74d1d5f495 100644 --- a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py +++ b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py @@ -37,9 +37,11 @@ def validate(filing: Dict) -> Optional[Error]: msg.append({'error': babel('Filing Id is required.'), 'path': withdrawn_filing_id_path}) return msg # cannot continue validation without the to be withdrawn filing id - err = validate_withdrawn_filing(withdrawn_filing_id) - if err: - msg.extend(err) + is_not_found, err_msg = validate_withdrawn_filing(withdrawn_filing_id) + if is_not_found: + return Error(HTTPStatus.NOT_FOUND, err_msg) + if err_msg and not is_not_found: + msg.extend(err_msg) if msg: return Error(HTTPStatus.BAD_REQUEST, msg) @@ -49,12 +51,14 @@ def validate(filing: Dict) -> Optional[Error]: def validate_withdrawn_filing(withdrawn_filing_id: int): """Validate the to be withdrawn filing id exists, the filing has a FED, the filing status is PAID.""" msg = [] + is_not_found = False # check whether the filing ID exists withdrawn_filing = db.session.query(Filing). \ filter(Filing.id == withdrawn_filing_id).one_or_none() if not withdrawn_filing: msg.append({'error': babel('The filing to be withdrawn cannot be found.')}) - return msg # cannot continue if the withdrawn filing doesn't exist + is_not_found = True + return is_not_found, msg # cannot continue if the withdrawn filing doesn't exist # check whether the filing has a Future Effective Date(FED) now = dt.utcnow() @@ -68,5 +72,5 @@ def validate_withdrawn_filing(withdrawn_filing_id: int): msg.append({'error': babel('Only paid filings with a future effective date can be withdrawn.')}) if msg: - return msg - return None + return is_not_found, msg + return None, None diff --git a/legal-api/tests/unit/resources/v1/test_business_tasks.py b/legal-api/tests/unit/resources/v1/test_business_tasks.py index b60019eccf..8095958570 100644 --- a/legal-api/tests/unit/resources/v1/test_business_tasks.py +++ b/legal-api/tests/unit/resources/v1/test_business_tasks.py @@ -96,8 +96,8 @@ def test_get_tasks_no_filings(session, client, jwt): def test_get_tasks_next_year(session, client, jwt): """Assert that one todo item is returned in the calendar year following incorporation.""" identifier = 'CP7654321' - founding_date = datetime.today() + datedelta.datedelta(days=1) - datedelta.datedelta(years=1) - factory_business(identifier, founding_date=founding_date) # incorporation 1 year - 1 day ago + founding_date = datetime.today() - datedelta.datedelta(years=1) + factory_business(identifier, founding_date=founding_date) # incorporation 1 year # To-do are all years from the year after incorporation until this year diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 08c2c9bea8..f8459a2376 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -17,7 +17,7 @@ Test-Suite to ensure that the 
/businesses endpoint is working as expected. """ import copy -from datetime import datetime, timezone +from datetime import datetime, timedelta, timezone from http import HTTPStatus from typing import Final from unittest.mock import patch @@ -44,6 +44,7 @@ FILING_HEADER, INCORPORATION, INCORPORATION_FILING_TEMPLATE, + NOTICE_OF_WITHDRAWAL as SCHEMA_NOTICE_OF_WITHDRAWAL, REGISTRATION, SPECIAL_RESOLUTION, TRANSITION_FILING_TEMPLATE @@ -1595,3 +1596,87 @@ def test_resubmit_filing_failed(session, client, jwt, filing_status, review_stat headers=create_header(jwt, [STAFF_ROLE], identifier)) assert rv.status_code == HTTPStatus.UNAUTHORIZED + +@pytest.mark.parametrize( + 'test_name, legal_type, filing_type, filing_json, is_temp', + [ + ('T-BUSINESS-IA', 'BC', 'incorporationApplication', INCORPORATION, True), + ('T-BUSINESS-CONT-IN', 'BEN', 'continuationIn', CONTINUATION_IN, True), + ('T-BUSINESS-AMALGAMATION', 'CBEN', 'amalgamationApplication', AMALGAMATION_APPLICATION, True), + ('REGULAR-BUSINESS-COA', 'BC', 'changeOfAddress', CHANGE_OF_ADDRESS, False), + ('REGULAR-BUSINESS-CONT-ALTERATION', 'BEN', 'alteration', ALTERATION_FILING_TEMPLATE, False), + ('REGULAR-BUSINESS-DISSOLUTION', 'CBEN', 'dissolution', DISSOLUTION, False) + ] +) +def test_notice_of_withdraw_filing(session, client, jwt, test_name, legal_type, filing_type, filing_json, is_temp): + """Assert that notice of withdraw for new business filings can be filed""" + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + # create a FE new business filing + if is_temp: + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = filing_type + del json_data['filing']['business'] + new_bus_filing_json = copy.deepcopy(filing_json) + new_bus_filing_json['nameRequest']['legalType'] = legal_type + json_data['filing'][filing_type] = new_bus_filing_json + new_business_filing = factory_pending_filing(None, json_data) + new_business_filing.temp_reg = identifier + new_business_filing.effective_date = future_effective_date + new_business_filing.payment_completion_date = datetime.utcnow().isoformat() + new_business_filing.save() + withdrawn_filing_id = new_business_filing.id + # create a regular business and file a FE filing + else: + identifier = 'BC1234567' + founding_date = datetime.utcnow() - timedelta(days=5) + business = factory_business(identifier=identifier, founding_date=founding_date, entity_type=legal_type) + filing_data_reg_business = copy.deepcopy(FILING_HEADER) + filing_data_reg_business['filing']['header']['name'] = filing_type + filing_data_reg_business['filing']['business']['identifier'] = identifier + filing_data_reg_business['filing']['business']['legalType'] = legal_type + fe_filing_json = copy.deepcopy(filing_json) + filing_data_reg_business['filing'][filing_type] = fe_filing_json + fe_filing = factory_pending_filing(business, filing_data_reg_business) + fe_filing.effective_date = future_effective_date + fe_filing.payment_completion_date = datetime.utcnow().isoformat() + fe_filing.save() + withdrawn_filing_id = fe_filing.id + + # test filing a notice of withdraw for a temporary business + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + if is_temp: + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + 
"identifier": identifier, + "legalType": legal_type + } + else: + now_json_data['filing']['business']['identifier'] = identifier + now_json_data['filing']['business']['legalType'] = legal_type + now_json_data['filing']['noticeOfWithdrawal'] = copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + + # Test validation OK + rv_validation = client.post(f'/api/v2/businesses/{identifier}/filings?only_validate=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + assert rv_validation.status_code == HTTPStatus.OK + assert rv_validation.json['filing']['header']['name'] == 'noticeOfWithdrawal' + + # Test can create a draft + rv_draft = client.post(f'/api/v2/businesses/{identifier}/filings?draft=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate + assert rv_draft.status_code == HTTPStatus.CREATED + assert rv_draft.json['filing']['header']['name'] == 'noticeOfWithdrawal' diff --git a/legal-api/tests/unit/resources/v2/test_business_tasks.py b/legal-api/tests/unit/resources/v2/test_business_tasks.py index 979426ca9e..48b0fb8bd2 100644 --- a/legal-api/tests/unit/resources/v2/test_business_tasks.py +++ b/legal-api/tests/unit/resources/v2/test_business_tasks.py @@ -98,8 +98,8 @@ def test_get_tasks_no_filings(session, client, jwt): def test_get_tasks_next_year(session, client, jwt): """Assert that one todo item is returned in the calendar year following incorporation.""" identifier = 'CP7654321' - founding_date = datetime.today() + datedelta.datedelta(days=1) - datedelta.datedelta(years=1) - factory_business(identifier, founding_date=founding_date) # incorporation 1 year - 1 day ago + founding_date = datetime.today() - datedelta.datedelta(years=1) + factory_business(identifier, founding_date=founding_date) # incorporation 1 year # To-do are all years from the year after incorporation until this year diff --git a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py index 4020aea84b..b4ea68ee92 100644 --- a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py +++ b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py @@ -44,7 +44,7 @@ ('EXIST_BUSINESS_SUCCESS', True, Filing.Status.PAID, True, True, None, None), ('EXIST_BUSINESS_FAIL_NOT_PAID', True, Filing.Status.PENDING, True, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_PAID_MSG]), ('EXIST_BUSINESS_FAIL_NOT_FED', True, Filing.Status.PAID, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG]), - ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_EXIST_MSG]), + ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, HTTPStatus.NOT_FOUND, [FILING_NOT_EXIST_MSG]), ('EXIST_BUSINESS_FAIL_MISS_FILING_ID', True, Filing.Status.PAID, True, False, HTTPStatus.UNPROCESSABLE_ENTITY, ''), ('EXIST_BUSINESS_FAIL_NOT_PAID_NOT_FED', True, Filing.Status.PENDING, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG, FILING_NOT_PAID_MSG]) ] diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index fa59a289fd..e8dc133a7e 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -1116,42 
+1116,50 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_BC])), + FilingKey.IA_BC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_c', False, Business.State.ACTIVE, ['C'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_C])), + FilingKey.CONTINUATION_IN_C, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ben', False, Business.State.ACTIVE, ['BEN'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_BEN])), + FilingKey.IA_BEN, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cben', False, Business.State.ACTIVE, ['CBEN'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CBEN])), + FilingKey.CONTINUATION_IN_CBEN, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cc', False, Business.State.ACTIVE, ['CC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_CC])), + FilingKey.IA_CC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ccc', False, Business.State.ACTIVE, ['CCC'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CCC])), + FilingKey.CONTINUATION_IN_CCC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ulc', False, Business.State.ACTIVE, ['ULC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_ULC])), + FilingKey.IA_ULC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cul', False, Business.State.ACTIVE, ['CUL'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CUL])), + FilingKey.CONTINUATION_IN_CUL, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_llc', False, Business.State.ACTIVE, ['LLC'], 'staff', [STAFF_ROLE], []), ('staff_no_business_sp', False, Business.State.ACTIVE, ['SP'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.REG_SP])), From 0502c9f58eb736822485e937001760ca26c4dc21 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Thu, 2 Jan 2025 14:45:29 -0800 Subject: [PATCH 012/133] 25042 - put back off sync (#3157) * 25042 - put back off sync * 25042 - fix linting issue * 25042 - fixed typo --- colin-api/src/colin_api/models/business.py | 2 +- colin-api/src/colin_api/models/filing.py | 27 ++++++++++++++++++--- colin-api/src/colin_api/resources/filing.py | 1 + 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/colin-api/src/colin_api/models/business.py b/colin-api/src/colin_api/models/business.py index 5f4421771b..c26a54da87 100644 --- a/colin-api/src/colin_api/models/business.py +++ b/colin-api/src/colin_api/models/business.py @@ -66,7 +66,7 @@ class CorpStateTypes(Enum): AMALGAMATED = 'HAM' CONTINUE_IN = 'HCI' CONTINUE_OUT = 'HCO' - 
INVOLUNTARY_DISSOLUTION_NO_AR = 'HDF' + INVOLUNTARY_DISSOLUTION_NO_AR = 'HDF' # this corp state is also used for Put back off INVOLUNTARY_DISSOLUTION_NO_TR = 'HDT' LIMITED_RESTORATION = 'LRS' VOLUNTARY_DISSOLUTION = 'HDV' diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index 23c9ce184c..f681aa3663 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -363,6 +363,17 @@ class FilingSource(Enum): Business.TypeCodes.CONTINUE_IN.value: 'CO_PO', Business.TypeCodes.ULC_CONTINUE_IN.value: 'CO_PO', Business.TypeCodes.CCC_CONTINUE_IN.value: 'CO_PO', + }, + 'putBackOff': { + 'type_code_list': ['CO_PF'], + Business.TypeCodes.BCOMP.value: 'CO_PF', + Business.TypeCodes.BC_COMP.value: 'CO_PF', + Business.TypeCodes.ULC_COMP.value: 'CO_PF', + Business.TypeCodes.CCC_COMP.value: 'CO_PF', + Business.TypeCodes.BCOMP_CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.ULC_CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.CCC_CONTINUE_IN.value: 'CO_PF', } } @@ -637,7 +648,7 @@ def _insert_filing(cls, cursor, filing, # pylint: disable=too-many-statements, 'CONTB', 'CONTI', 'CONTU', 'CONTC', 'NOABE', 'NOALE', 'NOALR', 'NOALD', 'NOALA', 'NOALB', 'NOALU', 'NOALC', - 'CONTO', 'COUTI', 'CO_PO', + 'CONTO', 'COUTI', 'CO_PO', 'CO_PF', 'AGMDT', 'AGMLC', 'RESTF', 'RESTL', 'RESXL', 'RESXF', 'REGSN', 'REGSO', 'COURT']: @@ -1232,8 +1243,8 @@ def add_filing(cls, con, filing: Filing) -> int: if filing.filing_type not in ['agmExtension', 'agmLocationChange', 'alteration', 'amalgamationApplication', 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationIn', - 'continuationOut', 'courtOrder', - 'dissolution', 'incorporationApplication', 'putBackOn', 'registrarsNotation', + 'continuationOut', 'courtOrder', 'dissolution', 'incorporationApplication', + 'putBackOn', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'restoration', 'specialResolution', 'transition']: raise InvalidFilingTypeException(filing_type=filing.filing_type) @@ -1273,6 +1284,8 @@ def add_filing(cls, con, filing: Filing) -> int: cls._process_restoration(cursor, filing) elif filing.filing_type == 'putBackOn': cls._process_put_back_on(cursor, filing) + elif filing.filing_type == 'putBackOff': + cls._process_put_back_off(cursor, filing) elif filing.filing_type == 'alteration': # alter corp type if ( @@ -1483,6 +1496,14 @@ def _process_put_back_on(cls, cursor, filing): corp_state = Business.CorpStateTypes.ACTIVE.value # Active for Put Back On Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod + def _process_put_back_off(cls, cursor, filing): + """Process Put Back Off.""" + corp_num = filing.get_corp_num() + + corp_state = Business.CorpStateTypes.INVOLUNTARY_DISSOLUTION_NO_AR.value + Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod def _process_continuation_out(cls, cursor, filing): """Process continuation out.""" diff --git a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index 0a83b3a64e..ea46694193 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -137,6 +137,7 @@ def post(legal_type, identifier, **kwargs): 'courtOrder': json_data.get('courtOrder', None), 'dissolution': json_data.get('dissolution', None), 'incorporationApplication': json_data.get('incorporationApplication', None), + 'putBackOff': 
json_data.get('putBackOff', None), 'putBackOn': json_data.get('putBackOn', None), 'registrarsNotation': json_data.get('registrarsNotation', None), 'registrarsOrder': json_data.get('registrarsOrder', None), From 3374b5fe36a31ab0352eda19a75415dd042fdcab Mon Sep 17 00:00:00 2001 From: Aimee Date: Fri, 3 Jan 2025 08:46:24 -0800 Subject: [PATCH 013/133] 25040 - Filer put back off (#3159) * Update putBackOff processor * Update unit test * Fix failed unit tests --- queue_services/entity-filer/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 2 +- .../filing_components/business_profile.py | 2 +- .../filing_processors/put_back_off.py | 45 ++++++++++++++++ .../entity-filer/src/entity_filer/worker.py | 5 ++ .../filing_processors/test_annual_report.py | 9 ++-- .../filing_processors/test_put_back_off.py | 52 +++++++++++++++++++ 7 files changed, 110 insertions(+), 7 deletions(-) create mode 100644 queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py create mode 100644 queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py diff --git a/queue_services/entity-filer/requirements.txt b/queue_services/entity-filer/requirements.txt index a7783e6cbd..98298d1a47 100755 --- a/queue_services/entity-filer/requirements.txt +++ b/queue_services/entity-filer/requirements.txt @@ -24,7 +24,7 @@ minio==7.0.2 PyPDF2==1.26.0 reportlab==3.6.12 git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-filer/requirements/bcregistry-libraries.txt b/queue_services/entity-filer/requirements/bcregistry-libraries.txt index bcb1f1ba4a..d9a337a3fd 100644 --- a/queue_services/entity-filer/requirements/bcregistry-libraries.txt +++ b/queue_services/entity-filer/requirements/bcregistry-libraries.txt @@ -1,5 +1,5 @@ git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py index f56910cf7d..c55c668f74 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py @@ -149,7 +149,7 @@ def update_affiliation(business: Business, filing: Filing): def update_entity(business: Business, filing_type: str): """Update an entity in auth with the latest change.""" state = None - if filing_type in ['dissolution', 'putBackOn', 'restoration']: + if filing_type in 
['dissolution', 'putBackOn', 'putBackOff', 'restoration']: state = business.state.name # state changed to HISTORICAL/ACTIVE AccountService.update_entity( diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py new file mode 100644 index 0000000000..76db9f809c --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py @@ -0,0 +1,45 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""File processing rules and actions for the put back off filing.""" + +from contextlib import suppress +from typing import Dict + +import dpath +from entity_queue_common.service_utils import QueueException, logger +from legal_api.models import Business, Filing + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components import filings + + +def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): + """Render the put back off filing unto the model objects.""" + if not (put_back_off_filing := filing.get('putBackOff')): + logger.error('Could not find putBackOff in: %s', filing) + raise QueueException(f'legal_filing:putBackOff missing from {filing}') + + logger.debug('processing putBackOff: %s', filing) + + # update court order, if any is present + with suppress(IndexError, KeyError, TypeError): + court_order_json = dpath.util.get(put_back_off_filing, '/courtOrder') + filings.update_filing_court_order(filing_rec, court_order_json) + + filing_rec.order_details = put_back_off_filing.get('details') + + # change business state to historical + business.state = Business.State.HISTORICAL + business.state_filing_id = filing_rec.id + business.restoration_expiry_date = None diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 2dbc8aca92..1de4fb1c26 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -66,6 +66,7 @@ court_order, dissolution, incorporation_filing, + put_back_off, put_back_on, registrars_notation, registrars_order, @@ -296,6 +297,9 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable elif filing.get('changeOfRegistration'): change_of_registration.process(business, filing_submission, filing, filing_meta) + elif filing.get('putBackOff'): + put_back_off.process(business, filing, filing_submission, filing_meta) + elif filing.get('putBackOn'): put_back_on.process(business, filing, filing_submission, filing_meta) @@ -376,6 +380,7 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable FilingCore.FilingTypes.CHANGEOFREGISTRATION, FilingCore.FilingTypes.CORRECTION, FilingCore.FilingTypes.DISSOLUTION, + FilingCore.FilingTypes.PUTBACKOFF, FilingCore.FilingTypes.PUTBACKON, FilingCore.FilingTypes.RESTORATION ]: diff --git 
a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py index 34fd19bccf..4250e0e0a7 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py @@ -18,6 +18,7 @@ import random from unittest.mock import patch +from dateutil.relativedelta import relativedelta from freezegun import freeze_time from legal_api.models import BatchProcessing, Business, Filing from registry_schemas.example_data import ANNUAL_REPORT @@ -60,11 +61,11 @@ def test_process_ar_filing_involuntary_dissolution(app, session, test_name, flag now = datetime.datetime.utcnow() if eligibility: # setup ar_date to """INTERVAL '26 MONTHS'"" to make the businees is eligibility - ar_date = datetime.date(year=now.year-4, month=now.month-1, day=now.day) - agm_date = datetime.date(year=now.year-4, month=now.month-2, day=now.day) + ar_date = (now - relativedelta(years=4, months=1)).date() + agm_date = (now - relativedelta(years=4, months=2)).date() else: - ar_date = datetime.date(year=now.year, month=now.month-1, day=now.day) - agm_date = datetime.date(year=now.year, month=now.month-2, day=now.day) + ar_date = (now - relativedelta(months=1)).date() + agm_date = (now - relativedelta(months=2)).date() ar = copy.deepcopy(ANNUAL_REPORT) ar['filing']['business']['identifier'] = identifier diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py new file mode 100644 index 0000000000..a6a7c6066d --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py @@ -0,0 +1,52 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for the Put Back Off filing.""" +import copy +import random + +from legal_api.models import Business, Filing +from registry_schemas.example_data import FILING_HEADER, PUT_BACK_OFF + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import put_back_off +from tests.unit import create_business, create_filing + + +def test_worker_put_back_off(session): + """Assert that the put back off filing processes correctly.""" + # Setup + identifier = 'BC1234567' + business = create_business(identifier, legal_type='BC') + + # Create filing + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['putBackOff'] = copy.deepcopy(PUT_BACK_OFF) + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + filing = create_filing(payment_id, filing_json, business_id=business.id) + + filing_meta = FilingMeta() + + # Test + put_back_off.process(business, filing_json['filing'], filing, filing_meta) + business.save() + + # Check results + final_filing = Filing.find_by_id(filing.id) + + assert business.state == Business.State.HISTORICAL + assert business.state_filing_id == filing.id + assert business.restoration_expiry_date is None + assert filing.order_details == final_filing.order_details From 17575f36397167c662586e71073cd73cc7c986c3 Mon Sep 17 00:00:00 2001 From: Rajandeep Kaur <144159721+Rajandeep98@users.noreply.github.com> Date: Fri, 3 Jan 2025 12:13:42 -0800 Subject: [PATCH 014/133] Correction Details Added on Register Correction Application pdf (#3158) * Correction Details Added on Register Correction Application pdf * updated line breaks nd staff * reverting version change --- legal-api/report-templates/correction.html | 5 ++++ .../template-parts/common/style.html | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/legal-api/report-templates/correction.html b/legal-api/report-templates/correction.html index 2f70471ee2..ab83882654 100644 --- a/legal-api/report-templates/correction.html +++ b/legal-api/report-templates/correction.html @@ -51,5 +51,10 @@ [[correction/rulesMemorandum.html]] [[correction/resolution.html]] +
+    <div><span class="details-header-text">Correction Detail</span><span class="correction-label-added">ADDED</span></div>
+    <div>Correction filed by Registry Staff on {{effective_date_time}}</div>
+    <div class="preserve-line-breaks">{{correction.comment}}</div>
+  </div>
diff --git a/legal-api/report-templates/template-parts/common/style.html b/legal-api/report-templates/template-parts/common/style.html index 26820ee0c6..92c3e013f3 100644 --- a/legal-api/report-templates/template-parts/common/style.html +++ b/legal-api/report-templates/template-parts/common/style.html @@ -301,6 +301,20 @@ font-family: 'BCSans-Bold', sans-serif !important; text-align: center } + + .details-header-text { + font-size: 14px; + color: #234075; + font-family: 'BCSans-Bold', sans-serif !important; + text-align: left; + margin-right: 1rem; + } + + .preserve-line-breaks { + white-space: pre-wrap; + word-wrap: break-word; + line-height: 1.5rem; + } .doc-description, .registrar-title { @@ -577,4 +591,15 @@ margin: 0 0.25rem; text-align: center; } + .correction-label-added { + font-family: 'BCSans-Bold', sans-serif !important; + color: #313132; + background: #E2E7E7; + font-size: 8px; + padding: 4px 7px; + margin: 0 0.25rem; + text-align: center; + border-radius: 0.25rem; + } + From a5270e9ced60db1a8b256c92d4d4f0333237e36b Mon Sep 17 00:00:00 2001 From: flutistar Date: Sun, 5 Jan 2025 09:06:40 -0800 Subject: [PATCH 015/133] added drs download function --- .../src/legal_api/resources/v2/document.py | 10 +++++++- .../src/legal_api/services/document_record.py | 23 +++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index 96ff6cda82..ea21ccd9ef 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -93,4 +93,12 @@ def upload_document(document_class: str, document_type: str): def delete_document(document_service_id: str): """Delete document file from Document Record Service.""" - return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK \ No newline at end of file + return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK + +@bp.route('/drs//', methods=['GET']) +@cross_origin(origins='*') +@jwt.requires_auth +def get_document(document_class: str, document_service_id: str): + """Get document file from Document Record Service.""" + + return DocumentRecordService.get_document(document_class, document_service_id), HTTPStatus.OK \ No newline at end of file diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 7c3872b694..9f6e33c86b 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -36,7 +36,7 @@ def upload_document(document_class: str, document_type: str) -> dict: return {'data': 'File not provided'} current_app.logger.debug(f'Upload file to document record service {file.filename}') DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}documents/{document_class}/{document_type}' + url = f'{DRS_BASE_URL}/documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. 
validation_error = DocumentRecordService.validate_pdf(file, request.content_length) @@ -81,7 +81,7 @@ def upload_document(document_class: str, document_type: str) -> dict: def delete_document(document_service_id: str) -> dict: """Delete document from Document Record Service.""" DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}documents/{document_service_id}' + url = f'{DRS_BASE_URL}/documents/{document_service_id}' try: response = requests.patch( @@ -97,6 +97,25 @@ def delete_document(document_service_id: str) -> dict: current_app.logger.debug(f'Error on deleting document {e}') return {} + @staticmethod + def get_document(document_class: str, document_service_id: str) -> dict: + + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}/searches/{document_class}?documentServiceId={document_service_id}' + try: + response = requests.get( + url, + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } + ).json() + current_app.logger.debug(f'Get document from document record service {response}') + return response[0] + except Exception as e: + current_app.logger.debug(f'Error on downloading document {e}') + return {} + @staticmethod def validate_pdf(file, content_length) -> Optional[list]: """Validate the PDF file.""" From b9d16caa433a5e796331c885c62cbdfb52047edd Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Tue, 7 Jan 2025 15:25:49 -0800 Subject: [PATCH 016/133] 25084 Sync local correction to COLIN (#3164) --- colin-api/src/colin_api/models/filing.py | 32 ++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index f681aa3663..284d113cd8 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -174,7 +174,8 @@ class FilingSource(Enum): 'CORPS_NAME': 'CO_BC', # company name/translated name 'CORPS_DIRECTOR': 'CO_DI', 'CORPS_OFFICE': 'CO_RR', # registered and record offices - 'CORPS_SHARE': 'CO_SS' + 'CORPS_SHARE': 'CO_SS', + 'CORPS_COMMENT_ONLY': 'CO_LI' # Called local correction (adding a comment only) }, 'specialResolution': { 'type_code_list': ['OTSPE'], @@ -355,6 +356,7 @@ class FilingSource(Enum): }, 'putBackOn': { 'type_code_list': ['CO_PO'], + Business.TypeCodes.COOP.value: 'CO_PO', Business.TypeCodes.BCOMP.value: 'CO_PO', Business.TypeCodes.BC_COMP.value: 'CO_PO', Business.TypeCodes.ULC_COMP.value: 'CO_PO', @@ -366,6 +368,7 @@ class FilingSource(Enum): }, 'putBackOff': { 'type_code_list': ['CO_PF'], + Business.TypeCodes.COOP.value: 'CO_PF', Business.TypeCodes.BCOMP.value: 'CO_PF', Business.TypeCodes.BC_COMP.value: 'CO_PF', Business.TypeCodes.ULC_COMP.value: 'CO_PF', @@ -640,7 +643,7 @@ def _insert_filing(cls, cursor, filing, # pylint: disable=too-many-statements, filing_date=filing.filing_date[:10] ) elif filing_type_code in ['NOCAD', 'TRANS', - 'CO_BC', 'CO_DI', 'CO_RR', 'CO_SS', + 'CO_BC', 'CO_DI', 'CO_RR', 'CO_SS', 'CO_LI', 'BEINC', 'ICORP', 'ICORU', 'ICORC', 'AMLRB', 'AMALR', 'AMLRU', 'AMLRC', 'AMLHB', 'AMALH', 'AMLHU', 'AMLHC', @@ -1905,6 +1908,19 @@ def _process_share_correction(cls, cursor, filing: Filing, corp_num: str, filing return filing.event_id + @classmethod + def _process_comment_correction(cls, cursor, filing: Filing, corp_num: str, filing_type_code: str): + """Process comment correction.""" + # create new 
event record, return event ID + filing.event_id = cls._get_event_id(cursor=cursor, corp_num=corp_num, filing_dt=filing.filing_date) + cls._insert_filing_user(cursor=cursor, filing=filing) + cls._insert_filing(cursor=cursor, filing=filing, filing_type_code=filing_type_code) + + ledger_text = filing.body.get('comment', '') + cls._insert_ledger_text(cursor, filing, ledger_text) + + return filing.event_id + @classmethod def add_correction_filings(cls, con, filing: Filing) -> list: """Create correction filings.""" @@ -1958,11 +1974,13 @@ def add_correction_filings(cls, con, filing: Filing) -> list: 'filing_type': filing.filing_type, 'filing_sub_type': None}) - if not filings_added: # if no filing created - raise GenericException( # pylint: disable=broad-exception-raised - f'No filing created for this correction identifier:{corp_num}.', - HTTPStatus.NOT_IMPLEMENTED - ) + if not filings_added: # only comment added + filing_type_code = Filing.FILING_TYPES[filing.filing_type][f'{sub_type}_COMMENT_ONLY'] + event_id = cls._process_comment_correction(cursor, filing, corp_num, filing_type_code) + + filings_added.append({'event_id': event_id, + 'filing_type': filing.filing_type, + 'filing_sub_type': None}) return filings_added From dbe4c28080e477a9e2f2a6e29008fafed837cb1e Mon Sep 17 00:00:00 2001 From: meawong Date: Wed, 8 Jan 2025 11:37:57 -0800 Subject: [PATCH 017/133] 24818 Link NoW and Withdrawn Filing (#3162) * 24818-Add script to update filing table with withdrawn id and withdrawal pending property * 24818-Add-logic-to-set-NoW-properties * 24818-Update-link-function-name * 24818-Move-NoW-check * 24818-Convert-withdrawn_filining_id-to-foreign-key * 24818-Update-withdrawal-pending-script Co-authored-by: Vysakh Menon * 24818-Update-logic-to-reflect-draft-NoW-and-deleting-NoW * 24818-Update unit test to check for new NoW filing properties * 24818-Remove-unused-setup-var --------- Co-authored-by: Vysakh Menon --- ...bbf4_add_now_properties_to_filing_table.py | 27 ++++++++++++++++ legal-api/src/legal_api/models/filing.py | 16 ++++++++-- .../business_filings/business_filings.py | 31 +++++++++++++++++-- .../v2/test_business_filings/test_filings.py | 22 +++++++++++++ 4 files changed, 92 insertions(+), 4 deletions(-) create mode 100644 legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py diff --git a/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py b/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py new file mode 100644 index 0000000000..80cd0a883e --- /dev/null +++ b/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py @@ -0,0 +1,27 @@ +"""add_NoW_properties_to_filing_table + +Revision ID: d9254d3cbbf4 +Revises: f99e7bda56bb +Create Date: 2025-01-02 16:52:38.449590 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'd9254d3cbbf4' +down_revision = 'f99e7bda56bb' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('filings', sa.Column('withdrawn_filing_id', sa.Integer(), nullable=True)) + op.create_foreign_key('filings_withdrawn_filing_id_fkey', 'filings', 'filings', ['withdrawn_filing_id'], ['id']) + op.add_column('filings', sa.Column('withdrawal_pending', sa.Boolean(), nullable=False, server_default='False')) + +def downgrade(): + op.drop_constraint('filings_withdrawn_filing_id_fkey', 'filings', type_='foreignkey') + op.drop_column('filings', 'withdrawn_filing_id') + op.drop_column('filings', 'withdrawal_pending') diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 73f33c0557..fd6c2a8c56 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -506,7 +506,9 @@ class Source(Enum): 'transaction_id', 'approval_type', 'application_date', - 'notice_date' + 'notice_date', + 'withdrawal_pending', + 'withdrawn_filing_id' ] } @@ -538,6 +540,7 @@ class Source(Enum): notice_date = db.Column('notice_date', db.DateTime(timezone=True)) resubmission_date = db.Column('resubmission_date', db.DateTime(timezone=True)) hide_in_ledger = db.Column('hide_in_ledger', db.Boolean, unique=False, default=False) + withdrawal_pending = db.Column('withdrawal_pending', db.Boolean, unique=False, default=False) # # relationships transaction_id = db.Column('transaction_id', db.BigInteger, @@ -561,7 +564,16 @@ class Source(Enum): review = db.relationship('Review', lazy='dynamic') parent_filing_id = db.Column(db.Integer, db.ForeignKey('filings.id')) - parent_filing = db.relationship('Filing', remote_side=[id], backref=backref('children')) + parent_filing = db.relationship('Filing', + remote_side=[id], + backref=backref('children', uselist=True), + foreign_keys=[parent_filing_id]) + + withdrawn_filing_id = db.Column('withdrawn_filing_id', db.Integer, + db.ForeignKey('filings.id')) + withdrawn_filing = db.relationship('Filing', + remote_side=[id], + foreign_keys=[withdrawn_filing_id]) # properties @property diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 9169dd87df..fd43ec46a2 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -198,6 +198,9 @@ def delete_filings(identifier, filing_id=None): if err_code: return jsonify({'message': _(err_message)}), err_code + if filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + ListFilingResource.unlink_now_and_withdrawn_filing(filing) + filing_type = filing.filing_type filing_json = filing.filing_json filing.delete() @@ -627,6 +630,9 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s if filing.filing_json['filing']['header'].get('effectiveDate', None) else datetime.datetime.utcnow() filing.hide_in_ledger = ListFilingResource._hide_in_ledger(filing) + + if filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + ListFilingResource.link_now_and_withdrawn_filing(filing) filing.save() except BusinessException as err: return None, None, {'error': err.error}, err.status_code @@ -807,6 +813,28 @@ def get_filing_types_for_dissolution(filing_json: dict, legal_type: str, priorit }) return filing_types + @staticmethod + def 
get_withdrawn_filing(filing: Filing) -> Filing: + """Get withdrawn filing from NoW filing ID.""" + withdrawn_filing_id = filing.filing_json['filing']['noticeOfWithdrawal']['filingId'] + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + return withdrawn_filing + + @staticmethod + def link_now_and_withdrawn_filing(filing: Filing): + """Add withdrawn filing ID to the NoW and set the withdrawal pending flag to True on the withdrawn filing.""" + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) + withdrawn_filing.withdrawal_pending = True + withdrawn_filing.save() + filing.withdrawn_filing_id = withdrawn_filing.id + + @staticmethod + def unlink_now_and_withdrawn_filing(filing: Filing): + """Set the withdrawal pending flag to False when a NoW is deleted.""" + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) + withdrawn_filing.withdrawal_pending = False + withdrawn_filing.save() + @staticmethod def create_invoice(business: Business, # pylint: disable=too-many-locals,too-many-branches,too-many-statements filing: Filing, @@ -1077,8 +1105,7 @@ def submit_filing_for_review(filing: Filing): def _get_address_from_withdrawn_new_business_filing(business: Business, filing: Filing): if filing.filing_type != CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL.value: return None, None, None - withdrawn_filing_id = filing.filing_json['filing']['noticeOfWithdrawal']['filingId'] - withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) if withdrawn_filing.filing_type in CoreFiling.NEW_BUSINESS_FILING_TYPES: office_type = OfficeType.REGISTERED if withdrawn_filing.filing_type == Filing.FILINGS['registration']['name']: diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index f8459a2376..9e82abe5e2 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -1680,3 +1680,25 @@ def test_notice_of_withdraw_filing(session, client, jwt, test_name, legal_type, # validate assert rv_draft.status_code == HTTPStatus.CREATED assert rv_draft.json['filing']['header']['name'] == 'noticeOfWithdrawal' + + # setup + withdrawn_filing = {} + identifier = '' + + # validate NoW flags set on withdrawn filing + if is_temp: + withdrawn_filing = new_business_filing + identifier = 'Tb31yQIuBw' + else: + withdrawn_filing = fe_filing + identifier = 'BC1234567' + + withdrawn_filing_id = withdrawn_filing.withdrawn_filing_id + withdrawal_pending = withdrawn_filing.withdrawal_pending + assert withdrawn_filing_id is None + assert withdrawal_pending == True + + # validate NoW flags set on NoW + now_filing = (Filing.find_by_id(rv_draft.json['filing']['header']['filingId'])) + assert now_filing.withdrawn_filing_id == withdrawn_filing.id + assert now_filing.withdrawal_pending == False From 9f33aad24c6e6067cafdd3eaf6254aeabdc81f91 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Wed, 8 Jan 2025 12:19:16 -0800 Subject: [PATCH 018/133] 24195 tweaks on filing queries (#3166) --- legal-api/src/legal_api/core/filing.py | 2 +- legal-api/src/legal_api/models/business.py | 1 - legal-api/src/legal_api/models/filing.py | 43 +++++++++---------- .../resources/v2/business/business.py | 1 - .../filings/validations/restoration.py | 6 +-- .../business/business_checks/firms.py | 6 +-- legal-api/tests/unit/models/test_filing.py | 4 +- 
.../filings/validations/test_restoration.py | 12 +++--- 8 files changed, 34 insertions(+), 41 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index f0e42d81ce..865ec1128d 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -309,7 +309,7 @@ def get_filings_by_status(business_id: int, status: list, after_date: date = Non @staticmethod def get_most_recent_filing_json(business_id: str, filing_type: str = None, jwt: JwtManager = None): """Return the most recent filing json.""" - if storage := FilingStorage.get_most_recent_legal_filing(business_id, filing_type): + if storage := FilingStorage.get_most_recent_filing(business_id, filing_type): submitter_displayname = REDACTED_STAFF_SUBMITTER if (submitter := storage.filing_submitter) \ and submitter.username and jwt \ diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 005ed8c204..b182eacba2 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -511,7 +511,6 @@ def json(self, slim=False): 'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat(), 'lastAddressChangeDate': '', 'lastDirectorChangeDate': '', - 'lastModified': self.last_modified.isoformat(), 'naicsKey': self.naics_key, 'naicsCode': self.naics_code, 'naicsDescription': self.naics_description, diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index fd6c2a8c56..2f4f8d7ed0 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -863,7 +863,7 @@ def get_temp_reg_filing(temp_reg_id: str, filing_id: str = None): q = db.session.query(Filing).filter(Filing.temp_reg == temp_reg_id) if filing_id: - q.filter(Filing.id == filing_id) + q = q.filter(Filing.id == filing_id) filing = q.one_or_none() return filing @@ -908,7 +908,7 @@ def get_filings_by_types(business_id: int, filing_types): filter(Filing.business_id == business_id). \ filter(Filing._filing_type.in_(filing_types)). \ filter(Filing._status == Filing.Status.COMPLETED.value). \ - order_by(desc(Filing.effective_date)). \ + order_by(desc(Filing.transaction_id)). \ all() return filings @@ -968,23 +968,21 @@ def get_filings_by_type_pairs(business_id: int, filing_type_pairs: list, status: return filings @staticmethod - def get_a_businesses_most_recent_filing_of_a_type(business_id: int, filing_type: str, filing_sub_type: str = None): - """Return the filings of a particular type.""" - max_filing = db.session.query(db.func.max(Filing._filing_date).label('last_filing_date')).\ - filter(Filing._filing_type == filing_type). \ - filter(Filing.business_id == business_id) - if filing_sub_type: - max_filing = max_filing.filter(Filing._filing_sub_type == filing_sub_type) - max_filing = max_filing.subquery() + def get_most_recent_filing(business_id: str, filing_type: str = None, filing_sub_type: str = None): + """Return the most recent filing. - filing = Filing.query.join(max_filing, Filing._filing_date == max_filing.c.last_filing_date). \ + filing_type is required, if filing_sub_type is provided, it will be used to filter the query. + """ + query = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ - filter(Filing._filing_type == filing_type). 
\ filter(Filing._status == Filing.Status.COMPLETED.value) - if filing_sub_type: - filing = filing.filter(Filing._filing_sub_type == filing_sub_type) + if filing_type: + query = query.filter(Filing._filing_type == filing_type) + if filing_sub_type: + query = query.filter(Filing._filing_sub_type == filing_sub_type) - return filing.one_or_none() + query = query.order_by(Filing.transaction_id.desc()) + return query.first() @staticmethod def get_most_recent_legal_filing(business_id: str, filing_type: str = None): @@ -1049,15 +1047,14 @@ def get_all_filings_by_status(status): @staticmethod def get_previous_completed_filing(filing): """Return the previous completed filing.""" - filings = db.session.query(Filing). \ + query = db.session.query(Filing). \ filter(Filing.business_id == filing.business_id). \ - filter(Filing._status == Filing.Status.COMPLETED.value). \ - filter(Filing.id < filing.id). \ - filter(Filing.effective_date < filing.effective_date). \ - order_by(Filing.effective_date.desc()).all() - if filings: - return filings[0] - return None + filter(Filing._status == Filing.Status.COMPLETED.value) + + if filing.transaction_id: # transaction_id will be None for the pending filings (intermediate state) + query = query.filter(Filing.transaction_id < filing.transaction_id) + + return query.order_by(Filing.transaction_id.desc()).first() @staticmethod def has_completed_filing(business_id: int, filing_type: str) -> bool: diff --git a/legal-api/src/legal_api/resources/v2/business/business.py b/legal-api/src/legal_api/resources/v2/business/business.py index d6cfc10c42..95d2dfa310 100644 --- a/legal-api/src/legal_api/resources/v2/business/business.py +++ b/legal-api/src/legal_api/resources/v2/business/business.py @@ -80,7 +80,6 @@ def get_businesses(identifier: str): recent_filing_json = CoreFiling.get_most_recent_filing_json(business.id, None, jwt) if recent_filing_json: business_json['submitter'] = recent_filing_json['filing']['header']['submitter'] - business_json['lastModified'] = recent_filing_json['filing']['header']['date'] allowed_filings = str(request.args.get('allowed_filings', None)).lower() == 'true' if allowed_filings: diff --git a/legal-api/src/legal_api/services/filings/validations/restoration.py b/legal-api/src/legal_api/services/filings/validations/restoration.py index fb6528047d..f6531f3b24 100644 --- a/legal-api/src/legal_api/services/filings/validations/restoration.py +++ b/legal-api/src/legal_api/services/filings/validations/restoration.py @@ -39,9 +39,9 @@ def validate(business: Business, restoration: Dict) -> Optional[Error]: restoration_type = get_str(restoration, '/filing/restoration/type') limited_restoration = None if restoration_type in ('limitedRestorationExtension', 'limitedRestorationToFull'): - limited_restoration = Filing.get_a_businesses_most_recent_filing_of_a_type(business.id, - 'restoration', - 'limitedRestoration') + limited_restoration = Filing.get_most_recent_filing(business.id, + 'restoration', + 'limitedRestoration') if restoration_type in ('limitedRestoration', 'limitedRestorationExtension'): msg.extend(validate_expiry_date(business, restoration, restoration_type)) elif restoration_type in ('fullRestoration', 'limitedRestorationToFull'): diff --git a/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py b/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py index ad2cb54a6a..2804fedaa5 100644 --- a/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py +++ 
b/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py @@ -82,12 +82,10 @@ def check_parties(legal_type: str, business: Business) -> list: firm_party_roles = business.party_roles.filter(PartyRole.cessation_date.is_(None)) result.extend(check_firm_parties(legal_type, firm_party_roles)) - completing_party_filing = Filing \ - .get_most_recent_legal_filing(business.id, 'conversion') + completing_party_filing = Filing.get_most_recent_filing(business.id, 'conversion') if not completing_party_filing: - completing_party_filing = Filing \ - .get_most_recent_legal_filing(business.id, 'registration') + completing_party_filing = Filing.get_most_recent_filing(business.id, 'registration') result.extend(check_completing_party_for_filing(completing_party_filing)) return result diff --git a/legal-api/tests/unit/models/test_filing.py b/legal-api/tests/unit/models/test_filing.py index c5f5ee6c4e..d90f30a6d7 100644 --- a/legal-api/tests/unit/models/test_filing.py +++ b/legal-api/tests/unit/models/test_filing.py @@ -560,7 +560,7 @@ def test_get_completed_filings_for_colin(session, client, jwt): assert len(filings) == 0 -def test_get_a_businesses_most_recent_filing_of_a_type(session): +def test_get_most_recent_filing(session): """Assert that the most recent completed filing of a specified type is returned.""" from legal_api.models import Filing from tests.unit.models import factory_completed_filing @@ -577,7 +577,7 @@ def test_get_a_businesses_most_recent_filing_of_a_type(session): filing = factory_completed_filing(b, ar, filing_date) filings.append(filing) # test - filing = Filing.get_a_businesses_most_recent_filing_of_a_type(b.id, Filing.FILINGS['annualReport']['name']) + filing = Filing.get_most_recent_filing(b.id, Filing.FILINGS['annualReport']['name']) # assert that we get the last filing assert filings[4] == filing diff --git a/legal-api/tests/unit/services/filings/validations/test_restoration.py b/legal-api/tests/unit/services/filings/validations/test_restoration.py index 765b22dde5..b021adc2f7 100644 --- a/legal-api/tests/unit/services/filings/validations/test_restoration.py +++ b/legal-api/tests/unit/services/filings/validations/test_restoration.py @@ -93,7 +93,7 @@ def execute_test_restoration_nr(mocker, filing_sub_type, legal_type, nr_number, mock_nr_response = MockResponse(temp_nr_response, HTTPStatus.OK) mocker.patch('legal_api.services.NameXService.query_nr_number', return_value=mock_nr_response) - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -180,7 +180,7 @@ def test_validate_relationship(session, test_status, restoration_type, expected_ elif test_status == 'SUCCESS' and restoration_type in ('fullRestoration', 'limitedRestorationToFull'): filing['filing']['restoration']['relationships'] = relationships - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -231,7 +231,7 @@ def test_validate_expiry_date(session, test_name, restoration_type, delta_date, filing['filing']['restoration']['type'] = restoration_type if delta_date: filing['filing']['restoration']['expiry'] = expiry_date.strftime(date_format) - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', 
return_value=limited_restoration_filing): err = validate(business, filing) @@ -281,7 +281,7 @@ def test_approval_type(session, test_status, restoration_types, legal_types, app filing['filing']['restoration']['applicationDate'] = '2023-03-30' filing['filing']['restoration']['noticeDate'] = '2023-03-30' - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -337,7 +337,7 @@ def test_restoration_court_orders(session, test_status, restoration_types, legal else: del filing['filing']['restoration']['courtOrder'] - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -394,7 +394,7 @@ def test_restoration_registrar(session, test_status, restoration_types, legal_ty if notice_date: filing['filing']['restoration']['noticeDate'] = notice_date - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) From 9ca5ad08848b55f63de46ec30d79cf0c22461106 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Thu, 9 Jan 2025 13:23:40 -0800 Subject: [PATCH 019/133] 24839 - script to add corrections and related changes (#3168) * 24839 - script to add corrections and related changes * 24839 - remove unused file * 24839 - remove unused data * 24839 - updated queries * 24839 - remove commented out code * 24629 - fix linting issue * 24629 - remove uneeded code * 24629 - add correction check * 24629 - fix correction format share class logic --- .../add_corrections.ipynb | 159 ++++++++++++++++++ .../add_registrars_notation.ipynb | 13 +- .../convert_corrections_data.py | 31 ++++ .../convert_registrar_notation_data.py | 32 ++++ .../corrections_output.py | 5 + .../corrections_results.csv | 4 + jobs/correction-ben-statement/data.py | 7 - jobs/correction-ben-statement/queries.sql | 17 ++ .../registrar_notation_result.csv | 4 + jobs/correction-ben-statement/rn_output.py | 5 + legal-api/src/legal_api/reports/report.py | 19 ++- .../filing_components/correction.py | 2 + 12 files changed, 276 insertions(+), 22 deletions(-) create mode 100644 jobs/correction-ben-statement/add_corrections.ipynb create mode 100644 jobs/correction-ben-statement/convert_corrections_data.py create mode 100644 jobs/correction-ben-statement/convert_registrar_notation_data.py create mode 100644 jobs/correction-ben-statement/corrections_output.py create mode 100644 jobs/correction-ben-statement/corrections_results.csv delete mode 100644 jobs/correction-ben-statement/data.py create mode 100644 jobs/correction-ben-statement/queries.sql create mode 100644 jobs/correction-ben-statement/registrar_notation_result.csv create mode 100644 jobs/correction-ben-statement/rn_output.py diff --git a/jobs/correction-ben-statement/add_corrections.ipynb b/jobs/correction-ben-statement/add_corrections.ipynb new file mode 100644 index 0000000000..c64f271382 --- /dev/null +++ b/jobs/correction-ben-statement/add_corrections.ipynb @@ -0,0 +1,159 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Add Correction filing for All active existing companies" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Purpose: Add 
Corrections filing for all active existing BENs.\n", + "\n", + "This is a one time (python) script to be run at a given date/time.
\n", + "Set the configuration (client_id, client_secret, url(s)) for a scpecific environment.
\n", + "Get access token for authorization.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Access token returned successfully\n" + ] + } + ], + "source": [ + "import requests\n", + "import os\n", + "from datetime import datetime\n", + "\n", + "# token_url, client_id, client_secret, base_url - update based on environment\n", + "token_url = os.getenv('ACCOUNT_SVC_AUTH_URL')\n", + "client_id = os.getenv('ACCOUNT_SVC_CLIENT_ID')\n", + "client_secret = os.getenv('ACCOUNT_SVC_CLIENT_SECRET')\n", + "base_url = os.getenv('LEGAL_API_BASE_URL')\n", + "\n", + "header = {\n", + " \"Content-Type\": \"application/x-www-form-urlencoded\"\n", + "}\n", + "\n", + "data = 'grant_type=client_credentials'\n", + "\n", + "res = requests.post(token_url, data, auth=(client_id, client_secret), headers=header)\n", + "\n", + "# Check the status code of the response\n", + "if res.status_code == 200:\n", + " print(\"Access token returned successfully\")\n", + " token = res.json()[\"access_token\"]\n", + "else:\n", + " print(f\"Failed to make POST request. Status code: {res.status_code}\")\n", + " print(res.text) # Print the error message if the request fails\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Call API (POST) endpoint to createCorrection filing with details as Ben correction statement for businesses." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Correction created successfully for BC0871147\n", + "Correction created successfully for BC0871183\n", + "Correction created successfully for BC0871186\n" + ] + } + ], + "source": [ + "from urllib.parse import urljoin\n", + "from corrections_output import correction_businesses\n", + "\n", + "current_date = datetime.now().date().isoformat()\n", + "headers = {\n", + " 'Content-Type': 'application/json',\n", + " 'Authorization': 'Bearer ' + token\n", + "}\n", + "\n", + "# loop through list of businesses to create filing\n", + "for correction_businesse in correction_businesses:\n", + " identifier = correction_businesse[0]\n", + " filind_id = correction_businesse[1]\n", + " correction_filing_data = {\n", + " \"filing\": {\n", + " \"header\": {\n", + " \"name\": \"correction\",\n", + " \"date\": current_date,\n", + " \"certifiedBy\": \"system\"\n", + " },\n", + " \"business\": {\n", + " \"identifier\": identifier,\n", + " \"legalType\": \"BC\"\n", + " },\n", + " \"correction\": {\n", + " \"details\": \"First correction\",\n", + " \"correctedFilingId\": filind_id,\n", + " \"correctedFilingType\": \"incorporationApplication\",\n", + " \"comment\": f\"\"\"Correction for Incorporation Application filed on {current_date} \\n\n", + " BC benefit company statement contained in notice of articles as required under section \n", + " 51.992 of the Business Corporations Act corrected from “This company is a benefit company \n", + " and, as such, has purposes that include conducting its business in a responsible and \n", + " sustainable manner and promoting one or more public benefits” to \n", + " “This company is a benefit company and, as such, is committed to conducting its business in \n", + " a responsible and sustainable manner and promoting one or more public benefits”.\"\"\"\n", + " }\n", + " }\n", + " }\n", + "\n", + " filing_url = urljoin(base_url, f\"/api/v2/businesses/{identifier}/filings\")\n", + " rv = 
requests.post(filing_url, headers=headers, json=correction_filing_data)\n", + "\n", + " # Check the status code of the response\n", + " if rv.status_code == 201:\n", + " print(f\"Correction created successfully for {identifier}\")\n", + " else:\n", + " print(f\"Failed to make POST request. Status code: {rv.status_code}\")\n", + " print(rv.text) # Print the error message if the request fails\n", + " \n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.17" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/jobs/correction-ben-statement/add_registrars_notation.ipynb b/jobs/correction-ben-statement/add_registrars_notation.ipynb index 17d0234a19..0197abf547 100644 --- a/jobs/correction-ben-statement/add_registrars_notation.ipynb +++ b/jobs/correction-ben-statement/add_registrars_notation.ipynb @@ -66,7 +66,7 @@ "outputs": [], "source": [ "from urllib.parse import urljoin\n", - "from data import ben_businesses\n", + "from rn_output import rn_businesses\n", "\n", "current_date = datetime.now().date().isoformat()\n", "headers = {\n", @@ -75,7 +75,7 @@ "}\n", "\n", "# loop through list of businesses to create filing\n", - "for ben in ben_businesses:\n", + "for business in rn_businesses:\n", " filing_data = {\n", " \"filing\": {\n", " \"header\": {\n", @@ -84,7 +84,7 @@ " \"certifiedBy\": \"system\"\n", " },\n", " \"business\": {\n", - " \"identifier\": ben,\n", + " \"identifier\": business,\n", " \"legalType\": \"BEN\"\n", " },\n", " \"registrarsNotation\": {\n", @@ -98,16 +98,15 @@ " }\n", " }\n", "\n", - " filing_url = urljoin(base_url, f\"/api/v2/businesses/{ben}/filings\")\n", + " filing_url = urljoin(base_url, f\"/api/v2/businesses/{business}/filings\")\n", " response = requests.post(filing_url, headers=headers, json=filing_data)\n", "\n", " # Check the status code of the response\n", " if response.status_code == 201:\n", - " print(f\"Registrars Notation cretaed successfully for {ben}\")\n", + " print(f\"Registrars Notation created successfully for {business}\")\n", " else:\n", " print(f\"Failed to make POST request. 
Status code: {response.status_code}\")\n", - " print(response.text) # Print the error message if the request fails\n", - " \n" + " print(response.text) # Print the error message if the request fails\n" ] } ], diff --git a/jobs/correction-ben-statement/convert_corrections_data.py b/jobs/correction-ben-statement/convert_corrections_data.py new file mode 100644 index 0000000000..c364c3afa0 --- /dev/null +++ b/jobs/correction-ben-statement/convert_corrections_data.py @@ -0,0 +1,31 @@ +import pandas as pd + +# Function to convert CSV to array of arrays using pandas +def convert_csv_to_array_of_arrays(csv_filename): + # Read the CSV file into a pandas DataFrame + df = pd.read_csv(csv_filename) + + # Convert the DataFrame to a list of lists (array of arrays) + rows_array = df.values.tolist() + + return rows_array + +# Write the array of arrays to a Python file +def write_array_to_python_file(array, output_filename): + with open(output_filename, 'w') as f: + f.write('correction_businesses = [\n') # Start the Python array + for row in rows_array: + f.write(f' {row},\n') # Write each row as a list + f.write(']\n') # End the Python array + +# Specify your input and output filenames +csv_filename = 'corrections_results.csv' +output_filename = 'corrections_output.py' + +# Convert CSV to array of arrays +rows_array = convert_csv_to_array_of_arrays(csv_filename) + +# Write the result to a Python file +write_array_to_python_file(rows_array, output_filename) + +print(f"Data has been written to {output_filename}") diff --git a/jobs/correction-ben-statement/convert_registrar_notation_data.py b/jobs/correction-ben-statement/convert_registrar_notation_data.py new file mode 100644 index 0000000000..8e821038a9 --- /dev/null +++ b/jobs/correction-ben-statement/convert_registrar_notation_data.py @@ -0,0 +1,32 @@ +import csv + +# Function to read CSV and convert to a Python array +def csv_to_python_array(file_path): + array = [] + + # Open the CSV file and read its contents + with open(file_path, mode='r', newline='') as file: + reader = csv.reader(file) + for row in reader: + array.extend(row) # Add each element from the row to the array + + return array + +# Function to write the Python array to a file, with each element on a new row +def write_to_python_file(array, output_file): + with open(output_file, 'w') as file: + file.write('rn_businesses = [\n') # Start the array in Python format + for element in array: + file.write(f" '{element}',\n") # Write each element in the array + file.write(']\n') # End the array in Python format + +input_csv = 'registrar_notation_result.csv' +output_python_file = 'rn_output.py' + +# Convert CSV to Python array +python_array = csv_to_python_array(input_csv) + +# Write the array to a Python file +write_to_python_file(python_array, output_python_file) + +print(f"Python array has been written to {output_python_file}") diff --git a/jobs/correction-ben-statement/corrections_output.py b/jobs/correction-ben-statement/corrections_output.py new file mode 100644 index 0000000000..18a26bcc4a --- /dev/null +++ b/jobs/correction-ben-statement/corrections_output.py @@ -0,0 +1,5 @@ +correction_businesses = [ + ['BC0871147', 131528], + ['BC0871183', 133390], + ['BC0871186', 139687], +] diff --git a/jobs/correction-ben-statement/corrections_results.csv b/jobs/correction-ben-statement/corrections_results.csv new file mode 100644 index 0000000000..68832b4eb4 --- /dev/null +++ b/jobs/correction-ben-statement/corrections_results.csv @@ -0,0 +1,4 @@ +"identifier","id" +"BC1218818",110441 
+"BC1218819",110445 +"BC1218820",110446 diff --git a/jobs/correction-ben-statement/data.py b/jobs/correction-ben-statement/data.py deleted file mode 100644 index a81110f91e..0000000000 --- a/jobs/correction-ben-statement/data.py +++ /dev/null @@ -1,7 +0,0 @@ -# Populate this list with the existing BEN business identifiers from specified environment -# This works as a data file for Jupyter notebook used to add Registrar's Notation -ben_businesses = [ - "BC0871277", - "BC0871062" -] - diff --git a/jobs/correction-ben-statement/queries.sql b/jobs/correction-ben-statement/queries.sql new file mode 100644 index 0000000000..0cf5992cb0 --- /dev/null +++ b/jobs/correction-ben-statement/queries.sql @@ -0,0 +1,17 @@ +-- query to get all businesses (BENs) for Registrar's Notation +select b.identifier +from businesses b +where b.legal_type = 'BEN' +order by b.identifier asc; + +-- query to get all ACTIVE businesses (BENs) for Corrections +select b.identifier, f.id +from businesses b join filings f on b.id = f.business_id +where b.legal_type = 'BEN' and f.filing_type = 'incorporationApplication' and b.state = 'ACTIVE' +order by b.identifier asc; + +-- query to get all ACTIVE businesses (BENs) which have "in progress" drafts +select b.identifier, f.id, f.filing_type +from businesses b join filings f on b.id = f.business_id +where b.legal_type = 'BEN' and b.state = 'ACTIVE' and f.status = 'DRAFT' +order by b.identifier asc; diff --git a/jobs/correction-ben-statement/registrar_notation_result.csv b/jobs/correction-ben-statement/registrar_notation_result.csv new file mode 100644 index 0000000000..2f58ca3532 --- /dev/null +++ b/jobs/correction-ben-statement/registrar_notation_result.csv @@ -0,0 +1,4 @@ +"BC1230101" +"BC1230102" +"BC1230104" + diff --git a/jobs/correction-ben-statement/rn_output.py b/jobs/correction-ben-statement/rn_output.py new file mode 100644 index 0000000000..7fe05540b4 --- /dev/null +++ b/jobs/correction-ben-statement/rn_output.py @@ -0,0 +1,5 @@ +rn_businesses = [ + 'BC1230101', + 'BC1230102', + 'BC1230104', +] diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index 690a963c6a..3c22066383 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -1073,16 +1073,17 @@ def _format_correction_data(self, filing): def _format_name_request_data(self, filing, versioned_business: Business): name_request_json = filing.get('correction').get('nameRequest', {}) - filing['nameRequest'] = name_request_json - prev_legal_name = versioned_business.legal_name + if name_request_json: + filing['nameRequest'] = name_request_json + prev_legal_name = versioned_business.legal_name - if name_request_json and not (new_legal_name := name_request_json.get('legalName')): - new_legal_name = Business.generate_numbered_legal_name(name_request_json['legalType'], - versioned_business.identifier) + if name_request_json and not (new_legal_name := name_request_json.get('legalName')): + new_legal_name = Business.generate_numbered_legal_name(name_request_json['legalType'], + versioned_business.identifier) - if new_legal_name and prev_legal_name != new_legal_name: - filing['previousLegalName'] = prev_legal_name - filing['newLegalName'] = new_legal_name + if new_legal_name and prev_legal_name != new_legal_name: + filing['previousLegalName'] = prev_legal_name + filing['newLegalName'] = new_legal_name def _format_name_translations_data(self, filing, prev_completed_filing: Filing): filing['listOfTranslations'] = 
filing['correction'].get('nameTranslations', []) @@ -1165,6 +1166,8 @@ def _format_party_data(self, filing, prev_completed_filing: Filing): filing['ceasedParties'] = parties_deleted def _format_share_class_data(self, filing, prev_completed_filing: Filing): # pylint: disable=too-many-locals; # noqa: E501; + if filing.get('correction').get('shareStructure') is None: + return filing['shareClasses'] = filing.get('correction').get('shareStructure', {}).get('shareClasses') dates = filing['correction']['shareStructure'].get('resolutionDates', []) formatted_dates = [ diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py index 8fbe8de684..b2092a750e 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py @@ -159,6 +159,8 @@ def correct_business_data(business: Business, # pylint: disable=too-many-locals def update_parties(business: Business, parties: list, correction_filing_rec: Filing): """Create a new party or get them if they already exist.""" # Cease the party roles not present in the edit request + if parties is None: + return end_date_time = datetime.datetime.utcnow() parties_to_update = [party.get('officer').get('id') for party in parties if party.get('officer').get('id') is not None] From 13bf95859b9ad565a80d7930b720be1064343898 Mon Sep 17 00:00:00 2001 From: Aimee Date: Thu, 9 Jan 2025 15:00:47 -0800 Subject: [PATCH 020/133] 14816 - Batch job to make limited restoration historical (#3165) * Add GH CI/CD for the expired limited restoration job * Add setup and requirements files * Add Dockerfile * Add Makefile * Add devops configuration * Add Kubernetes configuration * Add logging configuration * Add utils files * Add main job implementation --- .../expired-limited-restoration-cd.yml | 103 +++++++++++ .../expired-limited-restoration-ci.yml | 83 +++++++++ jobs/expired-limited-restoration/Dockerfile | 25 +++ jobs/expired-limited-restoration/Makefile | 148 +++++++++++++++ jobs/expired-limited-restoration/__init__.py | 0 jobs/expired-limited-restoration/config.py | 103 +++++++++++ .../devops/vaults.json | 10 + .../file_expired_limited_restoration.py | 175 ++++++++++++++++++ .../expired-limited-restoration/k8s/Readme.md | 11 ++ .../k8s/templates/bc.yaml | 121 ++++++++++++ .../k8s/templates/cronjob.yaml | 138 ++++++++++++++ jobs/expired-limited-restoration/logging.conf | 28 +++ .../requirements.txt | 30 +++ .../requirements/bcregistry-libraries.txt | 0 .../requirements/dev.txt | 20 ++ .../requirements/prod.txt | 11 ++ jobs/expired-limited-restoration/run.sh | 3 + jobs/expired-limited-restoration/setup.cfg | 60 ++++++ jobs/expired-limited-restoration/setup.py | 22 +++ .../utils/__init__.py | 13 ++ .../utils/logging.py | 26 +++ 21 files changed, 1130 insertions(+) create mode 100644 .github/workflows/expired-limited-restoration-cd.yml create mode 100644 .github/workflows/expired-limited-restoration-ci.yml create mode 100644 jobs/expired-limited-restoration/Dockerfile create mode 100644 jobs/expired-limited-restoration/Makefile create mode 100644 jobs/expired-limited-restoration/__init__.py create mode 100644 jobs/expired-limited-restoration/config.py create mode 100644 jobs/expired-limited-restoration/devops/vaults.json create mode 100644 
jobs/expired-limited-restoration/file_expired_limited_restoration.py create mode 100644 jobs/expired-limited-restoration/k8s/Readme.md create mode 100644 jobs/expired-limited-restoration/k8s/templates/bc.yaml create mode 100644 jobs/expired-limited-restoration/k8s/templates/cronjob.yaml create mode 100644 jobs/expired-limited-restoration/logging.conf create mode 100644 jobs/expired-limited-restoration/requirements.txt create mode 100644 jobs/expired-limited-restoration/requirements/bcregistry-libraries.txt create mode 100644 jobs/expired-limited-restoration/requirements/dev.txt create mode 100644 jobs/expired-limited-restoration/requirements/prod.txt create mode 100755 jobs/expired-limited-restoration/run.sh create mode 100644 jobs/expired-limited-restoration/setup.cfg create mode 100644 jobs/expired-limited-restoration/setup.py create mode 100644 jobs/expired-limited-restoration/utils/__init__.py create mode 100644 jobs/expired-limited-restoration/utils/logging.py diff --git a/.github/workflows/expired-limited-restoration-cd.yml b/.github/workflows/expired-limited-restoration-cd.yml new file mode 100644 index 0000000000..902b9cb75b --- /dev/null +++ b/.github/workflows/expired-limited-restoration-cd.yml @@ -0,0 +1,103 @@ +name: Expired Limited Restoration Job CD + +on: + push: + branches: + - main + paths: + - "jobs/expired-limited-restoration/**" + workflow_dispatch: + inputs: + environment: + description: "Environment (dev/test/prod)" + required: true + default: "dev" + +defaults: + run: + shell: bash + working-directory: ./jobs/expired-limited-restoration + +env: + APP_NAME: "expired-limited-restoration" + TAG_NAME: "dev" + +jobs: + expired-limited-restoration-cd-by-push: + runs-on: ubuntu-20.04 + + if: github.event_name == 'push' && github.repository == 'bcgov/lear' + environment: + name: "dev" + + steps: + - uses: actions/checkout@v3 + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: CD Flow + shell: bash + env: + OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} + OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} + OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} + OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} + OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} + TAG_NAME: ${{ env.TAG_NAME }} + run: | + make cd + + - name: Rocket.Chat Notification + uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master + if: failure() + with: + type: ${{ job.status }} + job_name: "*Future Effective Filings Job Built and Deployed to ${{env.TAG_NAME}}*" + channel: "#registries-bot" + url: ${{ secrets.ROCKETCHAT_WEBHOOK }} + commit: true + token: ${{ secrets.GITHUB_TOKEN }} + + expired-limited-restoration-cd-by-dispatch: + runs-on: ubuntu-20.04 + + if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' + environment: + name: "${{ github.event.inputs.environment }}" + + steps: + - uses: actions/checkout@v3 + - name: Set env by input + run: | + echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: CD Flow + shell: bash + env: + OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} + OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} + OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} + OPENSHIFT_SA_TOKEN: ${{ 
secrets.OPENSHIFT4_SA_TOKEN }} + OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} + TAG_NAME: ${{ env.TAG_NAME }} + run: | + make cd + + - name: Rocket.Chat Notification + uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master + if: failure() + with: + type: ${{ job.status }} + job_name: "*Future Effective Filings Job Built and Deployed to ${{env.TAG_NAME}}*" + channel: "#registries-bot" + url: ${{ secrets.ROCKETCHAT_WEBHOOK }} + commit: true + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/expired-limited-restoration-ci.yml b/.github/workflows/expired-limited-restoration-ci.yml new file mode 100644 index 0000000000..f32a1b70d9 --- /dev/null +++ b/.github/workflows/expired-limited-restoration-ci.yml @@ -0,0 +1,83 @@ +name: Expired Limited Restoration Job CI + +on: + pull_request: + types: [assigned, synchronize] + paths: + - "jobs/expired-limited-restoration/**" + +defaults: + run: + shell: bash + working-directory: ./jobs/expired-limited-restoration + +jobs: + setup-job: + runs-on: ubuntu-20.04 + + if: github.repository == 'bcgov/lear' + + steps: + - uses: actions/checkout@v3 + - run: "true" + + linting: + needs: setup-job + runs-on: ubuntu-20.04 + + strategy: + matrix: + python-version: [3.8] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + make setup + - name: Lint with pylint + id: pylint + run: | + make pylint + - name: Lint with flake8 + id: flake8 + run: | + make flake8 + + testing: + needs: setup-job + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + make setup + - name: Test with pytest + id: test + run: | + make test + #- name: Upload coverage to Codecov + # uses: codecov/codecov-action@v3 + # with: + # file: ./queue_services/entity-pay/coverage.xml + # flags: entitypay + # name: codecov-entity-pay + # fail_ci_if_error: false + + build-check: + needs: setup-job + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v3 + - name: build to check strictness + id: build + run: | + make build-nc diff --git a/jobs/expired-limited-restoration/Dockerfile b/jobs/expired-limited-restoration/Dockerfile new file mode 100644 index 0000000000..6dcfb50c59 --- /dev/null +++ b/jobs/expired-limited-restoration/Dockerfile @@ -0,0 +1,25 @@ +# platform=linux/amd64 +FROM python:3.8.5-buster +USER root + +# Create working directory +RUN mkdir /opt/app-root && chmod 755 /opt/app-root +WORKDIR /opt/app-root + +# Install the requirements +COPY ./requirements.txt . + +#RUN pip install --upgrade pip +RUN pip install pip==20.1.1 +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . 
+ +USER 1001 + +# Set Python path +ENV PYTHONPATH=/opt/app-root/src + +EXPOSE 8080 + +CMD [ "python", "/opt/app-root/file_expired_limited_restoration.py" ] diff --git a/jobs/expired-limited-restoration/Makefile b/jobs/expired-limited-restoration/Makefile new file mode 100644 index 0000000000..a3b38ce72a --- /dev/null +++ b/jobs/expired-limited-restoration/Makefile @@ -0,0 +1,148 @@ +.PHONY: license +.PHONY: setup +.PHONY: ci cd +.PHONY: run + +MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) +CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) + +PROJECT_NAME:=expired-limited-restoration +DOCKER_NAME:=expired-limited-restoration + +################################################################################# +# COMMANDS -- Setup # +################################################################################# +setup: install install-dev ## Setup the project + +clean: clean-build clean-pyc clean-test ## Clean the project + rm -rf venv/ + +clean-build: ## Clean build files + rm -fr build/ + rm -fr dist/ + rm -fr .eggs/ + find . -name '*.egg-info' -exec rm -fr {} + + find . -name '*.egg' -exec rm -fr {} + + +clean-pyc: ## Clean cache files + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . -name '__pycache__' -exec rm -fr {} + + +clean-test: ## clean test files + find . -name '.pytest_cache' -exec rm -fr {} + + rm -fr .tox/ + rm -f .coverage + rm -fr htmlcov/ + +build-req: clean ## Upgrade requirements + test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ + . venv/bin/activate ;\ + pip install pip==20.1.1 ;\ + pip install -Ur requirements/prod.txt ;\ + pip freeze | sort > requirements.txt ;\ + cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ + pip install -Ur requirements/bcregistry-libraries.txt + +install: clean ## Install python virtual environment + test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ + . venv/bin/activate ;\ + pip install pip==20.1.1 ;\ + pip install -Ur requirements.txt + +install-dev: ## Install local application + . venv/bin/activate ; \ + pip install -Ur requirements/dev.txt; \ + pip install -e . + +################################################################################# +# COMMANDS - CI # +################################################################################# +ci: lint flake8 test ## CI flow + +pylint: ## Linting with pylint + . venv/bin/activate && pylint --rcfile=setup.cfg file_expired_limited_restoration.py + +flake8: ## Linting with flake8 + . venv/bin/activate && flake8 file_expired_limited_restoration.py + +lint: pylint flake8 ## run all lint type scripts + +test: ## Unit testing + . 
venv/bin/activate && pytest + +mac-cov: test ## Run the coverage report and display in a browser window (mac) + @open -a "Google Chrome" htmlcov/index.html + +################################################################################# +# COMMANDS - CD +# expects the terminal to be openshift login +# expects export OPENSHIFT_DOCKER_REGISTRY="" +# expects export OPENSHIFT_SA_NAME="$(oc whoami)" +# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" +# expects export OPENSHIFT_REPOSITORY="" +# expects export TAG_NAME="dev/test/prod" +# expects export OPS_REPOSITORY="" # +################################################################################# +cd: ## CD flow +ifeq ($(TAG_NAME), test) +# cd: update-env +cd: + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) +else ifeq ($(TAG_NAME), prod) +# cd: update-env +cd: + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) +else +TAG_NAME=dev +# cd: build update-env tag +cd: build tag +endif + +build: ## Build the docker container + docker build . -t $(DOCKER_NAME) \ + --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ + --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ + +build-nc: ## Build the docker container without caching + docker build --no-cache -t $(DOCKER_NAME) . + +REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) +push: #build ## Push the docker container to the registry & tag latest + @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ + docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ + docker push $(REGISTRY_IMAGE):latest + +# 1Password CLI1 will be deprecated on Oct 1, 2024 +# VAULTS=`cat devops/vaults.json` +# update-env: ## Update env from 1pass +# oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ +# -m "secret" \ +# -e "$(TAG_NAME)" \ +# -a "$(DOCKER_NAME)-$(TAG_NAME)" \ +# -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ +# -v "$(VAULTS)" \ +# -r "false" \ +# -f "false" + +tag: push ## tag image + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) + +################################################################################# +# COMMANDS - Local # +################################################################################# + +run: ## Run the project in local + . 
venv/bin/activate && python file_expired_limited_restoration.py + +################################################################################# +# Self Documenting Commands # +################################################################################# +.PHONY: help + +.DEFAULT_GOAL := help + +help: + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/expired-limited-restoration/__init__.py b/jobs/expired-limited-restoration/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jobs/expired-limited-restoration/config.py b/jobs/expired-limited-restoration/config.py new file mode 100644 index 0000000000..57e04c49ad --- /dev/null +++ b/jobs/expired-limited-restoration/config.py @@ -0,0 +1,103 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""All of the configuration for the service is captured here. + +All items are loaded, or have Constants defined here that are loaded into the Flask configuration. +All modules and lookups get their configuration from the Flask config, rather than reading environment variables +directly or by accessing this configuration directly. +""" +import os +import random +import sys + +from dotenv import find_dotenv, load_dotenv + + +# this will load all the envars from a .env file located in the project root (api) +load_dotenv(find_dotenv()) + +CONFIGURATION = { + 'development': 'config.DevConfig', + 'testing': 'config.TestConfig', + 'production': 'config.ProdConfig', + 'default': 'config.ProdConfig' +} + + +def get_named_config(config_name: str = 'production'): + """Return the configuration object based on the name. 
+ + :raise: KeyError: if an unknown configuration is requested + """ + if config_name in ['production', 'staging', 'default']: + config = ProdConfig() + elif config_name == 'testing': + config = TestConfig() + elif config_name == 'development': + config = DevConfig() + else: + raise KeyError(f"Unknown configuration '{config_name}'") + return config + + +class _Config(object): # pylint: disable=too-few-public-methods + """Base class configuration that should set reasonable defaults for all the other configurations.""" + + PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) + + LEGAL_API_URL = os.getenv('LEGAL_API_URL', '') + + SENTRY_DSN = os.getenv('SENTRY_DSN') or '' + SENTRY_DSN = '' if SENTRY_DSN.lower() == 'null' else SENTRY_DSN + + ACCOUNT_SVC_AUTH_URL = os.getenv('ACCOUNT_SVC_AUTH_URL', None) + ACCOUNT_SVC_CLIENT_ID = os.getenv('ACCOUNT_SVC_CLIENT_ID', None) + ACCOUNT_SVC_CLIENT_SECRET = os.getenv('ACCOUNT_SVC_CLIENT_SECRET', None) + ACCOUNT_SVC_TIMEOUT = os.getenv('ACCOUNT_SVC_TIMEOUT', 20) + + SECRET_KEY = 'a secret' + + TESTING = False + DEBUG = False + + +class DevConfig(_Config): # pylint: disable=too-few-public-methods + """Config for local development.""" + + TESTING = False + DEBUG = True + + +class TestConfig(_Config): # pylint: disable=too-few-public-methods + """In support of testing only used by the py.test suite.""" + + DEBUG = True + TESTING = True + + LEGAL_API_URL = os.getenv('LEGAL_API_URL_TEST', '') + SENTRY_DSN = os.getenv('SENTRY_DSN_TEST', '') + + +class ProdConfig(_Config): # pylint: disable=too-few-public-methods + """Production environment configuration.""" + + SECRET_KEY = os.getenv('SECRET_KEY', None) + + if not SECRET_KEY: + SECRET_KEY = os.urandom(24) + print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) + + TESTING = False + DEBUG = False diff --git a/jobs/expired-limited-restoration/devops/vaults.json b/jobs/expired-limited-restoration/devops/vaults.json new file mode 100644 index 0000000000..ff693d1c2b --- /dev/null +++ b/jobs/expired-limited-restoration/devops/vaults.json @@ -0,0 +1,10 @@ +[ + { + "vault": "entity", + "application": [ + "filings-jobs", + "entity-service-account", + "sentry" + ] + } +] diff --git a/jobs/expired-limited-restoration/file_expired_limited_restoration.py b/jobs/expired-limited-restoration/file_expired_limited_restoration.py new file mode 100644 index 0000000000..c4324575c4 --- /dev/null +++ b/jobs/expired-limited-restoration/file_expired_limited_restoration.py @@ -0,0 +1,175 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Expired Limited Restoration service. + +This module is being used to process businesses with expired limited restorations. 
+""" +import asyncio +import logging +import os +from datetime import datetime + +import requests +import sentry_sdk # noqa: I001; pylint: disable=ungrouped-imports; conflicts with Flake8 +from dotenv import find_dotenv, load_dotenv +from flask import Flask +from sentry_sdk.integrations.logging import LoggingIntegration # noqa: I001 + +import config # pylint: disable=import-error +from utils.logging import setup_logging # pylint: disable=import-error + + +setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf')) # important to do this first + +# this will load all the envars from a .env file located in the project root +load_dotenv(find_dotenv()) + +SENTRY_LOGGING = LoggingIntegration( + event_level=logging.ERROR # send errors as events +) + + +def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): + """Return a configured Flask App using the Factory method.""" + app = Flask(__name__) + app.config.from_object(config.CONFIGURATION[run_mode]) + # Configure Sentry + if app.config.get('SENTRY_DSN', None): + sentry_sdk.init( + dsn=app.config.get('SENTRY_DSN'), + integrations=[SENTRY_LOGGING] + ) + + return app + + +def get_bearer_token(app: Flask, timeout): + """Get a valid Bearer token for the service to use.""" + token_url = app.config.get('ACCOUNT_SVC_AUTH_URL') + client_id = app.config.get('ACCOUNT_SVC_CLIENT_ID') + client_secret = app.config.get('ACCOUNT_SVC_CLIENT_SECRET') + + data = 'grant_type=client_credentials' + + # get service account token + res = requests.post(url=token_url, + data=data, + headers={'content-type': 'application/x-www-form-urlencoded'}, + auth=(client_id, client_secret), + timeout=timeout) + + try: + return res.json().get('access_token') + except Exception: # pylint: disable=broad-exception-caught; # noqa: B902 + return None + + +def get_businesses_to_process(app: Flask): + """Get list of business identifiers that need processing.""" + timeout = int(app.config.get('ACCOUNT_SVC_TIMEOUT')) + token = get_bearer_token(app, timeout) + + response = requests.get( + f'{app.config["LEGAL_API_URL"]}/internal/expired_restoration', + headers={ + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {token}' + }, + timeout=timeout + ) + + if not response or response.status_code != 200: + app.logger.error(f'Failed to get businesses from legal-api. \ + {response} {response.json()} {response.status_code}') + raise Exception # pylint: disable=broad-exception-raised; + + return response.json().get('identifiers', []) + + +def create_put_back_off_filing(app: Flask, identifier: str): + """Create a putBackOff filing for the business.""" + timeout = int(app.config.get('ACCOUNT_SVC_TIMEOUT')) + token = get_bearer_token(app, timeout) + filing_data = { + 'filing': { + 'header': { + 'date': datetime.utcnow().date().isoformat(), + 'name': 'putBackOff', + 'certifiedBy': 'system' + }, + 'business': { + 'identifier': identifier + }, + 'putBackOff': { + 'details': 'Put back off filing due to expired limited restoration.' + } + } + } + + response = requests.post( + f'{app.config["LEGAL_API_URL"]}/businesses/{identifier}/filings', + json=filing_data, + headers={ + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {token}', + 'hide-in-ledger': 'true' # Add this header to hide from ledger + }, + timeout=timeout + ) + + if not response or response.status_code != 201: + app.logger.error(f'Failed to create filing from legal-api. 
\ + {response} {response.json()} {response.status_code}') + raise Exception # pylint: disable=broad-exception-raised; + + return response.json() + + +async def run(loop, application: Flask): # pylint: disable=redefined-outer-name + """Run the methods for processing expired limited restorations.""" + with application.app_context(): + try: + # 1. get businesses that need to be processed + businesses = get_businesses_to_process(application) + + if not businesses: + application.logger.debug('No businesses to process') + return + + application.logger.debug(f'Processing {len(businesses)} businesses') + + # 2. create put back off filing for each business + for identifier in businesses: + try: + # create putBackOff filing via API + filing = create_put_back_off_filing(application, identifier) + filing_id = filing['filing']['header']['filingId'] + application.logger.debug( + f'Successfully created put back off filing {filing_id} for {identifier}' + ) + except Exception as err: # pylint: disable=broad-except; # noqa: B902 + application.logger.error(f'Error processing business {identifier}: {err}') + continue + except Exception as err: # pylint: disable=broad-except; # noqa: B902 + application.logger.error(f'Job failed: {err}') + + +if __name__ == '__main__': + application = create_app() + try: + event_loop = asyncio.get_event_loop() + event_loop.run_until_complete(run(event_loop, application)) + except Exception as err: # pylint: disable=broad-except; # noqa: B902; Catching all errors from the frameworks + application.logger.error(err) # pylint: disable=no-member + raise err diff --git a/jobs/expired-limited-restoration/k8s/Readme.md b/jobs/expired-limited-restoration/k8s/Readme.md new file mode 100644 index 0000000000..d3e4a6c1dc --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/Readme.md @@ -0,0 +1,11 @@ + + +# buildconfig +oc process -f openshift/templates/bc.yaml -o yaml | oc apply -f - -n cc892f-tools +# cronjob +oc process -f openshift/templates/cronjob.yaml -o yaml | oc apply -f - -n cc892f-dev +oc process -f openshift/templates/cronjob.yaml -p TAG=test -o yaml | oc apply -f - -n cc892f-test +oc process -f openshift/templates/cronjob.yaml -p TAG=prod -o yaml | oc apply -f - -n cc892f-prod + +# manually run job +oc create job --from=cronjob/ -n cc892f-prod diff --git a/jobs/expired-limited-restoration/k8s/templates/bc.yaml b/jobs/expired-limited-restoration/k8s/templates/bc.yaml new file mode 100644 index 0000000000..97da9e9a13 --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/templates/bc.yaml @@ -0,0 +1,121 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + labels: + app: ${NAME} + name: ${NAME}-build +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + name: ${NAME} + labels: + app: ${NAME} +- apiVersion: v1 + kind: BuildConfig + metadata: + name: ${NAME} + labels: + app: ${NAME} + spec: + output: + to: + kind: ImageStreamTag + name: ${NAME}:${OUTPUT_IMAGE_TAG} + resources: + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} + requests: + cpu: ${CPU_REQUEST} + memory: ${MEMORY_REQUEST} + runPolicy: Serial + source: + contextDir: ${SOURCE_CONTEXT_DIR} + git: + ref: ${GIT_REF} + uri: ${GIT_REPO_URL} + dockerfile: | + FROM docker-remote.artifacts.developer.gov.bc.ca/python:3.8.5-buster + USER root + + # Create working directory + RUN mkdir /opt/app-root && chmod 755 /opt/app-root + WORKDIR /opt/app-root + + # Install the requirements + COPY ./requirements.txt . 
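        # NOTE: pip is pinned to 20.1.1 in the next step, matching the Makefile's
        # install and build-req targets, so dependency resolution inside the image
        # stays consistent with the local virtualenv setup.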
+ + #RUN pip install --upgrade pip + RUN pip install pip==20.1.1 + RUN pip install --no-cache-dir -r requirements.txt + + COPY . . + + USER 1001 + + # Set Python path + ENV PYTHONPATH=/opt/app-root/src + + EXPOSE 8080 + + CMD [ "python", "/opt/app-root/file_expired_limited_restoration.py"" ] + type: Git + strategy: + type: Docker + dockerStrategy: + pullSecret: + name: artifactory-creds + + triggers: + - type: ConfigChange +parameters: +- description: | + The name assigned to all of the objects defined in this template. + You should keep this as default unless your know what your doing. + displayName: Name + name: NAME + required: true + value: expired-limited-restoration +- description: | + The URL to your GIT repo, don't use the this default unless + your just experimenting. + displayName: Git Repo URL + name: GIT_REPO_URL + required: true + value: https://github.com/bcgov/lear.git +- description: The git reference or branch. + displayName: Git Reference + name: GIT_REF + required: true + value: main +- description: The source context directory. + displayName: Source Context Directory + name: SOURCE_CONTEXT_DIR + required: false + value: jobs/expired-limited-restoration +- description: The tag given to the built image. + displayName: Output Image Tag + name: OUTPUT_IMAGE_TAG + required: true + value: latest +- description: The resources CPU limit (in cores) for this build. + displayName: Resources CPU Limit + name: CPU_LIMIT + required: true + value: "2" +- description: The resources Memory limit (in Mi, Gi, etc) for this build. + displayName: Resources Memory Limit + name: MEMORY_LIMIT + required: true + value: 2Gi +- description: The resources CPU request (in cores) for this build. + displayName: Resources CPU Request + name: CPU_REQUEST + required: true + value: "1" +- description: The resources Memory request (in Mi, Gi, etc) for this build. 
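# The resource parameters in this template can be overridden when it is processed, e.g.
# (hypothetical values; adjust the template path to wherever bc.yaml sits in this repo):
#   oc process -f k8s/templates/bc.yaml -p CPU_LIMIT=4 -p MEMORY_LIMIT=4Gi -o yaml | oc apply -f - -n cc892f-tools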
+ displayName: Resources Memory Request + name: MEMORY_REQUEST + required: true + value: 2Gi diff --git a/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml b/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml new file mode 100644 index 0000000000..6cce33b2bf --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml @@ -0,0 +1,138 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + labels: + name: ${NAME} + name: ${NAME}-cronjob +objects: +- kind: "CronJob" + apiVersion: "batch/v1beta1" + metadata: + name: "${NAME}-${TAG}" + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + schedule: "${SCHEDULE}" + concurrencyPolicy: "Forbid" + successfulJobsHistoryLimit: "${{SUCCESS_JOBS_HISTORY_LIMIT}}" + failedJobsHistoryLimit: "${{FAILED_JOBS_HISTORY_LIMIT}}" + jobTemplate: + metadata: + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + backoffLimit: ${{JOB_BACKOFF_LIMIT}} + template: + metadata: + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + containers: + - name: "${NAME}-${TAG}" + image: "${IMAGE_REGISTRY}/${IMAGE_NAMESPACE}/${NAME}:${TAG}" + imagePullPolicy: Always + command: + - /bin/sh + - -c + - cd /opt/app-root; ./run.sh + env: + - name: COLIN_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: COLIN_URL + - name: LEGAL_API_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: LEGAL_API_URL + - name: ACCOUNT_SVC_AUTH_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_AUTH_URL + - name: ACCOUNT_SVC_CLIENT_ID + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_CLIENT_ID + - name: ACCOUNT_SVC_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_CLIENT_SECRET + - name: SENTRY_DSN + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: SENTRY_DSN + restartPolicy: "Never" + terminationGracePeriodSeconds: 30 + activeDeadlineSeconds: 1600 + dnsPolicy: "ClusterFirst" +parameters: + - name: NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the server instance. + required: true + value: expired-limited-restoration + + - name: TAG + displayName: Environment TAG name + description: The TAG name for this environment, e.g., dev, test, prod + value: dev + required: true + + - name: ROLE + displayName: Role + description: Role + required: true + value: job + + - name: NAMESPACE + displayName: Namespace Name + description: The base namespace name for the project. + required: true + value: cc892f + + - name: IMAGE_NAMESPACE + displayName: Image Namespace + required: true + description: The namespace of the OpenShift project containing the imagestream for the application. + value: cc892f-tools + + - name: IMAGE_REGISTRY + displayName: Image Registry + required: true + description: The image registry of the OpenShift project. 
+ value: image-registry.openshift-image-registry.svc:5000 + + - name: "SCHEDULE" + displayName: "Cron Schedule" + description: "Cron Schedule to Execute the Job (using local cluster system TZ)" + value: "59 23 * * *" + required: true + + - name: "SUCCESS_JOBS_HISTORY_LIMIT" + displayName: "Successful Job History Limit" + description: "The number of successful jobs that will be retained" + value: "5" + required: true + + - name: "FAILED_JOBS_HISTORY_LIMIT" + displayName: "Failed Job History Limit" + description: "The number of failed jobs that will be retained" + value: "2" + required: true + + - name: "JOB_BACKOFF_LIMIT" + displayName: "Job Backoff Limit" + description: "The number of attempts to try for a successful job outcome" + value: "0" + required: false diff --git a/jobs/expired-limited-restoration/logging.conf b/jobs/expired-limited-restoration/logging.conf new file mode 100644 index 0000000000..0806a8a2c0 --- /dev/null +++ b/jobs/expired-limited-restoration/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys=root,api + +[handlers] +keys=console + +[formatters] +keys=simple + +[logger_root] +level=DEBUG +handlers=console + +[logger_api] +level=DEBUG +handlers=console +qualname=api +propagate=0 + +[handler_console] +class=StreamHandler +level=DEBUG +formatter=simple +args=(sys.stdout,) + +[formatter_simple] +format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s +datefmt= diff --git a/jobs/expired-limited-restoration/requirements.txt b/jobs/expired-limited-restoration/requirements.txt new file mode 100644 index 0000000000..45702e705b --- /dev/null +++ b/jobs/expired-limited-restoration/requirements.txt @@ -0,0 +1,30 @@ +Flask-Moment==0.10.0 +Flask-Script==2.0.6 +Flask==1.1.2 +Jinja2==2.11.2 +MarkupSafe==1.1.1 +Werkzeug==0.16.1 +aniso8601==8.1.0 +attrs==20.3.0 +blinker==1.4 +certifi==2020.12.5 +chardet==3.0.4 +click==7.1.2 +ecdsa==0.14.1 +flask-jwt-oidc==0.1.5 +flask-restplus==0.13.0 +gunicorn==20.0.4 +idna==2.10 +itsdangerous==1.1.0 +jsonschema==3.2.0 +pyasn1==0.4.8 +pyrsistent==0.17.3 +python-dateutil==2.8.1 +python-dotenv==0.15.0 +python-jose==3.2.0 +pytz==2020.4 +requests==2.25.0 +rsa==4.6 +sentry-sdk==1.20.0 +six==1.15.0 +urllib3==1.26.11 diff --git a/jobs/expired-limited-restoration/requirements/bcregistry-libraries.txt b/jobs/expired-limited-restoration/requirements/bcregistry-libraries.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jobs/expired-limited-restoration/requirements/dev.txt b/jobs/expired-limited-restoration/requirements/dev.txt new file mode 100644 index 0000000000..e8f1c165a6 --- /dev/null +++ b/jobs/expired-limited-restoration/requirements/dev.txt @@ -0,0 +1,20 @@ +# Everything the developer needs outside of the production requirements + +# Testing +pytest +pytest-mock +requests +pyhamcrest + +# Lint and code style +flake8 +flake8-blind-except +flake8-debugger +flake8-docstrings +flake8-isort +flake8-quotes +pep8-naming +autopep8 +coverage +pylint +pylint-flask diff --git a/jobs/expired-limited-restoration/requirements/prod.txt b/jobs/expired-limited-restoration/requirements/prod.txt new file mode 100644 index 0000000000..c09b53c441 --- /dev/null +++ b/jobs/expired-limited-restoration/requirements/prod.txt @@ -0,0 +1,11 @@ +gunicorn +Flask +Flask-Script +Flask-Moment +Flask-RESTplus +flask-jwt-oidc>=0.1.5 +python-dotenv +requests +sentry-sdk[flask] +python-dateutil +Werkzeug<1 diff --git a/jobs/expired-limited-restoration/run.sh b/jobs/expired-limited-restoration/run.sh new file mode 100755 
index 0000000000..bf0a27ab11 --- /dev/null +++ b/jobs/expired-limited-restoration/run.sh @@ -0,0 +1,3 @@ +cd /opt/app-root +echo 'run file_expired_limited_restoration' +python file_expired_limited_restoration.py diff --git a/jobs/expired-limited-restoration/setup.cfg b/jobs/expired-limited-restoration/setup.cfg new file mode 100644 index 0000000000..adfeed4332 --- /dev/null +++ b/jobs/expired-limited-restoration/setup.cfg @@ -0,0 +1,60 @@ +[flake8] +exclude = .git,*migrations* +max-line-length = 120 +docstring-min-length=10 +per-file-ignores = + */__init__.py:F401 + *.py:B902 + +[pycodestyle] +max_line_length = 120 +ignore = E501 +docstring-min-length=10 +notes=FIXME,XXX # TODO is ignored +match_dir = src/legal_api +ignored-modules=flask_sqlalchemy + sqlalchemy +per-file-ignores = + */__init__.py:F401 +good-names= + b, + d, + i, + e, + f, + k, + q, + u, + v, + ar, + id, + rv, + logger, + +[pylint] +ignore=migrations,test +notes=FIXME,XXX,TODO +ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session +ignored-classes=scoped_session +disable=C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101 +good-names= + b, + d, + i, + e, + f, + k, + q, + u, + v, + ar, + id, + rv, + logger, + +[isort] +line_length = 120 +indent = 4 +multi_line_output = 3 +lines_after_imports = 2 +include_trailing_comma = True diff --git a/jobs/expired-limited-restoration/setup.py b/jobs/expired-limited-restoration/setup.py new file mode 100644 index 0000000000..6c75af7455 --- /dev/null +++ b/jobs/expired-limited-restoration/setup.py @@ -0,0 +1,22 @@ +# Copyright © 2025 Province of British Columbia. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Installer and setup for this module.""" + +from setuptools import find_packages, setup + + +setup( + name='expired-limited-restoration', + packages=find_packages() +) diff --git a/jobs/expired-limited-restoration/utils/__init__.py b/jobs/expired-limited-restoration/utils/__init__.py new file mode 100644 index 0000000000..82c0485dc3 --- /dev/null +++ b/jobs/expired-limited-restoration/utils/__init__.py @@ -0,0 +1,13 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
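The putBackOff payload assembled by create_put_back_off_filing above can be exercised without a live legal-api. The sketch below is a minimal pytest case using pytest-mock (already listed in requirements/dev.txt), intended to be run from the job directory; the LEGAL_API_URL value, the BC1234567 identifier, and the MockResponse helper are illustrative assumptions, not part of the job:

from file_expired_limited_restoration import create_app, create_put_back_off_filing


class MockResponse:
    """Tiny stand-in for requests.Response with the fields the job checks."""

    status_code = 201

    def json(self):
        return {'filing': {'header': {'filingId': 111}}}


def test_create_put_back_off_filing(mocker):
    app = create_app('testing')
    app.config['LEGAL_API_URL'] = 'http://legal-api.test/api/v2'  # assumed test URL
    mocker.patch('file_expired_limited_restoration.get_bearer_token', return_value='a-token')
    post_mock = mocker.patch('file_expired_limited_restoration.requests.post',
                             return_value=MockResponse())

    filing = create_put_back_off_filing(app, 'BC1234567')

    # the filing id returned by legal-api is passed straight back to the caller
    assert filing['filing']['header']['filingId'] == 111
    sent = post_mock.call_args.kwargs['json']
    assert sent['filing']['header']['name'] == 'putBackOff'
    assert sent['filing']['business']['identifier'] == 'BC1234567'
    # the job asks legal-api to keep this filing off the public ledger
    assert post_mock.call_args.kwargs['headers']['hide-in-ledger'] == 'true'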
diff --git a/jobs/expired-limited-restoration/utils/logging.py b/jobs/expired-limited-restoration/utils/logging.py new file mode 100644 index 0000000000..9e2f456995 --- /dev/null +++ b/jobs/expired-limited-restoration/utils/logging.py @@ -0,0 +1,26 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Centralized setup of logging for the service.""" +import logging.config +import sys +from os import path + + +def setup_logging(conf): + """Create the services logger.""" + if conf and path.isfile(conf): + logging.config.fileConfig(conf) + print('Configure logging, from conf:{}'.format(conf), file=sys.stdout) + else: + print('Unable to configure logging, attempted conf:{}'.format(conf), file=sys.stderr) From b9a688bdb5aae20e65a188c0eec5e4648c046348 Mon Sep 17 00:00:00 2001 From: flutistar Date: Fri, 10 Jan 2025 07:35:55 -0800 Subject: [PATCH 021/133] update entity id upon approval --- .../business_filings/business_documents.py | 9 +++- .../src/legal_api/services/document_record.py | 45 ++++++++++++++----- 2 files changed, 42 insertions(+), 12 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index 5b62e18a3e..ddee00159c 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -26,7 +26,7 @@ from legal_api.exceptions import ErrorCode, get_error_message from legal_api.models import Business, Document, Filing as FilingModel # noqa: I001 from legal_api.reports import get_pdf -from legal_api.services import MinioService, authorized +from legal_api.services import MinioService, authorized, DocumentRecordService from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime from legal_api.utils.util import cors_preflight @@ -82,6 +82,13 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None return get_pdf(filing.storage, legal_filing_name) elif file_key and (document := Document.find_by_file_key(file_key)): if document.filing_id == filing.id: # make sure the file belongs to this filing + if document.file_key.startswith('DS'): # docID from DRS + response = DocumentRecordService.download_document('CORP', document.file_key) + return current_app.response_class( + response=response, + status=HTTPStatus.OK, + mimetype='application/pdf' + ) response = MinioService.get_file(document.file_key) return current_app.response_class( response=response.data, diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 9f6e33c86b..955fb57817 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -29,7 +29,7 @@ class DocumentRecordService: def upload_document(document_class: str, 
document_type: str) -> dict: """Upload document to Docuemtn Record Service.""" query_params = request.args.to_dict() - file = request.files.get('file') + file = request.data.get('file') # Ensure file exists if not file: current_app.logger.debug('No file found in request.') @@ -45,8 +45,6 @@ def upload_document(document_class: str, document_type: str) -> dict: 'error': validation_error } - file_content = file.read() - try: # Read and encode the file content as base64 file_content = file.read() @@ -63,7 +61,7 @@ def upload_document(document_class: str, document_type: str) -> dict: headers={ 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), - 'Content-Type': 'application/pdf' + 'Content-Type': file.content_type } ).json() @@ -99,7 +97,7 @@ def delete_document(document_service_id: str) -> dict: @staticmethod def get_document(document_class: str, document_service_id: str) -> dict: - + """Get document record from Document Record Service.""" DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name url = f'{DRS_BASE_URL}/searches/{document_class}?documentServiceId={document_service_id}' try: @@ -113,7 +111,37 @@ def get_document(document_class: str, document_service_id: str) -> dict: current_app.logger.debug(f'Get document from document record service {response}') return response[0] except Exception as e: - current_app.logger.debug(f'Error on downloading document {e}') + current_app.logger.debug(f'Error on getting a document object {e}') + return {} + + @staticmethod + def download_document(document_class: str, document_service_id: str) -> dict: + """Download document from Document Record Service.""" + doc_object = DocumentRecordService.get_document(document_class, document_service_id) + + response = requests.get(doc_object['documentURL']) # Download file from storage + response.raise_for_status() # Raise an HTTPError for bad responses (4xx and 5xx) + + return response + + @staticmethod + def update_business_identifier(business_identifier: str, document_service_id: str): + """Update business identifier up on approval.""" + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}/documents/{document_service_id}' + + try: + response = requests.patch( + url, json={ 'consumerIdentifer': business_identifier }, + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } + ).json() + current_app.logger.debug(f'Update business identifier - {business_identifier}') + return response + except Exception as e: + current_app.logger.debug(f'Error on deleting document {e}') return {} @staticmethod @@ -123,11 +151,6 @@ def validate_pdf(file, content_length) -> Optional[list]: try: pdf_reader = PyPDF2.PdfFileReader(file) - # Check that all pages in the pdf are letter size and able to be processed. 
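            # (For context: the check removed just below enforced 8.5" x 11" letter-size pages.
            #  PDF page dimensions are expressed in points at 72 points per inch, so the
            #  mediaBox comparison tested width against 8.5 * 72 = 612 and height against
            #  11 * 72 = 792.)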
- if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): - msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), - 'path': file.filename}) - if content_length > 30000000: msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) From 43230774b7b42854d0a8441959356a8353d5c366 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Mon, 13 Jan 2025 08:44:01 -0800 Subject: [PATCH 022/133] 24715 Updates to support legacy ledger display name (#3169) * 24715 Updates to support legacy ledger display name(new) Signed-off-by: Hongjing Chen * fix linting Signed-off-by: Hongjing Chen * update for CoD Signed-off-by: Hongjing Chen * add more display name mapping for unsupported types but comment them out Signed-off-by: Hongjing Chen * remove check for isLedgerPlaceholder Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- .../flows/tombstone/tombstone_mappings.py | 136 +++++++++++++++++- .../flows/tombstone/tombstone_queries.py | 1 + data-tool/flows/tombstone/tombstone_utils.py | 21 ++- legal-api/src/legal_api/core/meta/filing.py | 4 + 4 files changed, 156 insertions(+), 6 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index ebd6ed059b..4c49d539a4 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -210,8 +210,8 @@ def has_value(cls, value): EventFilings.FILE_ADVD2: ['dissolution', 'voluntary'], EventFilings.FILE_ADVDS: ['dissolution', 'voluntary'], - EventFilings.DISLV_NULL: ['dissolution', 'voluntary'], - EventFilings.DISLC_NULL: ['dissolution', 'administrative'], + EventFilings.DISLV_NULL: ['dissolution', 'voluntary'], # TODO: re-map + EventFilings.DISLC_NULL: ['dissolution', 'administrative'], # TODO: re-map EventFilings.SYSDA_NULL: ['dissolution', 'administrative'], EventFilings.SYSDS_NULL: ['dissolution', 'administrative'], EventFilings.SYSDF_NULL: ['dissolution', 'involuntary'], @@ -220,7 +220,7 @@ def has_value(cls, value): EventFilings.FILE_ICORP: 'incorporationApplication', EventFilings.FILE_ICORU: 'incorporationApplication', EventFilings.FILE_ICORC: 'incorporationApplication', - EventFilings.CONVICORP_NULL: 'incorporationApplication', + EventFilings.CONVICORP_NULL: 'incorporationApplication', # TODO: re-map # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported @@ -249,6 +249,135 @@ def has_value(cls, value): } +EVENT_FILING_DISPLAY_NAME_MAPPING = { + EventFilings.FILE_AGMDT: 'Notice of Change - AGM Date', + EventFilings.FILE_AGMLC: 'Notice of Change - AGM Location', + + EventFilings.FILE_NOALA: 'Notice of Alteration', + EventFilings.FILE_NOALB: 'Notice of Alteration from a BC Unlimited Liability Company to Become a BC Company', + EventFilings.FILE_NOALU: 'Notice of Alteration from a BC Company to Become a BC Unlimited Liability Company', + EventFilings.FILE_NOALC: 'Notice of Alteration from a BC Company to Become a Community Contribution Company', + EventFilings.FILE_AM_BC: 'Amendment - Translated Name', + EventFilings.FILE_AM_LI: 'Amendment - Ledger Information', + EventFilings.FILE_AM_RM: 'Amendment - Receiver or Receiver Manager', + EventFilings.FILE_AM_SS: 'Amendment - Share Structure', + + # TODO: Amalgamation Out Consent - unsupported + # IAMGO: 'Application For Authorization For Amalgamation (into a Foreign Corporation) with 6 months consent granted' + # TODO: Amalgamation Out - 
unsupported + # AMALO: 'Record of Amalgamation' + + + EventFilings.FILE_AMALH: 'Amalgamation Application Short Form (Horizontal)', + EventFilings.FILE_AMALR: 'Amalgamation Application (Regular)', + EventFilings.FILE_AMALV: 'Amalgamation Application Short Form (Vertical)', + EventFilings.FILE_AMLHU: 'Amalgamation Application Short Form (Horizontal) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLRU: 'Amalgamation Application (Regular) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLVU: 'Amalgamation Application Short Form (Vertical) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLHC: 'Amalgamation Application Short Form (Horizontal) for a Community Contribution Company', + EventFilings.FILE_AMLRC: 'Amalgamation Application (Regular) for a Community Contribution Company', + EventFilings.FILE_AMLVC: 'Amalgamation Application Short Form (Vertical) for a Community Contribution Company', + + EventFilings.FILE_ANNBC: 'BC Annual Report', # has suffix of date, dynamically add it during formatting + + EventFilings.FILE_APTRA: 'Application to Transfer Registered Office', + EventFilings.FILE_NOERA: 'Notice of Elimination of Registered Office', + EventFilings.FILE_NOCAD: 'Notice of Change of Address', + EventFilings.FILE_AM_DO: 'Amendment - Dissolved Office', + EventFilings.FILE_AM_RR: 'Amendment - Registered and Records Offices', + + EventFilings.FILE_NOCDR: 'Notice of Change of Directors', # dynamically add suffix for some scenarios + EventFilings.FILE_AM_DI: 'Amendment - Director', + + EventFilings.FILE_CONTO: '6 Months Consent to Continue Out', + EventFilings.FILE_COUTI: 'Instrument of Continuation Out', + + EventFilings.FILE_CONTI: 'Continuation Application', + EventFilings.FILE_CONTU: 'Continuation Application for a BC Unlimited Liability Company', + EventFilings.FILE_CONTC: 'Continuation Application for a Community Contribution Company', + + EventFilings.FILE_CO_AR: 'Correction - Annual Report', + EventFilings.FILE_CO_BC: 'Correction - BC Company Name/Translated Name', + EventFilings.FILE_CO_DI: 'Correction - Director', + EventFilings.FILE_CO_DO: 'Correction - Dissolved Office', + EventFilings.FILE_CO_LI: 'Correction - Ledger Information', + EventFilings.FILE_CO_PF: 'Correction - Put Back Off', + EventFilings.FILE_CO_PO: 'Correction - Put Back On', + EventFilings.FILE_CO_RM: 'Correction - Receiver or Receiver Manager', + EventFilings.FILE_CO_RR: 'Correction - Registered and Records Offices', + EventFilings.FILE_CO_SS: 'Correction - Share Structure', + EventFilings.FILE_CO_TR: 'Correction - Transition', + EventFilings.FILE_CORRT: 'Correction', + + EventFilings.FILE_COURT: 'Court Order', + + # TODO: Delay of Dissolution - unsupported (need confirmation) + # no ledger item in colin + + EventFilings.DISD1_DISDE: "Registrar''s Notation - Dissolution or Cancellation Delay", # has prefix "Registrar's Notation - " + EventFilings.DISD2_DISDE: "Registrar''s Notation - Dissolution or Cancellation Delay", + + EventFilings.FILE_ADVD2: 'Application for Dissolution (Voluntary Dissolution)', + EventFilings.FILE_ADVDS: 'Application for Dissolution (Voluntary Dissolution)', + EventFilings.DISLV_NULL: None, # TODO: re-map, voluntary - no ledger in colin + status liquidated + EventFilings.DISLC_NULL: None, # TODO: re-map, admin - no ledger in colin + status liquidated + EventFilings.SYSDA_NULL: None, # admin - status Administrative Dissolution + EventFilings.SYSDS_NULL: None, # admin - status Administrative Dissolution + EventFilings.SYSDF_NULL: None, # invol - no ledger in 
lear & colin + EventFilings.SYSDT_NULL: None, # invol - no ledger in lear & colin + + EventFilings.FILE_ICORP: 'Incorporation Application', + EventFilings.FILE_ICORU: 'Incorporation Application for a BC Unlimited Liability Company', + EventFilings.FILE_ICORC: 'Incorporation Application for a Community Contribution Company', + EventFilings.CONVICORP_NULL: None, # TODO: re-map + + # TODO: Ledger - unsupported + # TODO: Legacy Other - unsupported + EventFilings.FILE_AM_PF: 'Amendment - Put Back Off', + EventFilings.FILE_AM_PO: 'Amendment - Put Back On', + EventFilings.FILE_AM_TR: 'Amendment - Transition', + + # TODO: Liquidation - unsupported (need to check if anything missing) + # NOLDS: "Notice of Location of Dissolved Company''s Records" + # NOCDS: "Notice of Change Respecting Dissolved Company''s Records" + # NOTRA: 'Notice of Transfer of Records' + # NOAPL: 'Notice of Appointment of Liquidator' + # NOCAL: 'Notice of Change of Address of Liquidator And/Or Liquidation Records Office' + # NOCEL: 'Notice of Ceasing to Act as Liquidator' + # LIQUR: 'Liquidation Report' + # LQWOS: 'Notice of Withdrawal Statement of Intent to Liquidate' + # NOARM: 'Notice of Appointment of Receiver or Receiver Manager' + # NOCER: 'Notice of Ceasing to Act as Receiver or Receiver Manager' + # LQSIN: 'Statement of Intent to Liquidate' + # LQSCO: 'Stay of Liquidation - Court Ordered' + # LQDIS: 'Discontinuance of Liquidation - Court Ordered' + # LQCON: 'Continuance of Liquidation - Court Ordered' + # NOCRM: 'Notice of Change of Address of Receiver or Receiver Manager' + # ADVLQ: 'Application for Dissolution (Voluntary Liquidation)' + # AM_LR: 'Amendment - Liquidation Report' + # CO_LR: 'Correction - Liquidation Report' + # AM_LQ: 'Amendment - Liquidator' + # CO_LQ: 'Correction - Liquidator' + + EventFilings.FILE_NWITH: 'Notice of Withdrawal', + + EventFilings.FILE_REGSN: "Registrar''s Notation", + EventFilings.FILE_REGSO: "Registrar''s Order", + + EventFilings.FILE_RESTL: 'Restoration Application - Limited', + EventFilings.FILE_RESTF: 'Restoration Application - Full', + EventFilings.FILE_RESXL: 'Restoration Application (Extend Time Limit)', + EventFilings.FILE_RESXF: 'Restoration Application (Convert Limited to Full)', + EventFilings.FILE_RUSTL: 'Restoration Application - Limited for a BC Unlimited Liability Company', + EventFilings.FILE_RUSTF: 'Restoration Application - Full for a BC Unlimited Liability Company', + EventFilings.FILE_RUSXL: 'Restoration Application (Extend Time Limit) for a BC Unlimited Liability Company', + EventFilings.FILE_RUSXF: 'Restoration Application (Convert Limited to Full) for a BC Unlimited Liability Company', + + EventFilings.FILE_TRANS: 'Transition Application', + EventFilings.FILE_TRANP: 'Post Restoration Transition Application', +} + + LEAR_FILING_BUSINESS_UPDATE_MAPPING = { 'incorporationApplication': ['last_coa_date', 'last_cod_date'], 'changeOfAddress': ['last_coa_date'], @@ -272,4 +401,3 @@ def has_value(cls, value): # ingore the following since we won't map to them # 'dissolved', 'restorationApplication', 'continuedOut' ] - diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 0103bd8812..11ed8f924c 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -447,6 +447,7 @@ def get_filings_query(corp_num): -- paper only now -> f_ods_type f.nr_num as f_nr_num, to_char(f.period_end_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as 
f_period_end_dt_str, + to_char(f.change_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_change_at_str, --- filing user upper(u.user_id) as u_user_id, u.last_name as u_last_name, diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 9245b2276c..c228b4cba9 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -1,6 +1,7 @@ import copy import json from datetime import datetime, timezone +from decimal import Decimal import pandas as pd import pytz @@ -8,9 +9,10 @@ from tombstone.tombstone_base_data import (ALIAS, FILING, FILING_JSON, OFFICE, PARTY, PARTY_ROLE, RESOLUTION, SHARE_CLASSES, USER) -from tombstone.tombstone_mappings import (EVENT_FILING_LEAR_TARGET_MAPPING, +from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, + EVENT_FILING_LEAR_TARGET_MAPPING, LEAR_FILING_BUSINESS_UPDATE_MAPPING, - LEAR_STATE_FILINGS) + LEAR_STATE_FILINGS, EventFilings) unsupported_event_file_types = set() @@ -380,6 +382,7 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective 'filingType': data['f_filing_type_cd'] }, 'isLedgerPlaceholder': True, + 'colinDisplayName': get_colin_display_name(data) } if filing_type == 'annualReport': @@ -414,6 +417,20 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective return filing_json, meta_data +def get_colin_display_name(data: dict) -> str: + event_file_type = data['event_file_type'] + name = EVENT_FILING_DISPLAY_NAME_MAPPING.get(event_file_type) + if event_file_type == EventFilings.FILE_ANNBC.value: + ar_dt_str = data['f_period_end_dt_str'] + ar_dt = datetime.strptime(ar_dt_str, '%Y-%m-%d %H:%M:%S%z') + suffix = ar_dt.strftime('%b %d, %Y').upper() + name = f'{name} - {suffix}' + elif event_file_type == EventFilings.FILE_NOCDR.value: + if not data['f_change_at_str']: + name = f'{name} - Address Change or Name Correction Only' + return name + + def build_epoch_filing(business_id: int) -> dict: now = datetime.utcnow().replace(tzinfo=pytz.UTC) filing = copy.deepcopy(FILING) diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index e6efdad49f..a1f0899a14 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -655,6 +655,10 @@ class FilingMeta: # pylint: disable=too-few-public-methods @staticmethod def display_name(business: Business, filing: FilingStorage) -> Optional[str]: """Return the name of the filing to display on outputs.""" + # if filing is imported from COLIN and has custom disaply name + if filing.meta_data and\ + (display_name := filing.meta_data.get('colinDisplayName')): + return display_name # if there is no lookup if not (names := FILINGS.get(filing.filing_type, {}).get('displayName')): if not (filing.filing_sub_type and From aba054816cc76919a3a5b1ea917ae30e9d91b920 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Mon, 13 Jan 2025 15:01:47 -0800 Subject: [PATCH 023/133] 24479 - Tombstone pipeline - amalgamation & legal-api adjustment (#3163) * 24479 - Tombstone pipeline - amalgamation & legal-api adjustment Signed-off-by: Hongjing Chen * fix linting Signed-off-by: Hongjing Chen * legal-api - small tweaks Signed-off-by: Hongjing Chen * fix bug for claim batch query Signed-off-by: Hongjing Chen * support xpro outside CA - US only Signed-off-by: Hongjing Chen * update TODO(nots) for TING & TED 
Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- data-tool/flows/batch_delete_flow.py | 8 ++ .../common/corp_processing_queue_service.py | 3 +- data-tool/flows/corps_tombstone_flow.py | 44 +++++++-- .../flows/tombstone/tombstone_base_data.py | 66 +++++++++---- .../flows/tombstone/tombstone_queries.py | 55 ++++++++++- data-tool/flows/tombstone/tombstone_utils.py | 95 ++++++++++++++++--- .../scripts/colin_corps_extract_postgres_ddl | 7 +- data-tool/scripts/transfer_cprd_corps.sql | 20 +++- .../legal_api/models/amalgamating_business.py | 20 ++-- .../src/legal_api/models/amalgamation.py | 41 +++++--- legal-api/src/legal_api/models/business.py | 14 ++- legal-api/src/legal_api/models/filing.py | 2 + .../legal_api/reports/business_document.py | 34 +++++-- 13 files changed, 336 insertions(+), 73 deletions(-) diff --git a/data-tool/flows/batch_delete_flow.py b/data-tool/flows/batch_delete_flow.py index d2292f59c8..d9c471e075 100644 --- a/data-tool/flows/batch_delete_flow.py +++ b/data-tool/flows/batch_delete_flow.py @@ -85,6 +85,10 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): 'source': 'resolutions', 'params': {'business_id': business_ids}, }, + { + 'source': 'amalgamations', + 'params': {'business_id': business_ids}, + }, ] query_futures_one = [] @@ -114,6 +118,10 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): 'source': 'share_series', 'params': {'share_class_id': results_one['share_classes']}, }, + { + 'source': 'amalgamating_businesses', + 'params': { 'amalgamation_id': results_one['amalgamations']}, + } ] query_futures_two = [] diff --git a/data-tool/flows/common/corp_processing_queue_service.py b/data-tool/flows/common/corp_processing_queue_service.py index eecd621b00..62636891d5 100644 --- a/data-tool/flows/common/corp_processing_queue_service.py +++ b/data-tool/flows/common/corp_processing_queue_service.py @@ -87,7 +87,7 @@ def claim_batch(self, flow_run_id: str, batch_size: int) -> List[str]: """ query = """ WITH claimable AS ( - SELECT corp_num + SELECT corp_num, id FROM corp_processing WHERE processed_status = :pending_status AND environment = :environment @@ -103,6 +103,7 @@ def claim_batch(self, flow_run_id: str, batch_size: int) -> List[str]: last_modified = NOW() FROM claimable WHERE corp_processing.corp_num = claimable.corp_num + AND corp_processing.id = claimable.id RETURNING corp_processing.corp_num, corp_processing.claimed_at """ diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index bfbb06c49e..55db964233 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -122,7 +122,8 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: # Note: The business info is partially loaded for businesses table now. And it will be fully # updated by the following placeholder historical filings migration. But it depends on the # implementation of next step. 
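    # (With conflict_column='identifier' and update=True, the load_data call below behaves as an
    #  upsert: it looks up an existing businesses row by identifier and, when found, updates it in
    #  place and returns its id instead of inserting a duplicate; see the matching load_data
    #  change in tombstone_utils.py later in this patch.)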
- business_id = load_data(conn, 'businesses', tombstone_data['businesses']) + # force to update business info if it exists (used for pre-loaded TING) + business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier', update=True) for office in tombstone_data['offices']: office['offices']['business_id'] = business_id @@ -172,7 +173,7 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: return business_id -@task(name='3.2-Placeholder-Historical-Filings-Migrate-Task') +@task(name='3.2.1-Placeholder-Historical-Filings-Migrate-Task') def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id: int, users_mapper: dict): """Migrate placeholder historical filings.""" filings_data = tombstone_data['filings'] @@ -180,7 +181,8 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id state_filing_index = update_info['state_filing_index'] update_business_data = update_info['businesses'] # load placeholder filings - for i, f in enumerate(filings_data): + for i, data in enumerate(filings_data): + f = data['filings'] transaction_id = load_data(conn, 'transaction', {'issued_at': datetime.utcnow().isoformat()}) username = f['submitter_id'] user_id = users_mapper.get(username) @@ -192,6 +194,10 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id if i == state_filing_index: update_info['businesses']['state_filing_id'] = filing_id + # load amalgamation snapshot linked to the current filing + if amalgamation_data := data['amalgamations']: + load_amalgamation_snapshot(conn, amalgamation_data, business_id, filing_id) + # load epoch filing epoch_filing_data = build_epoch_filing(business_id) load_data(conn, 'filings', epoch_filing_data) @@ -201,6 +207,30 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id update_data(conn, 'businesses', update_business_data, business_id) +@task(name='3.2.2-Amalgamation-Snapshot-Migrate-Task') +def load_amalgamation_snapshot(conn: Connection, amalgamation_data: dict, business_id: int, filing_id: int): + """Migrate amalgamation snapshot.""" + amalgamation = amalgamation_data['amalgamations'] + amalgamation['business_id'] = business_id + amalgamation['filing_id'] = filing_id + amalgamation_id = load_data(conn, 'amalgamations', amalgamation) + + for ting in amalgamation_data['amalgamating_businesses']: + if ting_identifier:= ting.get('ting_identifier'): + # if TING exists in db, update state filing info, + # if not exist, insert a placeholder with state filing info + del ting['ting_identifier'] + temp_ting = { + 'identifier': ting_identifier, + 'state_filing_id': filing_id, + 'dissolution_date': amalgamation['amalgamation_date'] + } + ting_business_id = load_data(conn, 'businesses', temp_ting, 'identifier', update=True) + ting['business_id'] = ting_business_id + ting['amalgamation_id'] = amalgamation_id + load_data(conn, 'amalgamating_businesses', ting) + + @task(name='3.3-Update-Auth-Task') def update_auth(conn: Connection, config, corp_num: str, tombstone_data: dict): """Create auth entity and affiliate as required.""" @@ -272,6 +302,7 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di transaction.commit() except Exception as e: transaction.rollback() + print(f'❌ Error migrating corp snapshot and filings data for {corp_num}: {repr(e)}') return corp_num, e print(f'✅ Complete migrating {corp_num}!') return corp_num, None @@ -286,6 +317,7 @@ def tombstone_flow(): """Entry of tombstone pipeline""" # 
TODO: track migration progress + error handling # TODO: update unprocessed query + count query + # TODO: current pipeline doesn't support migrating TED & TING at the same time, need a better strategy try: config = get_config() colin_engine = colin_init(config) @@ -346,10 +378,11 @@ def tombstone_flow(): print(f'❗ Skip migrating {corp_num} due to data collection error.') wait(corp_futures) - + succeeded = 0 for f in corp_futures: corp_num, e = f.result() if not e: + succeeded += 1 processing_service.update_corp_status( flow_run_id, corp_num, @@ -364,8 +397,7 @@ def tombstone_flow(): error=f"Migration failed - {repr(e)}" ) - succeeded = sum(1 for f in corp_futures if f.state.is_completed()) - failed = len(corp_futures) - succeeded + failed = len(corp_futures) - succeeded - skipped print(f'🌟 Complete round {cnt}. Succeeded: {succeeded}. Failed: {failed}. Skip: {skipped}') cnt += 1 migrated_cnt += succeeded diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index cd3a2d247a..1dc409fa63 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -149,25 +149,29 @@ } FILING = { - 'filing_date': None, # timestamptz - 'filing_json': FILING_JSON, - 'filing_type': None, - 'filing_sub_type': None, - 'status': 'COMPLETED', - 'completion_date': None, # timestamptz - 'effective_date': None, # timestamptz - 'meta_data': None, - # default values for now - 'paper_only': True, - 'source': 'COLIN', - 'colin_only': False, - 'deletion_locked': False, - # FK - 'business_id': None, - 'transaction_id': None, - 'submitter_id': None, - # others - 'submitter_roles': None, + 'filings': { + 'filing_date': None, # timestamptz + 'filing_json': FILING_JSON, + 'filing_type': None, + 'filing_sub_type': None, + 'status': 'COMPLETED', + 'completion_date': None, # timestamptz + 'effective_date': None, # timestamptz + 'meta_data': None, + # default values for now + 'paper_only': True, + 'source': 'COLIN', + 'colin_only': False, + 'deletion_locked': False, + # TODO: new column - hide_in_ledger + # FK + 'business_id': None, + 'transaction_id': None, + 'submitter_id': None, + # others + 'submitter_roles': None, + }, + 'amalgamations': None # optional } FILING_COMBINED = { @@ -175,9 +179,31 @@ 'update_business_info': { # business info to update }, - 'state_filing_index': -1 + 'state_filing_index': -1, +} + +AMALGAMATION = { + 'amalgamations': { + 'amalgamation_date': None, + 'court_approval': None, + 'amalgamation_type': None, + # FK + 'business_id': None, + 'filing_id': None, + }, + 'amalgamating_businesses': [] } +AMALGAMTING_BUSINESS = { + 'foreign_jurisdiction': None, + 'foreign_name': None, + 'foreign_identifier': None, + 'role': None, + 'foreign_jurisdiction_region': None, + # FK + 'business_id': None, + 'amalgamation_id': None, +} # ======== tombstone example ======== TOMBSTONE = { diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 11ed8f924c..c37765dd8f 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -32,8 +32,15 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0207097', 'BC0693625', 'BC0754041', 'BC0072008', 'BC0355241', 'BC0642237', 'BC0555891', 'BC0308683', -- correction -- 'BC0688906', 'BC0870100', 'BC0267106', 'BC0873461', -- alteration -- 'BC0536998', 'BC0574096', 'BC0663523' -- new mappings of CoA, CoD + -- TED +-- 'BC0812196', -- amalg - 
r (with xpro) +-- 'BC0870100', -- amalg - v +-- 'BC0747392' -- amalg - h + -- TING +-- 'BC0593394', -- amalg - r (with xpro) +-- 'BC0805986', 'BC0561086', -- amalg - v +-- 'BC0543231', 'BC0358476' -- amalg - h -- ) - and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') -- TODO: update transfer script and cs.end_event_id is null -- and ((cp.processed_status is null or cp.processed_status != 'COMPLETED')) @@ -448,6 +455,14 @@ def get_filings_query(corp_num): f.nr_num as f_nr_num, to_char(f.period_end_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_period_end_dt_str, to_char(f.change_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_change_at_str, + -- state filing info + ( + select start_event_id + from corp_state + where 1 = 1 + and corp_num = '{corp_num}' + and end_event_id is null + ) as cs_state_event_id, --- filing user upper(u.user_id) as u_user_id, u.last_name as u_last_name, @@ -473,6 +488,41 @@ def get_filings_query(corp_num): return query +def get_amalgamation_query(corp_num): + query = f""" + select + e.event_id as e_event_id, + ted_corp_num, + ting_corp_num, + cs.state_type_cd as ting_state_type_cd, + cs.end_event_id as ting_state_end_event_id, + corp_involve_id, + can_jur_typ_cd, + adopted_corp_ind, + home_juri_num, + othr_juri_desc, + foreign_nme, + -- event + e.event_type_cd as e_event_type_cd, + to_char(e.event_timerstamp::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str, + -- filing + f.filing_type_cd as f_filing_type_cd, + to_char(f.effective_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, + f.court_appr_ind as f_court_approval, + -- event_file + e.event_type_cd || '_' || COALESCE(f.filing_type_cd, 'NULL') as event_file_type + from corp_involved_amalgamating cig + left outer join event e on e.event_id = cig.event_id + left outer join filing f on e.event_id = f.event_id + left outer join corp_state cs on cig.ting_corp_num = cs.corp_num and cs.start_event_id = e.event_id + where 1 = 1 + and cs.end_event_id is null + and cig.ted_corp_num = '{corp_num}' + order by cig.corp_involve_id; + """ + return query + + def get_corp_snapshot_filings_queries(config, corp_num): queries = { 'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX), @@ -481,7 +531,8 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'share_classes': get_share_classes_share_series_query(corp_num), 'aliases': get_aliases_query(corp_num), 'resolutions': get_resolutions_query(corp_num), - 'filings': get_filings_query(corp_num) + 'filings': get_filings_query(corp_num), + 'amalgamations': get_amalgamation_query(corp_num) } return queries diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index c228b4cba9..8227d37a3f 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -1,4 +1,5 @@ import copy +from decimal import Decimal import json from datetime import datetime, timezone from decimal import Decimal @@ -6,7 +7,7 @@ import pandas as pd import pytz from sqlalchemy import Connection, text -from tombstone.tombstone_base_data import (ALIAS, FILING, FILING_JSON, OFFICE, +from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, FILING_JSON, OFFICE, PARTY, PARTY_ROLE, RESOLUTION, SHARE_CLASSES, USER) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, @@ -231,7 +232,7 @@ def format_filings_data(data: 
dict) -> list[dict]: filings_data = data['filings'] formatted_filings = [] - last_state_filing_idx = -1 + state_filing_idx = -1 idx = 0 for x in filings_data: event_file_type = x['event_file_type'] @@ -251,14 +252,15 @@ def format_filings_data(data: dict) -> list[dict]: filing_json, meta_data = build_filing_json_meta_data(filing_type, filing_subtype, effective_date, x) - filing = copy.deepcopy(FILING) + filing_body = copy.deepcopy(FILING['filings']) + amalgamation = None # make it None if no valid value if not (user_id := x['u_user_id']): user_id = x['u_full_name'] if x['u_full_name'] else None - filing = { - **filing, + filing_body = { + **filing_body, 'filing_date': effective_date, 'filing_type': filing_type, 'filing_sub_type': filing_subtype, @@ -266,7 +268,15 @@ def format_filings_data(data: dict) -> list[dict]: 'effective_date': effective_date, 'filing_json': filing_json, 'meta_data': meta_data, - 'submitter_id': user_id # will be updated to real user_id when loading data into db + 'submitter_id': user_id, # will be updated to real user_id when loading data into db + } + + if filing_type == 'amalgamationApplication': + amalgamation = format_amalgamations_data(data, x['e_event_id']) + + filing = { + 'filings': filing_body, + 'amalgamations': amalgamation } formatted_filings.append(filing) @@ -277,18 +287,76 @@ def format_filings_data(data: dict) -> list[dict]: business_update_dict[k] = get_business_update_value(k, effective_date, trigger_date, filing_type, filing_subtype) # save state filing index - if filing_type in LEAR_STATE_FILINGS: - last_state_filing_idx = idx + if filing_type in LEAR_STATE_FILINGS and x['e_event_id'] == x['cs_state_event_id']: + state_filing_idx = idx idx += 1 return { 'filings': formatted_filings, 'update_business_info': business_update_dict, - 'state_filing_index': last_state_filing_idx + 'state_filing_index': state_filing_idx } +def format_amalgamations_data(data: dict, event_id: Decimal) -> dict: + amalgamations_data = data['amalgamations'] + + matched_amalgamations = [ + item for item in amalgamations_data if item.get('e_event_id') == event_id + ] + + if not matched_amalgamations: + return None + + formatted_amalgmation = copy.deepcopy(AMALGAMATION) + amalgmation_info = matched_amalgamations[0] + + amalgmation_date = amalgmation_info['f_effective_dt_str'] + if not amalgmation_date: + amalgmation_date = amalgmation_info['e_event_dt_str'] + formatted_amalgmation['amalgamations']['amalgamation_date'] = amalgmation_date + formatted_amalgmation['amalgamations']['court_approval'] = amalgmation_info['f_court_approval'] + + event_file_type = amalgmation_info['event_file_type'] + _, filing_subtype = get_target_filing_type(event_file_type) + + formatted_amalgmation['amalgamations']['amalgamation_type'] = filing_subtype + formatted_tings = formatted_amalgmation['amalgamating_businesses'] + for ting in matched_amalgamations: + formatted_tings.append(format_amalgamating_businesses(ting)) + + return formatted_amalgmation + + +def format_amalgamating_businesses(ting_data: dict) -> dict: + formatted_ting = {} + foreign_identifier = ting_data['home_juri_num'] + role = 'holding' if ting_data['adopted_corp_ind'] else 'amalgamating' + + foreign_jurisdiction = 'CA' + foreign_jurisdiction_region = ting_data['can_jur_typ_cd'] + if foreign_jurisdiction_region == 'OT': + foreign_jurisdiction = 'US' + foreign_jurisdiction_region = ting_data['othr_juri_desc'] + + if foreign_identifier: + formatted_ting = { + 'foreign_jurisdiction': foreign_jurisdiction, + 'foreign_name': 
ting_data['foreign_nme'], + 'foreign_identifier': foreign_identifier, + 'role': role, + 'foreign_jurisdiction_region': foreign_jurisdiction_region + } + else: + formatted_ting = { + 'ting_identifier': ting_data['ting_corp_num'], + 'role': role, + } + + return formatted_ting + + def format_users_data(users_data: list) -> list: formatted_users = [] @@ -379,7 +447,8 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective meta_data = { 'colinFilingInfo': { 'eventType': data['e_event_type_cd'], - 'filingType': data['f_filing_type_cd'] + 'filingType': data['f_filing_type_cd'], + 'eventId': int(data['e_event_id']) }, 'isLedgerPlaceholder': True, 'colinDisplayName': get_colin_display_name(data) @@ -433,7 +502,7 @@ def get_colin_display_name(data: dict) -> str: def build_epoch_filing(business_id: int) -> dict: now = datetime.utcnow().replace(tzinfo=pytz.UTC) - filing = copy.deepcopy(FILING) + filing = copy.deepcopy(FILING['filings']) filing = { **filing, 'filing_type': 'lear_tombstone', @@ -446,7 +515,7 @@ def build_epoch_filing(business_id: int) -> dict: return filing -def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None) -> int: +def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None, update: bool=False) -> int: columns = ', '.join(data.keys()) values = ', '.join([format_value(v) for v in data.values()]) @@ -455,6 +524,8 @@ def load_data(conn: Connection, table_name: str, data: dict, conflict_column: st check_query = f"select id from {table_name} where {conflict_column} = {conflict_value}" check_result = conn.execute(text(check_query)).scalar() if check_result: + if update: + update_data(conn, table_name, data, check_result) return check_result query = f"""insert into {table_name} ({columns}) values ({values}) returning id""" diff --git a/data-tool/scripts/colin_corps_extract_postgres_ddl b/data-tool/scripts/colin_corps_extract_postgres_ddl index 16938f1c02..f307daac9e 100644 --- a/data-tool/scripts/colin_corps_extract_postgres_ddl +++ b/data-tool/scripts/colin_corps_extract_postgres_ddl @@ -492,8 +492,11 @@ create table if not exists corp_involved_amalgamating event_id numeric(9) not null constraint fk_corp_involved_event references event (event_id), - corp_num varchar(10) not null - constraint fk_corp_involved_corporation + ted_corp_num varchar(10) not null + constraint fk_corp_involved_ted_corporation + references corporation (corp_num), + ting_corp_num varchar(10) not null + constraint fk_corp_involved_ting_corporation references corporation (corp_num), corp_involve_id numeric(9) not null, can_jur_typ_cd char(2), diff --git a/data-tool/scripts/transfer_cprd_corps.sql b/data-tool/scripts/transfer_cprd_corps.sql index 4db418dbcd..28eef3f2f2 100644 --- a/data-tool/scripts/transfer_cprd_corps.sql +++ b/data-tool/scripts/transfer_cprd_corps.sql @@ -556,7 +556,11 @@ from (select e.event_id, -- SELECT BY EVENT case when c.CORP_TYP_CD in ('BC', 'ULC', 'CC') then 'BC' || c.CORP_NUM else c.CORP_NUM - end CORP_NUM, + end TED_CORP_NUM, + case + when c2.corp_typ_cd in ('BC', 'ULC', 'CC') then 'BC' || c2.corp_num + else c2.corp_num + end TING_CORP_NUM, ci.CORP_INVOLVE_ID, ci.CAN_JUR_TYP_CD, case ci.ADOPTED_CORP_IND @@ -570,10 +574,12 @@ from (select e.event_id, -- SELECT BY EVENT from event e , CORP_INVOLVED ci , corporation c + , corporation c2 where e.event_id = ci.event_id and c.corp_num = e.corp_num - and corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') + and c.corp_typ_cd in 
('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and event_typ_cd = 'CONVAMAL' + and c2.corp_num = ci.corp_num -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', '1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') -- and rownum <= 5 UNION ALL @@ -581,7 +587,11 @@ from (select e.event_id, -- SELECT BY EVENT case when c.CORP_TYP_CD in ('BC', 'ULC', 'CC') then 'BC' || c.CORP_NUM else c.CORP_NUM - end CORP_NUM, + end TED_CORP_NUM, + case + when c2.corp_typ_cd in ('BC', 'ULC', 'CC') then 'BC' || c2.corp_num + else c2.corp_num + end TING_CORP_NUM, ci.CORP_INVOLVE_ID, ci.CAN_JUR_TYP_CD, case ci.ADOPTED_CORP_IND @@ -595,11 +605,13 @@ from (select e.event_id, -- SELECT BY EVENT from event e , CORP_INVOLVED ci , corporation c + , corporation c2 , filing f where e.event_id = ci.event_id and c.corp_num = e.corp_num + and c2.corp_num = ci.corp_num and e.event_id = f.event_id - and corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') + and c.corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and filing_typ_cd in ('AMALH', 'AMALV', 'AMALR', 'AMLHU', 'AMLVU', 'AMLRU', 'AMLHC', 'AMLVC', 'AMLRC') -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', '1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') -- and rownum <= 5 diff --git a/legal-api/src/legal_api/models/amalgamating_business.py b/legal-api/src/legal_api/models/amalgamating_business.py index aea42243b7..d719bf8360 100644 --- a/legal-api/src/legal_api/models/amalgamating_business.py +++ b/legal-api/src/legal_api/models/amalgamating_business.py @@ -73,7 +73,7 @@ def get_revision(cls, transaction_id, amalgamation_id): return amalgamating_businesses @classmethod - def get_all_revision(cls, business_id): + def get_all_revision(cls, business_id, tombstone=False): """ Get all amalgamating businesses for the given business id. @@ -83,10 +83,18 @@ def get_all_revision(cls, business_id): 3. Business T1 is dissolved as part of another amalgamation In this case T1 is involved in 2 amalgamation + + If tombstone is True, get all non-versioned amalgamating businesses + for the given business id. 
""" - amalgamating_businesses_version = version_class(AmalgamatingBusiness) - amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ - .filter(amalgamating_businesses_version.operation_type == 0) \ - .filter(amalgamating_businesses_version.business_id == business_id) \ - .order_by(amalgamating_businesses_version.transaction_id).all() + if tombstone: + amalgamating_businesses = db.session.query(AmalgamatingBusiness) \ + .filter(AmalgamatingBusiness.business_id == business_id) \ + .all() + else: + amalgamating_businesses_version = version_class(AmalgamatingBusiness) + amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ + .filter(amalgamating_businesses_version.operation_type == 0) \ + .filter(amalgamating_businesses_version.business_id == business_id) \ + .order_by(amalgamating_businesses_version.transaction_id).all() return amalgamating_businesses diff --git a/legal-api/src/legal_api/models/amalgamation.py b/legal-api/src/legal_api/models/amalgamation.py index 40650a26c0..0b2ecc0ad3 100644 --- a/legal-api/src/legal_api/models/amalgamation.py +++ b/legal-api/src/legal_api/models/amalgamation.py @@ -80,17 +80,25 @@ def json(self): } @classmethod - def get_revision_by_id(cls, transaction_id, amalgamation_id): - """Get amalgamation for the given id.""" + def get_revision_by_id(cls, amalgamation_id, transaction_id=None, tombstone=False): + """Get amalgamation for the given id. + + If tombstone is True, get all non-versioned amalgamating for the given id. + """ # pylint: disable=singleton-comparison; - amalgamation_version = version_class(Amalgamation) - amalgamation = db.session.query(amalgamation_version) \ - .filter(amalgamation_version.transaction_id <= transaction_id) \ - .filter(amalgamation_version.operation_type == 0) \ - .filter(amalgamation_version.id == amalgamation_id) \ - .filter(or_(amalgamation_version.end_transaction_id == None, # noqa: E711; - amalgamation_version.end_transaction_id > transaction_id)) \ - .order_by(amalgamation_version.transaction_id).one_or_none() + if tombstone: + amalgamation = db.session.query(Amalgamation) \ + .filter(Amalgamation.id == amalgamation_id) \ + .one_or_none() + else: + amalgamation_version = version_class(Amalgamation) + amalgamation = db.session.query(amalgamation_version) \ + .filter(amalgamation_version.transaction_id <= transaction_id) \ + .filter(amalgamation_version.operation_type == 0) \ + .filter(amalgamation_version.id == amalgamation_id) \ + .filter(or_(amalgamation_version.end_transaction_id == None, # noqa: E711; + amalgamation_version.end_transaction_id > transaction_id)) \ + .order_by(amalgamation_version.transaction_id).one_or_none() return amalgamation @classmethod @@ -108,8 +116,17 @@ def get_revision(cls, transaction_id, business_id): return amalgamation @classmethod - def get_revision_json(cls, transaction_id, business_id): - """Get amalgamation json for the given transaction id.""" + def get_revision_json(cls, transaction_id, business_id, tombstone=False): + """Get amalgamation json for the given transaction id. + + If tombstone is True, return placeholder amalgamation json. 
+ """ + if tombstone: + return { + 'identifier': 'Not Available', + 'legalName': 'Not Available' + } + amalgamation = Amalgamation.get_revision(transaction_id, business_id) from .business import Business # pylint: disable=import-outside-toplevel business = Business.find_by_internal_id(amalgamation.business_id) diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index b182eacba2..0ca1500cc9 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -472,6 +472,12 @@ def in_dissolution(self): one_or_none() return find_in_batch_processing is not None + @property + def is_tombstone(self): + """Return True if it's a tombstone business, otherwise False.""" + tombstone_filing = Filing.get_filings_by_status(self.id, [Filing.Status.TOMBSTONE]) + return bool(tombstone_filing) + def save(self): """Render a Business to the local cache.""" db.session.add(self) @@ -779,7 +785,13 @@ def get_amalgamated_into(self) -> dict: self.state_filing_id and (state_filing := Filing.find_by_id(self.state_filing_id)) and state_filing.is_amalgamation_application): - return Amalgamation.get_revision_json(state_filing.transaction_id, state_filing.business_id) + if not self.is_tombstone: + return Amalgamation.get_revision_json(state_filing.transaction_id, state_filing.business_id) + else: + return { + 'identifier': 'Not Available', + 'legalName': 'Not Available' + } return None diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 2f4f8d7ed0..b636642496 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -52,6 +52,8 @@ class Status(str, Enum): PENDING_CORRECTION = 'PENDING_CORRECTION' WITHDRAWN = 'WITHDRAWN' + TOMBSTONE = 'TOMBSTONE' + # filings with staff review APPROVED = 'APPROVED' AWAITING_REVIEW = 'AWAITING_REVIEW' diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 8d8c707050..ae5f8a5f09 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -42,6 +42,7 @@ def __init__(self, business, document_key): self._document_key = document_key self._report_date_time = LegislationDatetime.now() self._epoch_filing_date = None + self._tombstone_filing_date = None def get_pdf(self): """Render the business document pdf response.""" @@ -131,6 +132,7 @@ def _get_template_data(self, get_json=False): business_json['registrarInfo'] = {**RegistrarInfo.get_registrar_info(self._report_date_time)} self._set_description(business_json) self._set_epoch_date(business_json) + self._set_tombstone_date() if self._document_key in ['lseal', 'summary']: self._set_addresses(business_json) @@ -186,6 +188,12 @@ def _set_epoch_date(self, business: dict): self._epoch_filing_date = epoch_filing[0].effective_date business['business']['epochFilingDate'] = self._epoch_filing_date.isoformat() + def _set_tombstone_date(self): + """Set the tombstone filing date if the business is tombstone.""" + tombstone_filing = Filing.get_filings_by_status(self._business.id, [Filing.Status.TOMBSTONE]) + if tombstone_filing: + self._tombstone_filing_date = tombstone_filing[0].effective_date + def _set_description(self, business: dict): """Set business descriptors used by json and pdf template.""" legal_type = self._business.legal_type @@ -319,14 +327,20 @@ def _set_business_state_changes(self, business: dict): 'continuationOut']): 
state_filings.append(self._format_state_filing(filing)) - # If it has amalgamating businesses - if (amalgamating_businesses := AmalgamatingBusiness.get_all_revision(self._business.id)): + # If it has linked amalgamating businesses + # set placeholder info if this business is tombstone + tombstone = self._business.is_tombstone + if (amalgamating_businesses := AmalgamatingBusiness.get_all_revision(self._business.id, tombstone)): for amalgamating_business in amalgamating_businesses: - amalgamation = Amalgamation.get_revision_by_id(amalgamating_business.transaction_id, - amalgamating_business.amalgamation_id) + if tombstone: + amalgamation = Amalgamation.get_revision_by_id( + amalgamating_business.amalgamation_id, tombstone=True) + else: + amalgamation = Amalgamation.get_revision_by_id( + amalgamating_business.amalgamation_id, amalgamating_business.transaction_id) filing = Filing.find_by_id(amalgamation.filing_id) state_filing = self._format_state_filing(filing) - amalgamation_json = Amalgamation.get_revision_json(filing.transaction_id, filing.business_id) + amalgamation_json = Amalgamation.get_revision_json(filing.transaction_id, filing.business_id, tombstone) state_filings.append({ **state_filing, **amalgamation_json @@ -449,7 +463,9 @@ def _set_amalgamation_details(self, business: dict): if filings: amalgamation_application = filings[0] business['business']['amalgamatedEntity'] = True - if self._epoch_filing_date and amalgamation_application.effective_date < self._epoch_filing_date: + if (self._epoch_filing_date and amalgamation_application.effective_date < self._epoch_filing_date) or\ + (self._tombstone_filing_date and + amalgamation_application.effective_date < self._tombstone_filing_date): # imported from COLIN amalgamated_businesses_info = { 'legalName': 'Not Available', @@ -540,11 +556,15 @@ def _set_continuation_in_details(self, business: dict): @staticmethod def _format_address(address): address['streetAddressAdditional'] = address.get('streetAddressAdditional') or '' + address['addressCity'] = address.get('addressCity') or '' address['addressRegion'] = address.get('addressRegion') or '' address['deliveryInstructions'] = address.get('deliveryInstructions') or '' country = address['addressCountry'] - country = pycountry.countries.search_fuzzy(country)[0].name + if country: + country = pycountry.countries.search_fuzzy(country)[0].name + else: + country = '' address['addressCountry'] = country address['addressCountryDescription'] = country return address From 44a721d2ce2f4b54e5c07373d0df8a7d5bd4e1a7 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 14 Jan 2025 08:11:55 -0800 Subject: [PATCH 024/133] 25159 - Update NoW filing not Saving temp_reg for T business (#3170) * set temp_reg to none for T business NoW filing * update unit tests --- .../v2/business/business_filings/business_filings.py | 2 ++ .../unit/resources/v2/test_business_filings/test_filings.py | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index fd43ec46a2..0c132207c1 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -633,6 +633,8 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s if filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: 
ListFilingResource.link_now_and_withdrawn_filing(filing) + if business_identifier.startswith('T'): + filing.temp_reg = None filing.save() except BusinessException as err: return None, None, {'error': err.error}, err.status_code diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 9e82abe5e2..de5da778fe 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -1608,8 +1608,8 @@ def test_resubmit_filing_failed(session, client, jwt, filing_status, review_stat ('REGULAR-BUSINESS-DISSOLUTION', 'CBEN', 'dissolution', DISSOLUTION, False) ] ) -def test_notice_of_withdraw_filing(session, client, jwt, test_name, legal_type, filing_type, filing_json, is_temp): - """Assert that notice of withdraw for new business filings can be filed""" +def test_notice_of_withdrawal_filing(session, client, jwt, test_name, legal_type, filing_type, filing_json, is_temp): + """Assert that notice of withdrawal for new business filings can be filed""" today = datetime.utcnow().date() future_effective_date = today + timedelta(days=5) future_effective_date = future_effective_date.isoformat() @@ -1702,3 +1702,5 @@ def test_notice_of_withdraw_filing(session, client, jwt, test_name, legal_type, now_filing = (Filing.find_by_id(rv_draft.json['filing']['header']['filingId'])) assert now_filing.withdrawn_filing_id == withdrawn_filing.id assert now_filing.withdrawal_pending == False + if is_temp: + assert now_filing.temp_reg == None From 53d6e7c68afeadfa30208c03f05fb91c66668e69 Mon Sep 17 00:00:00 2001 From: Karim El Jazzar <122301442+JazzarKarim@users.noreply.github.com> Date: Wed, 15 Jan 2025 15:18:14 -0500 Subject: [PATCH 025/133] bumped version numbers up for release 24.1 (#3171) --- legal-api/src/legal_api/version.py | 2 +- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 74c5b883e2..09dd08c9d5 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.136.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 6a26e494ec..2fdc34f03b 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.136.0' # pylint: disable=invalid-name From 827b65119f34d1fbca9efec832280059d5fc54dc Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:13:34 -0800 Subject: [PATCH 026/133] 25252 - Manual approach to populate TING info (#3172) * 25252 - manual approach Signed-off-by: Hongjing Chen * add comments for manual selection Signed-off-by: Hongjing Chen * add update mapping for amalgamation Signed-off-by: Hongjing Chen * fix delete script for corner cases Signed-off-by: Hongjing Chen * tweak data formatting + add TODOs for NoW Signed-off-by: Hongjing Chen * add notes for subset selection Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing 
Chen --- data-tool/flows/batch_delete_flow.py | 8 +- data-tool/flows/corps_tombstone_flow.py | 19 ++-- .../flows/tombstone/tombstone_mappings.py | 7 +- .../flows/tombstone/tombstone_queries.py | 93 ++++++++++++++++++- data-tool/flows/tombstone/tombstone_utils.py | 29 +++--- 5 files changed, 129 insertions(+), 27 deletions(-) diff --git a/data-tool/flows/batch_delete_flow.py b/data-tool/flows/batch_delete_flow.py index d9c471e075..78f6b1d7ef 100644 --- a/data-tool/flows/batch_delete_flow.py +++ b/data-tool/flows/batch_delete_flow.py @@ -482,7 +482,10 @@ def execute_query(conn: Connection, template: dict) -> dict: else: # now only consider str and int in the list v_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), filter_none(v))) - query += f' AND {k} IN ({v_str})' + if v_str: + query += f' AND {k} IN ({v_str})' + else: + query += ' AND 1 != 1' results = conn.execute(text(query)) @@ -514,8 +517,9 @@ def execute_delete_plan(conn: Connection, table: str, ids: list): @task(persist_result=False) def delete_by_ids(conn: Connection, table_name: str, ids: list, id_name: str = 'id'): + ids = filter_none(ids) if ids: - ids_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), filter_none(ids))) + ids_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), ids)) query_str = f'DELETE FROM {table_name} WHERE {id_name} IN ({ids_str})' query = text(query_str) results = conn.execute(query, {'ids': ids}) diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 55db964233..c44dd01451 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -43,7 +43,7 @@ def reserve_unprocessed_corps(config, processing_service, flow_run_id, num_corps @task def get_unprocessed_count(config, colin_engine: Engine) -> int: query = get_total_unprocessed_count_query( - 'local', + 'tombstone-flow', config.DATA_LOAD_ENV ) @@ -122,8 +122,7 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: # Note: The business info is partially loaded for businesses table now. And it will be fully # updated by the following placeholder historical filings migration. But it depends on the # implementation of next step. 
- # force to update business info if it exists (used for pre-loaded TING) - business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier', update=True) + business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier') for office in tombstone_data['offices']: office['offices']['business_id'] = business_id @@ -204,7 +203,7 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id # load updates for business if update_business_data: - update_data(conn, 'businesses', update_business_data, business_id) + update_data(conn, 'businesses', update_business_data, 'id', business_id) @task(name='3.2.2-Amalgamation-Snapshot-Migrate-Task') @@ -217,15 +216,17 @@ def load_amalgamation_snapshot(conn: Connection, amalgamation_data: dict, busine for ting in amalgamation_data['amalgamating_businesses']: if ting_identifier:= ting.get('ting_identifier'): - # if TING exists in db, update state filing info, - # if not exist, insert a placeholder with state filing info + # TODO: avoid update info for withdrawn amalg filing (will handle in NoW work) + # TING must exists in db before updating state filing info, del ting['ting_identifier'] temp_ting = { 'identifier': ting_identifier, 'state_filing_id': filing_id, 'dissolution_date': amalgamation['amalgamation_date'] } - ting_business_id = load_data(conn, 'businesses', temp_ting, 'identifier', update=True) + ting_business_id = update_data(conn, 'businesses', temp_ting, 'identifier', ting_identifier) + if not ting_business_id: + raise Exception(f'TING {ting_identifier} does not exist, cannot migrate TED before TING') ting['business_id'] = ting_business_id ting['amalgamation_id'] = amalgamation_id load_data(conn, 'amalgamating_businesses', ting) @@ -336,7 +337,7 @@ def tombstone_flow(): # Calculate max corps to initialize max_corps = min(total, config.TOMBSTONE_BATCHES * config.TOMBSTONE_BATCH_SIZE) - print(f'max_corps: {max_corps}') + print(f'👷 max_corps: {max_corps}') reserved_corps = reserve_unprocessed_corps(config, processing_service, flow_run_id, max_corps) print(f'👷 Reserved {reserved_corps} corps for processing') print(f'👷 Going to migrate {total} corps with batch size of {batch_size}') @@ -397,7 +398,7 @@ def tombstone_flow(): error=f"Migration failed - {repr(e)}" ) - failed = len(corp_futures) - succeeded - skipped + failed = len(corp_futures) - succeeded print(f'🌟 Complete round {cnt}. Succeeded: {succeeded}. Failed: {failed}. 
Skip: {skipped}') cnt += 1 migrated_cnt += succeeded diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 4c49d539a4..9a6f294f32 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -35,6 +35,8 @@ class EventFilings(str, Enum): FILE_AMLRC = 'FILE_AMLRC' FILE_AMLVC = 'FILE_AMLVC' + CONVAMAL_NULL = 'CONVAMAL_NULL' # TODO: re-map + # Annual Report FILE_ANNBC = 'FILE_ANNBC' @@ -98,7 +100,7 @@ class EventFilings(str, Enum): FILE_ICORP = 'FILE_ICORP' FILE_ICORU = 'FILE_ICORU' FILE_ICORC = 'FILE_ICORC' - CONVICORP_NULL = 'CONVICORP_NULL' # TODO: may need to be removed + CONVICORP_NULL = 'CONVICORP_NULL' # TODO: re-map # TODO: Ledger - unsupported @@ -170,6 +172,7 @@ def has_value(cls, value): EventFilings.FILE_AMLHC: ['amalgamationApplication', 'horizontal'], EventFilings.FILE_AMLRC: ['amalgamationApplication', 'regular'], EventFilings.FILE_AMLVC: ['amalgamationApplication', 'vertical'], + EventFilings.CONVAMAL_NULL: ['amalgamationApplication', 'regular'], # TODO: re-map EventFilings.FILE_ANNBC: 'annualReport', @@ -277,6 +280,7 @@ def has_value(cls, value): EventFilings.FILE_AMLHC: 'Amalgamation Application Short Form (Horizontal) for a Community Contribution Company', EventFilings.FILE_AMLRC: 'Amalgamation Application (Regular) for a Community Contribution Company', EventFilings.FILE_AMLVC: 'Amalgamation Application Short Form (Vertical) for a Community Contribution Company', + EventFilings.CONVAMAL_NULL: None, # TODO: re-map EventFilings.FILE_ANNBC: 'BC Annual Report', # has suffix of date, dynamically add it during formatting @@ -383,6 +387,7 @@ def has_value(cls, value): 'changeOfAddress': ['last_coa_date'], 'changeOfDirectors': ['last_cod_date'], 'agmExtension': ['last_agm_date'], + 'amalgamationApplication': ['last_coa_date', 'last_cod_date'], # TODO: 'dissolution_date' - Amalgamating business, continuation out # TODO: 'continuation_out_date' - continuation out 'dissolution': ['dissolution_date'], diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index c37765dd8f..003c6c0350 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -1,5 +1,93 @@ + +def get_unprocessed_corps_subquery(flow_name, environment): + subqueries = [ + { + 'name': 'default(all corps)', + 'cte': '', + 'where': '' + }, + { + 'name':'TING', + 'cte': """ + with ting_corps as ( + select distinct ting_corp_num + from corp_involved_amalgamating + ), + ted_corps as ( + select distinct ted_corp_num + from corp_involved_amalgamating + ) + """, + 'where': """ + and exists ( + select 1 from ting_corps t where t.ting_corp_num = c.corp_num + ) + and not exists ( + select 1 from ted_corps t where t.ted_corp_num = c.corp_num + ) + """ + }, + { + 'name':'TED that all its TINGs(XP excluded) have been migrated', + 'cte': f""" + with t2 as ( + select distinct cia1.ted_corp_num + from corp_involved_amalgamating cia1 + where not exists ( + select 1 + from corp_involved_amalgamating cia2 + left join corp_processing cp + on cia2.ting_corp_num = cp.corp_num + and cp.flow_name = '{flow_name}' + and cp.environment = '{environment}' + and cp.processed_status = 'COMPLETED' + where cia2.ted_corp_num = cia1.ted_corp_num + and (cia2.ting_corp_num like 'BC%' or cia2.ting_corp_num like 'Q%' or cia2.ting_corp_num like 'C%') + and cp.corp_num is null + ) + ) + """, + 'where': """ + and exists ( + select 1 from 
t2 where c.corp_num = t2.ted_corp_num + ) + """ + }, + { + 'name':'Other corps, non-TING and non-TED', + 'cte': """ + with t3 as ( + select ting_corp_num as corp_num + from corp_involved_amalgamating + union + select ted_corp_num as corp_num + from corp_involved_amalgamating + ) + """, + 'where': """ + and not exists ( + select 1 + from t3 + where t3.corp_num = c.corp_num + ) + """ + } + ] + # Note: change index to select subset of corps + # [0] all, [1] TING, [2] TED that linked TINGs are migrated, [3] exclude TING & TED + # Acceptable order when it comes to the actual migration: + # [1]->[2]->[3] + # [2]->[1]->[3] (may fetch fewer eligible corps in [2] at the beginning, if so, go to [1] and then go back to [2], repeatedly) + # Other usage: + # [0] is used for other purposes, e.g. tweak query to select specific corps + subquery = subqueries[2] + return subquery['cte'], subquery['where'] + def get_unprocessed_corps_query(flow_name, environment, batch_size): + cte_clause, where_clause = get_unprocessed_corps_subquery(flow_name, environment) + query = f""" + {cte_clause} select c.corp_num, c.corp_type_cd, cs.state_type_cd, cp.flow_name, cp.processed_status, cp.last_processed_event_id, cp.failed_event_id, cp.failed_event_file_type from corporation c left outer join corp_state cs @@ -9,6 +97,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): and cp.flow_name = '{flow_name}' and cp.environment = '{environment}' where 1 = 1 + {where_clause} -- and c.corp_type_cd like 'BC%' -- some are 'Q%' -- and c.corp_num = 'BC0000621' -- state changes a lot -- and c.corp_num = 'BC0883637' -- one pary with multiple roles, but werid address_ids, same filing submitter but diff email @@ -18,7 +107,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- and c.corp_num = 'BC0326163' -- double quotes in corp name, no share structure, city in street additional of party's address -- and c.corp_num = 'BC0395512' -- long RG, RC addresses -- and c.corp_num = 'BC0043406' -- lots of directors --- and c.corp_num in ('BC0326163', 'BC0395512', 'BC0883637') -- TODO: re-migrate issue (can be solved by adding tracking) +-- and c.corp_num in ('BC0326163', 'BC0395512', 'BC0883637') -- and c.corp_num = 'BC0870626' -- lots of filings - IA, CoDs, ARs -- and c.corp_num = 'BC0004969' -- lots of filings - IA, ARs, transition, alteration, COD, COA -- and c.corp_num = 'BC0002567' -- lots of filings - IA, ARs, transition, COD @@ -173,7 +262,7 @@ def get_business_query(corp_num, suffix): else false end admin_freeze from corporation c - left outer join event e on e.corp_num = c.corp_num and e.event_type_cd = 'CONVICORP' -- need to add other event like CONVAMAL, CONVCIN... + left outer join event e on e.corp_num = c.corp_num and e.event_type_cd IN ('CONVICORP', 'CONVAMAL') -- need to add other event like CONVCIN... 
where 1 = 1 --and c.corp_num = 'BC0684912' -- state - ACT --and c.corp_num = 'BC0000621' -- state - HLD diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 8227d37a3f..76bd82bed2 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -316,7 +316,7 @@ def format_amalgamations_data(data: dict, event_id: Decimal) -> dict: if not amalgmation_date: amalgmation_date = amalgmation_info['e_event_dt_str'] formatted_amalgmation['amalgamations']['amalgamation_date'] = amalgmation_date - formatted_amalgmation['amalgamations']['court_approval'] = amalgmation_info['f_court_approval'] + formatted_amalgmation['amalgamations']['court_approval'] = bool(amalgmation_info['f_court_approval']) event_file_type = amalgmation_info['event_file_type'] _, filing_subtype = get_target_filing_type(event_file_type) @@ -331,16 +331,20 @@ def format_amalgamations_data(data: dict, event_id: Decimal) -> dict: def format_amalgamating_businesses(ting_data: dict) -> dict: formatted_ting = {} - foreign_identifier = ting_data['home_juri_num'] role = 'holding' if ting_data['adopted_corp_ind'] else 'amalgamating' - foreign_jurisdiction = 'CA' - foreign_jurisdiction_region = ting_data['can_jur_typ_cd'] - if foreign_jurisdiction_region == 'OT': - foreign_jurisdiction = 'US' - foreign_jurisdiction_region = ting_data['othr_juri_desc'] + foreign_identifier = None + if not (ting_data['ting_corp_num'].startswith('BC') or\ + ting_data['ting_corp_num'].startswith('Q') or\ + ting_data['ting_corp_num'].startswith('C')): + foreign_identifier = ting_data['ting_corp_num'] if foreign_identifier: + foreign_jurisdiction = 'CA' + foreign_jurisdiction_region = ting_data['can_jur_typ_cd'] + if foreign_jurisdiction_region == 'OT': + foreign_jurisdiction = 'US' + foreign_jurisdiction_region = ting_data['othr_juri_desc'] formatted_ting = { 'foreign_jurisdiction': foreign_jurisdiction, 'foreign_name': ting_data['foreign_nme'], @@ -515,7 +519,7 @@ def build_epoch_filing(business_id: int) -> dict: return filing -def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None, update: bool=False) -> int: +def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None) -> int: columns = ', '.join(data.keys()) values = ', '.join([format_value(v) for v in data.values()]) @@ -524,8 +528,6 @@ def load_data(conn: Connection, table_name: str, data: dict, conflict_column: st check_query = f"select id from {table_name} where {conflict_column} = {conflict_value}" check_result = conn.execute(text(check_query)).scalar() if check_result: - if update: - update_data(conn, table_name, data, check_result) return check_result query = f"""insert into {table_name} ({columns}) values ({values}) returning id""" @@ -536,14 +538,15 @@ def load_data(conn: Connection, table_name: str, data: dict, conflict_column: st return id -def update_data(conn: Connection, table_name: str, data: dict, id: int) -> bool: +def update_data(conn: Connection, table_name: str, data: dict, column: str, value: any) -> int: update_pairs = [f'{k} = {format_value(v)}' for k, v in data.items()] update_pairs_str = ', '.join(update_pairs) - query = f"""update {table_name} set {update_pairs_str} where id={id}""" + query = f"""update {table_name} set {update_pairs_str} where {column}={format_value(value)} returning id""" result = conn.execute(text(query)) + id = result.scalar() - return result.rowcount > 0 + return id def format_value(value) -> str: 
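
The reworked loaders above leave load_data() as insert-only (returning the existing id when the conflict_column already matches) and make update_data() update by an arbitrary column, returning the matched row id or None when no row matches; the flow in this patch relies on that return value to refuse migrating a TED before its TINGs. A minimal sketch of that calling pattern, assuming an open SQLAlchemy engine and a TING already loaded into businesses — the connection URL, filing id and dates below are placeholders, not values from the patches:

    from sqlalchemy import create_engine
    from tombstone.tombstone_utils import update_data

    engine = create_engine('postgresql://user:pass@localhost/lear')  # placeholder URL
    ting_identifier = 'BC0593394'  # a TING corp number

    with engine.connect() as conn:
        # attach state filing info to the already-migrated TING;
        # update_data returns the business id, or None if the TING was never migrated
        temp_ting = {
            'identifier': ting_identifier,
            'state_filing_id': 1234,           # placeholder filing id
            'dissolution_date': '2020-01-01',  # placeholder amalgamation date
        }
        ting_business_id = update_data(conn, 'businesses', temp_ting, 'identifier', ting_identifier)
        if not ting_business_id:
            raise Exception(f'TING {ting_identifier} does not exist, cannot migrate TED before TING')
        conn.commit()
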
From 076b9d31bd68986cc316f92c25ae0958df71d0d8 Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Mon, 20 Jan 2025 06:57:26 -0800 Subject: [PATCH 027/133] 24482 Tombstone pipeline - Implement continuation in (#3174) --- data-tool/flows/corps_tombstone_flow.py | 5 ++ .../flows/tombstone/tombstone_base_data.py | 14 ++++++ .../flows/tombstone/tombstone_mappings.py | 1 + .../flows/tombstone/tombstone_queries.py | 21 ++++++++ data-tool/flows/tombstone/tombstone_utils.py | 49 ++++++++++++++++++- .../legal_api/reports/business_document.py | 6 +++ 6 files changed, 94 insertions(+), 2 deletions(-) diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index c44dd01451..8392a6ce07 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -192,6 +192,11 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id if i == state_filing_index: update_info['businesses']['state_filing_id'] = filing_id + + if jurisdiction := data['jurisdiction']: + jurisdiction['business_id'] = business_id + jurisdiction['filing_id'] = filing_id + load_data(conn, 'jurisdictions', jurisdiction) # load amalgamation snapshot linked to the current filing if amalgamation_data := data['amalgamations']: diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index 1dc409fa63..f7700c31e6 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -132,6 +132,19 @@ } +# ======== jurisdiction ======== +JURISDICTION = { + 'country': None, + 'region': None, + 'identifier': None, + 'legal_name': None, + 'tax_id': None, + 'incorporation_date': None, # date + 'expro_identifier': None, + 'expro_legal_name': None, +} + + # ======== filing ======== USER = { 'username': None, @@ -171,6 +184,7 @@ # others 'submitter_roles': None, }, + 'jurisdiction': None, # optional 'amalgamations': None # optional } diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 9a6f294f32..6c7c3c3b0b 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -390,6 +390,7 @@ def has_value(cls, value): 'amalgamationApplication': ['last_coa_date', 'last_cod_date'], # TODO: 'dissolution_date' - Amalgamating business, continuation out # TODO: 'continuation_out_date' - continuation out + 'continuationIn': ['last_coa_date', 'last_cod_date'], 'dissolution': ['dissolution_date'], 'putBackOn': ['dissolution_date'], 'restoration': ['dissolution_date', 'restoration_expiry_date'], diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 003c6c0350..9db812ae7f 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -118,6 +118,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0472301', 'BC0649417', 'BC0808085', 'BC0803411', 'BC0511226', 'BC0833000', 'BC0343855', 'BC0149266', -- dissolution -- 'BC0548839', 'BC0541207', 'BC0462424', 'BC0021973', -- restoration -- 'BC0034290', -- legacy other +-- 'C0870179', 'C0870343', 'C0883424', -- continuation in (C, CCC, CUL) -- 'BC0207097', 'BC0693625', 'BC0754041', 'BC0072008', 'BC0355241', 'BC0642237', 'BC0555891', 'BC0308683', -- correction -- 'BC0688906', 'BC0870100', 'BC0267106', 'BC0873461', -- alteration 
-- 'BC0536998', 'BC0574096', 'BC0663523' -- new mappings of CoA, CoD @@ -525,6 +526,25 @@ def get_resolutions_query(corp_num): return query +def get_jurisdictions_query(corp_num): + query = f""" + select + j.corp_num as j_corp_num, + j.start_event_id as j_start_event_id, + j.can_jur_typ_cd as j_can_jur_typ_cd, + j.xpro_typ_cd as j_xpro_typ_cd, + j.home_company_nme as j_home_company_nme, + j.home_juris_num as j_home_juris_num, + to_char(j.home_recogn_dt, 'YYYY-MM-DD') as j_home_recogn_dt, + j.othr_juris_desc as j_othr_juris_desc, + j.bc_xpro_num as j_bc_xpro_num + from jurisdiction j + where corp_num = '{corp_num}' + ; + """ + return query + + def get_filings_query(corp_num): query = f""" select @@ -620,6 +640,7 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'share_classes': get_share_classes_share_series_query(corp_num), 'aliases': get_aliases_query(corp_num), 'resolutions': get_resolutions_query(corp_num), + 'jurisdictions': get_jurisdictions_query(corp_num), 'filings': get_filings_query(corp_num), 'amalgamations': get_amalgamation_query(corp_num) } diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 76bd82bed2..ae19f1f9f2 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -7,7 +7,8 @@ import pandas as pd import pytz from sqlalchemy import Connection, text -from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, FILING_JSON, OFFICE, +from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, FILING_JSON, + JURISDICTION, OFFICE, PARTY, PARTY_ROLE, RESOLUTION, SHARE_CLASSES, USER) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, @@ -226,6 +227,45 @@ def format_resolutions_data(data: dict) -> list[dict]: return formatted_resolutions +def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict: + jurisdictions_data = data['jurisdictions'] + + matched_jurisdictions = [ + item for item in jurisdictions_data if item.get('j_start_event_id') == event_id + ] + + if not matched_jurisdictions: + return None + + formatted_jurisdiction = copy.deepcopy(JURISDICTION) + jurisdiction_info = matched_jurisdictions[0] + + formatted_jurisdiction['legal_name'] = jurisdiction_info['j_home_company_nme'] + formatted_jurisdiction['identifier'] = jurisdiction_info['j_home_juris_num'] + formatted_jurisdiction['incorporation_date'] = jurisdiction_info['j_home_recogn_dt'] + formatted_jurisdiction['expro_identifier'] = jurisdiction_info['j_bc_xpro_num'] + formatted_jurisdiction['country'] = None + formatted_jurisdiction['region'] = None + + can_jurisdiction_code = jurisdiction_info['j_can_jur_typ_cd'] + other_jurisdiction_desc = jurisdiction_info['j_othr_juris_desc'] + + # when canadian jurisdiction, ignore othr_juris_desc + if can_jurisdiction_code != 'OT': + formatted_jurisdiction['country'] = 'CA' + formatted_jurisdiction['region'] = 'FEDERAL' if can_jurisdiction_code == 'FD' else can_jurisdiction_code + # when other jurisdiction and len(othr_juris_desc) = 2, then othr_juris_desc is country code + elif can_jurisdiction_code == 'OT' and len(other_jurisdiction_desc) == 2: + formatted_jurisdiction['country'] = other_jurisdiction_desc + # when other jurisdiction and len(othr_juris_desc) = 6, then othr_juris_desc contains both + # region code and country code (like "US, SS"). Ignore any other cases. 
+ elif can_jurisdiction_code == 'OT' and len(other_jurisdiction_desc) == 6: + formatted_jurisdiction['country'] = other_jurisdiction_desc[:2] + formatted_jurisdiction['region'] = other_jurisdiction_desc[4:] + + return formatted_jurisdiction + + def format_filings_data(data: dict) -> list[dict]: # filing info in business business_update_dict = {} @@ -253,6 +293,7 @@ def format_filings_data(data: dict) -> list[dict]: effective_date, x) filing_body = copy.deepcopy(FILING['filings']) + jurisdiction = None amalgamation = None # make it None if no valid value @@ -271,12 +312,16 @@ def format_filings_data(data: dict) -> list[dict]: 'submitter_id': user_id, # will be updated to real user_id when loading data into db } + if filing_type == 'continuationIn': + jurisdiction = format_jurisdictions_data(data, x['e_event_id']) + if filing_type == 'amalgamationApplication': amalgamation = format_amalgamations_data(data, x['e_event_id']) filing = { 'filings': filing_body, - 'amalgamations': amalgamation + 'jurisdiction': jurisdiction, + 'amalgamations': amalgamation } formatted_filings.append(filing) diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index ae5f8a5f09..e806d380c6 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -550,6 +550,12 @@ def _set_continuation_in_details(self, business: dict): 'business_id': jurisdiction.business_id, 'filing_id': jurisdiction.filing_id, } + + # Imported from COLIN + if self._business.is_tombstone: + jurisdiction_info['expro_identifier'] = 'Not Available' + jurisdiction_info['expro_legal_name'] = 'Not Available' + continuation_in_info['foreignJurisdiction'] = jurisdiction_info business['continuationIn'] = continuation_in_info From c69b560fdd9595baaaa3e37848922cd8e92f591d Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 21 Jan 2025 09:12:40 -0800 Subject: [PATCH 028/133] 24820 - Return embedded NoW filing for withdrawn bootstrap filing (#3175) * 24820-Embed-NoW-for-withdrawn-boostrap-filing-during-fetch * 24820-Add unit tests * 24820-Update-param-name * 24820-Update withdrawn filing var name to now filing * 24280-Move helper out of Filing model * 24820-Fix lint error * 24820 - Simplify helper function * 24820-Add check for temp business * 24820-Update-unit-test --- .../business_filings/business_filings.py | 11 ++++ .../v2/test_business_filings/test_filings.py | 60 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 0c132207c1..c502a9b573 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -307,6 +307,9 @@ def get_single_filing(identifier: str, filing_id: int): filing_json = rv.json if rv.status == Filing.Status.PENDING.value: ListFilingResource.get_payment_update(filing_json) + if rv.status == Filing.Status.WITHDRAWN.value and identifier.startswith('T'): + now_filing = ListFilingResource.get_notice_of_withdrawal(filing_json['filing']['header']['filingId']) + filing_json['filing']['noticeOfWithdrawal'] = now_filing.json elif (rv.status in [Filing.Status.CHANGE_REQUESTED.value, Filing.Status.APPROVED.value, Filing.Status.REJECTED.value] and @@ -464,6 +467,14 @@ def get_business_and_filing(identifier, 
filing_id=None) -> Tuple[Optional[Busine business = Business.find_by_identifier(identifier) return business, filing + @staticmethod + def get_notice_of_withdrawal(filing_id: str = None): + """Return a NoW by the withdrawn filing id.""" + filing = db.session.query(Filing). \ + filter(Filing.withdrawn_filing_id == filing_id).one_or_none() + + return filing + @staticmethod def put_basic_checks(identifier, filing, client_request, business) -> Tuple[dict, int]: """Perform basic checks to ensure put can do something.""" diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index de5da778fe..0accdba82a 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -116,6 +116,66 @@ def test_get_temp_business_filing(session, client, jwt, legal_type, filing_type, assert rv.json['filing']['header']['name'] == filing_type assert rv.json['filing'][filing_type] == filing_json +def test_get_withdrawn_temp_business_filing(session, client, jwt): + """Assert that a FE withdrawn temp business returns the filing with the NoW embedded when the status is WITHDRAWN.""" + + # set-up withdrawn boostrap FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = 'incorporationApplication' + del json_data['filing']['business'] + new_bus_filing_json = copy.deepcopy(INCORPORATION) + new_bus_filing_json['nameRequest']['legalType'] = 'BC' + json_data['filing']['incorporationApplication'] = new_bus_filing_json + new_business_filing = factory_pending_filing(None, json_data) + new_business_filing.temp_reg = identifier + new_business_filing.effective_date = future_effective_date + new_business_filing.payment_completion_date = datetime.utcnow().isoformat() + new_business_filing._status = Filing.Status.PAID.value + new_business_filing.skip_status_listener = True + new_business_filing.save() + withdrawn_filing_id = new_business_filing.id + + # set-up notice of withdrawal filing + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + "identifier": identifier, + "legalType": 'BC' + } + now_json_data['filing']['noticeOfWithdrawal'] = copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + now_filing = factory_filing(None, now_json_data) + now_filing.withdrawn_filing_id = withdrawn_filing_id + now_filing.save() + + # fetch filings when withdrawn filing status is PAID + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate that the NoW is not embedded in the withdrawn filing + assert 'noticeOfWithdrawal' not in rv.json['filing'] + + # set status to WITHDRAWN + new_business_filing._status = Filing.Status.WITHDRAWN.value + new_business_filing.save() + + # fetch filings when withdrawn filing status is WITHDRAWN + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + 
headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate that the NoW is now embedded in the withdrawn filing + assert 'noticeOfWithdrawal' in rv.json['filing'] + assert rv.json['filing']['noticeOfWithdrawal'] is not None def test_get_filing_not_found(session, client, jwt): """Assert that the request fails if the filing ID doesn't match an existing filing.""" From f8d63cc771bcba65b41d3059d4d0c2760fd6a652 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Tue, 21 Jan 2025 15:34:42 -0800 Subject: [PATCH 029/133] 24721 Update extract script not to map Q* to BC (#3177) * update Q* type mapping Signed-off-by: Hongjing Chen * set default index for get unprocessed corps query Signed-off-by: Hongjing Chen * complete & remove one TO-DO in the code Signed-off-by: Hongjing Chen * undo Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- data-tool/flows/tombstone/tombstone_queries.py | 4 ++-- data-tool/scripts/transfer_cprd_corps.sql | 5 +---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 9db812ae7f..c2e9a090a8 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -80,7 +80,7 @@ def get_unprocessed_corps_subquery(flow_name, environment): # [2]->[1]->[3] (may fetch fewer eligible corps in [2] at the beginning, if so, go to [1] and then go back to [2], repeatedly) # Other usage: # [0] is used for other purposes, e.g. tweak query to select specific corps - subquery = subqueries[2] + subquery = subqueries[3] return subquery['cte'], subquery['where'] def get_unprocessed_corps_query(flow_name, environment, batch_size): @@ -131,7 +131,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0805986', 'BC0561086', -- amalg - v -- 'BC0543231', 'BC0358476' -- amalg - h -- ) - and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') -- TODO: update transfer script + and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and cs.end_event_id is null -- and ((cp.processed_status is null or cp.processed_status != 'COMPLETED')) and cp.processed_status is null diff --git a/data-tool/scripts/transfer_cprd_corps.sql b/data-tool/scripts/transfer_cprd_corps.sql index 28eef3f2f2..0aeb1e271b 100644 --- a/data-tool/scripts/transfer_cprd_corps.sql +++ b/data-tool/scripts/transfer_cprd_corps.sql @@ -66,10 +66,7 @@ select case else c.CORP_NUM end CORP_NUM, CORP_FROZEN_TYP_CD as corp_frozen_type_cd, - case - when c.CORP_TYP_CD in ('QA', 'QB', 'QC', 'QD', 'QE') then 'BC' - else c.CORP_TYP_CD - end CORP_TYPE_CD, + CORP_TYP_CD as CORP_TYPE_CD, RECOGNITION_DTS, BN_9, bn_15, From 149d3c968dbfa5ad854c8ffd17b118b26a144b22 Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 21 Jan 2025 16:00:53 -0800 Subject: [PATCH 030/133] 25387-Add Filings with Withdrawn Status to Ledger (#3176) * 25387-Add withdrawn status filings to ledger * 25387-Add-unit-tests --- .../business_filings/business_filings.py | 3 +- .../test_filings_ledger.py | 39 +++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index c502a9b573..b81f0b4747 100644 --- 
a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -371,7 +371,8 @@ def get_ledger_listing(identifier: str, user_jwt: JwtManager): filings = CoreFiling.ledger(business.id, jwt=user_jwt, - statuses=[Filing.Status.COMPLETED.value, Filing.Status.PAID.value], + statuses=[Filing.Status.COMPLETED.value, Filing.Status.PAID.value, + Filing.Status.WITHDRAWN.value], start=ledger_start, size=ledger_size, effective_date=effective_date) diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py index b49021d9ea..dfbcef6cd5 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py @@ -177,6 +177,45 @@ def test_ledger_comment_count(session, client, jwt): # validate assert rv.json['filings'][0]['commentsCount'] == number_of_comments +@pytest.mark.parametrize('test_name, filing_status, expected', [ + ('filing-status-Completed', Filing.Status.COMPLETED.value, 1), + ('filing-status-Corrected',Filing.Status.CORRECTED.value, 0), + ('filing-status-Draft', Filing.Status.DRAFT.value, 0), + ('filing-status-Epoch', Filing.Status.EPOCH.value, 0), + ('filing-status-Error', Filing.Status.ERROR.value, 0), + ('filing-status-Paid', Filing.Status.PAID.value, 1), + ('filing-status-Pending', Filing.Status.PENDING.value, 0), + ('filing-status-PaperOnly', Filing.Status.PAPER_ONLY.value, 0), + ('filing-status-PendingCorrection', Filing.Status.PENDING_CORRECTION.value, 0), + ('filing-status-Withdrawn', Filing.Status.WITHDRAWN.value, 1), +]) + +def test_get_all_business_filings_permitted_statuses(session, client, jwt, test_name, filing_status, expected): + """Assert that the ledger only shows filings with permitted statuses.""" + # setup + identifier = 'BC1234567' + today = date.today().isoformat() + alteration_meta = {'alteration': { + 'fromLegalType': 'BC', + 'toLegalType': 'BEN' + }} + meta_data = {**{'applicationDate': today}, **alteration_meta} + + business, filing_storage = ledger_element_setup_help(identifier, 'alteration') + filing_storage._meta_data = meta_data + + # set filing status + filing_storage._status = filing_status + filing_storage.skip_status_listener = True + filing_storage.save() + + # test + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [UserRoles.system], identifier)) + + # validate + assert len(rv.json.get('filings')) == expected + @pytest.mark.parametrize('test_name, file_number, order_date, effect_of_order, order_details, expected', [ ('all_elements', 'ABC123', datetime.utcnow(), 'effect', 'details', From 21b5efe178ad34cf063f021a0528bae27837a285 Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 23 Jan 2025 08:35:36 -0800 Subject: [PATCH 031/133] updated validation --- legal-api/src/legal_api/constants.py | 10 ++++++++++ .../src/legal_api/services/document_record.py | 18 ++++++++++++++---- .../filings/validations/common_validations.py | 11 ++++++++++- .../filings/validations/continuation_in.py | 11 ++++++++--- .../filing_processors/continuation_in.py | 9 +++++++++ 5 files changed, 51 insertions(+), 8 deletions(-) diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 92e7394c79..403cc32e13 100644 --- a/legal-api/src/legal_api/constants.py +++ 
b/legal-api/src/legal_api/constants.py
@@ -13,4 +13,14 @@
 # limitations under the License.
 """Constants for legal api."""
 
+from enum import Enum
+
+
 BOB_DATE = '2019-03-08'
+
+class DocumentClassEnum(Enum):
+    CORP = 'CORP'
+
+class DocumentTypeEnum(Enum):
+    CNTO = 'CNTO'
+    DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT'
\ No newline at end of file
diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py
index 955fb57817..b373181e37 100644
--- a/legal-api/src/legal_api/services/document_record.py
+++ b/legal-api/src/legal_api/services/document_record.py
@@ -18,9 +18,10 @@ import requests
 from flask import current_app, request
 from flask_babel import _
-
 import PyPDF2
 
+from legal_api.constants import DocumentTypeEnum
+
 
 class DocumentRecordService:
     """Document Storage class."""
@@ -39,7 +40,7 @@ def upload_document(document_class: str, document_type: str) -> dict:
         url = f'{DRS_BASE_URL}/documents/{document_class}/{document_type}'
         # Validate file size and encryption status before submitting to DRS.
-        validation_error = DocumentRecordService.validate_pdf(file, request.content_length)
+        validation_error = DocumentRecordService.validate_pdf(file, request.content_length, document_type)
         if validation_error:
             return {
                 'error': validation_error
             }
@@ -145,12 +146,21 @@ def update_business_identifier(business_identifier: str, document_service_id: st
             return {}
 
     @staticmethod
-    def validate_pdf(file, content_length) -> Optional[list]:
+    def validate_pdf(file, content_length, document_type) -> Optional[list]:
        """Validate the PDF file."""
        msg = []
+        verify_paper_size = document_type in [
+            DocumentTypeEnum.CNTO,
+            DocumentTypeEnum.DIRECTOR_AFFIDAVIT
+        ]
+
        try:
            pdf_reader = PyPDF2.PdfFileReader(file)
-
+            if verify_paper_size:
+                # Check that all pages in the pdf are letter size and able to be processed.
+                if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages):
+                    msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'),
+                                'path': file.filename})
            if content_length > 30000000:
                msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename})
 
diff --git a/legal-api/src/legal_api/services/filings/validations/common_validations.py b/legal-api/src/legal_api/services/filings/validations/common_validations.py
index a624e296a6..f58ad3f2e8 100644
--- a/legal-api/src/legal_api/services/filings/validations/common_validations.py
+++ b/legal-api/src/legal_api/services/filings/validations/common_validations.py
@@ -22,7 +22,7 @@
 
 from legal_api.errors import Error
 from legal_api.models import Business
-from legal_api.services import MinioService, flags, namex
+from legal_api.services import MinioService, flags, namex, DocumentRecordService
 from legal_api.services.utils import get_str
 from legal_api.utils.datetime import datetime as dt
 
@@ -329,3 +329,12 @@ def validate_foreign_jurisdiction(foreign_jurisdiction: dict,
             msg.append({'error': 'Invalid region.', 'path': f'{foreign_jurisdiction_path}/region'})
 
     return msg
+
+def validate_file_on_drs(document_class: str, document_service_id: str, path) -> list:
+    """Validate file existence on DRS."""
+    msg = []
+    doc = DocumentRecordService.get_document(document_class, document_service_id)
+    if not bool(doc.get("documentURL")):
+        msg.append({'error': 'File does not exist on Document Record Service', 'path': path})
+
+    return msg
\ No newline at end of file
diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py
index 68925ede5d..40b43f01cd 100644
--- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py
+++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py
@@ -25,8 +25,8 @@
     validate_foreign_jurisdiction,
     validate_name_request,
     validate_parties_names,
-    validate_pdf,
     validate_share_structure,
+    validate_file_on_drs
 )
 from legal_api.services.filings.validations.incorporation_application import (
     validate_incorporation_effective_date,
@@ -35,6 +35,7 @@
 )
 from legal_api.services.utils import get_bool, get_str
 from legal_api.utils.datetime import datetime as dt
+from legal_api.constants import DocumentClassEnum
 
 
 def validate(filing_json: dict) -> Optional[Error]:  # pylint: disable=too-many-branches;
@@ -51,6 +52,7 @@ def validate(filing_json: dict) -> Optional[Error]:  # pylint: disable=too-many-
         return msg  # Cannot continue validation without legal_type
 
     msg.extend(validate_business_in_colin(filing_json, filing_type))
+    msg.extend(validate_continuation_in_authorization(filing_json, filing_type))
     msg.extend(_validate_foreign_jurisdiction(filing_json, filing_type, legal_type))
     msg.extend(validate_name_request(filing_json, legal_type, filing_type))
 
@@ -125,7 +127,10 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty
             foreign_jurisdiction['country'] == 'CA' and
             ((region := foreign_jurisdiction.get('region')) and region == 'AB')):
         affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey'
-        if not foreign_jurisdiction.get('affidavitFileKey'):
+        if file_key := foreign_jurisdiction.get('affidavitFileKey'):
+            if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, affidavit_file_key_path):
+                msg.extend(err)
+        else:
             msg.append({'error': 'Affidavit from the directors is required.',
                         'path': 
affidavit_file_key_path}) try: # Check the incorporation date is in valid format @@ -153,7 +158,7 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_pdf(file_key, file_key_path, False): + if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, file_key_path): msg.extend(err) return msg diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py index 393c30db69..c803536d1a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py @@ -17,6 +17,7 @@ from entity_queue_common.service_utils import QueueException from legal_api.models import Business, Document, DocumentType, Filing, Jurisdiction +from legal_api.services import DocumentRecordService from legal_api.utils.legislation_datetime import LegislationDatetime from entity_filer.filing_meta import FilingMeta @@ -160,4 +161,12 @@ def process(business: Business, # pylint: disable=too-many-branches,too-many-lo filing_json['filing']['business']['legalType'] = business.legal_type filing_json['filing']['business']['foundingDate'] = business.founding_date.isoformat() filing_rec._filing_json = filing_json # pylint: disable=protected-access; bypass to update filing data + # Get a file key from continuation in object. + files = continuation_in.get('authorization', {}).get('files', []) + if not len(files): + raise QueueException( + f'continuationIn {filing_rec.id}, Unable to update business identifier on Document Record Service.' 
+ ) + # Update business identifier on Document Record Service + DocumentRecordService.update_business_identifier(business.identifier, files[0].get('fileKey')) return business, filing_rec, filing_meta From 026d984bd0b46df36770a5f7b3335f8b96b0ac19 Mon Sep 17 00:00:00 2001 From: meawong Date: Thu, 23 Jan 2025 10:21:59 -0800 Subject: [PATCH 032/133] 24772-Update API Specs and Unit Test Mocks (#3178) * 24772-Update specs and mocks in unit test * 24772-Add-new-properties-to-success-response * 24772-Update titles to reflect changes in schema --- docs/business.yaml | 10 ++++++++++ .../v2/test_business_filings/test_filing_documents.py | 5 ++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/docs/business.yaml b/docs/business.yaml index b6e30f18ba..6fe8d06c2e 100644 --- a/docs/business.yaml +++ b/docs/business.yaml @@ -726,6 +726,8 @@ paths: submitter: 'mocked submitter' noticeOfWithdrawal: filingId: 123456 + hasTakenEffect: false + partOfPoa: false voluntary-dissolution-success-response: summary: Voluntary Dissolution Response value: @@ -1523,6 +1525,8 @@ paths: courtOrder: fileNumber: "A12345" effectOfOrder: planOfArrangement + hasTakenEffect: false + partOfPoa: false voluntary-dissolution-request: summary: Voluntary Dissolution Request value: @@ -6355,6 +6359,12 @@ components: title: ID for the future effective filing courtOrder: $ref: '#/components/schemas/Court_order' + hasTakenEffect: + type: boolean + title: One of the terms of arrangement for the FED filing have taken effect + partOfPoa: + type: boolean + title: FED filing is part of a Plan of Arrangement x-examples: Example 1: noticeOfWithdrawal: diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py index 91aecc84f8..4ca9facb6c 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py @@ -141,12 +141,11 @@ def test_unpaid_filing(session, client, jwt): ALTERATION_MEMORANDUM_RULES_IN_RESOLUTION['memorandumInResolution'] = True ALTERATION_MEMORANDUM_RULES_IN_RESOLUTION['rulesInResolution'] = True -# a mock notice of withdrawal filing, since its schema is not ready yet -# may need to delete this and change variables in the test cases in the future MOCK_NOTICE_OF_WITHDRAWAL = {} MOCK_NOTICE_OF_WITHDRAWAL['courtOrder'] = copy.deepcopy(COURT_ORDER) MOCK_NOTICE_OF_WITHDRAWAL['filingId'] = '123456' - +MOCK_NOTICE_OF_WITHDRAWAL['hasTakenEffect'] = False +MOCK_NOTICE_OF_WITHDRAWAL['partOfPoa'] = False @pytest.mark.parametrize('test_name, identifier, entity_type, filing_name_1, legal_filing_1, filing_name_2, legal_filing_2, status, expected_msg, expected_http_code, payment_completion_date', [ ('special_res_paper', 'CP7654321', Business.LegalTypes.COOP.value, From be4608719603df7227ca86a5441338c42d532157 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:24:17 -0800 Subject: [PATCH 033/133] Hotfix updates (#3187) * email reminder flag update (#3179) * 25446 - add check for lear businesses before freezing corps (#3180) * 25446 - add check for lear businesses before freezing corps * 25446 - update comment * 25446 - refactoring * 25446 - refactoring to handle exceptions * 25446 - refactor * 25446 - refactor * 25446 - refactor * 25446 - update vaults.json * 25446 - tweaks to prevent performance issue (#3184) * 25446 - add check 
for lear businesses before freezing corps * 25446 - update comment * 25446 - refactoring * 25446 - refactoring to handle exceptions * 25446 - refactor * 25446 - refactor * 25446 - refactor * 25446 - update vaults.json * 25446 - tweaks to prevent performance issue --- colin-api/devops/vaults.json | 3 +- colin-api/src/colin_api/config.py | 9 ++++ colin-api/src/colin_api/models/filing.py | 19 ++++++-- colin-api/src/colin_api/resources/filing.py | 9 ++-- colin-api/src/colin_api/services/account.py | 52 ++++++++++++++++++++ colin-api/src/colin_api/services/legal.py | 54 +++++++++++++++++++++ jobs/email-reminder/email_reminder.py | 2 +- 7 files changed, 140 insertions(+), 8 deletions(-) create mode 100644 colin-api/src/colin_api/services/account.py create mode 100644 colin-api/src/colin_api/services/legal.py diff --git a/colin-api/devops/vaults.json b/colin-api/devops/vaults.json index ace7440da6..4e7deeb8c4 100644 --- a/colin-api/devops/vaults.json +++ b/colin-api/devops/vaults.json @@ -6,7 +6,8 @@ "test-oracle", "sentry", "jwt", - "launchdarkly" + "launchdarkly", + "entity-service-account" ] } ] diff --git a/colin-api/src/colin_api/config.py b/colin-api/src/colin_api/config.py index f4308a675c..bb6c9e7e07 100644 --- a/colin-api/src/colin_api/config.py +++ b/colin-api/src/colin_api/config.py @@ -93,6 +93,15 @@ class _Config: # pylint: disable=too-few-public-methods except (TypeError, ValueError): JWT_OIDC_JWKS_CACHE_TIMEOUT = 300 + # legal api + LEGAL_API_URL = os.getenv('LEGAL_API_URL') + + # service accounts + ACCOUNT_SVC_AUTH_URL = os.getenv('ACCOUNT_SVC_AUTH_URL') + ACCOUNT_SVC_CLIENT_ID = os.getenv('ACCOUNT_SVC_CLIENT_ID') + ACCOUNT_SVC_CLIENT_SECRET = os.getenv('ACCOUNT_SVC_CLIENT_SECRET') + ACCOUNT_SVC_TIMEOUT = os.getenv('ACCOUNT_SVC_TIMEOUT') + TESTING = False DEBUG = False diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index 284d113cd8..c0c7e0e4f6 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -46,6 +46,7 @@ ) # noqa: I001 from colin_api.resources.db import DB from colin_api.services import flags +from colin_api.services.legal import LegalApiService from colin_api.utils import convert_to_json_date, convert_to_json_datetime, convert_to_pacific_time, convert_to_snake @@ -1240,7 +1241,7 @@ def add_involuntary_dissolution_event(cls, con, corp_num, filing_dt, filing_body # pylint: disable=too-many-locals,too-many-statements,too-many-branches,too-many-nested-blocks; @classmethod - def add_filing(cls, con, filing: Filing) -> int: + def add_filing(cls, con, filing: Filing, lear_identifier: str) -> int: """Add new filing to COLIN tables.""" try: if filing.filing_type not in ['agmExtension', 'agmLocationChange', 'alteration', @@ -1396,11 +1397,14 @@ def add_filing(cls, con, filing: Filing) -> int: Business.TypeCodes.BCOMP_CONTINUE_IN.value, ]) - # Freeze all entities except CP if 'enable-bc-ccc-ulc' flag is on else just freeze BEN + # Freeze all entities except CP if business exists in lear and + # 'enable-bc-ccc-ulc' flag is on else just freeze BEN is_frozen_condition = ( flags.is_on('enable-bc-ccc-ulc') and - business['business']['legalType'] != Business.TypeCodes.COOP.value + business['business']['legalType'] != Business.TypeCodes.COOP.value and + cls.is_business_in_lear(lear_identifier) ) + current_app.logger.debug(f'Business {lear_identifier}, is_frozen_condition:{is_frozen_condition}') is_new_or_altered_ben = is_new_ben or is_new_cben or is_alteration_to_ben_or_cben @@ -1414,6 
+1418,15 @@ def add_filing(cls, con, filing: Filing) -> int: current_app.logger.error(err.with_traceback(None)) raise err + @classmethod + def is_business_in_lear(cls, lear_identifier: str) -> bool: + """Check if business is in lear.""" + response = LegalApiService.query_business(lear_identifier, slim=True) + + if response.status_code == HTTPStatus.OK: + return True + return False + @classmethod def _get_last_ar_filed_date(cls, header: dict, business: dict): filing_year = header.get('filingYear') diff --git a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index ea46694193..0fb1a355a9 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -117,6 +117,9 @@ def post(legal_type, identifier, **kwargs): {'message': 'Error: Identifier in URL does not match identifier in filing data'} ), HTTPStatus.BAD_REQUEST + # setting this for lear business check as identifier is converted from lear to colin below + lear_identifier = identifier + # convert identifier if BC legal_type identifier = Business.get_colin_identifier(identifier, legal_type) @@ -177,7 +180,7 @@ def post(legal_type, identifier, **kwargs): } }), HTTPStatus.CREATED - filings_added = FilingInfo._add_filings(con, json_data, filing_list, identifier) + filings_added = FilingInfo._add_filings(con, json_data, filing_list, identifier, lear_identifier) # success! commit the db changes con.commit() @@ -202,7 +205,7 @@ def post(legal_type, identifier, **kwargs): }), HTTPStatus.INTERNAL_SERVER_ERROR @staticmethod - def _add_filings(con, json_data: dict, filing_list: list, identifier: str) -> list: + def _add_filings(con, json_data: dict, filing_list: list, identifier: str, lear_identifier: str) -> list: """Process all parts of the filing.""" filings_added = [] for filing_type in filing_list: @@ -224,7 +227,7 @@ def _add_filings(con, json_data: dict, filing_list: list, identifier: str) -> li if filing_type == 'correction': filings_added.extend(Filing.add_correction_filings(con, filing)) else: - event_id = Filing.add_filing(con, filing) + event_id = Filing.add_filing(con, filing, lear_identifier) filings_added.append({'event_id': event_id, 'filing_type': filing_type, 'filing_sub_type': filing.filing_sub_type}) diff --git a/colin-api/src/colin_api/services/account.py b/colin-api/src/colin_api/services/account.py new file mode 100644 index 0000000000..5366a8dfbd --- /dev/null +++ b/colin-api/src/colin_api/services/account.py @@ -0,0 +1,52 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This class provides the service for auth calls.""" + +import requests +from flask import current_app + + +# pylint: disable=too-few-public-methods +class AccountService: + """Provides service to call Authentication Services.""" + + BEARER: str = 'Bearer ' + CONTENT_TYPE_JSON = {'Content-Type': 'application/json'} + + try: + timeout = int(current_app.config.get('ACCOUNT_SVC_TIMEOUT', 20)) + except Exception: # pylint: disable=broad-except + timeout = 20 + + @classmethod + def get_bearer_token(cls): + """Get a valid Bearer token for the service to use.""" + token_url = current_app.config.get('ACCOUNT_SVC_AUTH_URL') + client_id = current_app.config.get('ACCOUNT_SVC_CLIENT_ID') + client_secret = current_app.config.get('ACCOUNT_SVC_CLIENT_SECRET') + + data = 'grant_type=client_credentials' + + # get service account token + res = requests.post(url=token_url, + data=data, + headers={'content-type': 'application/x-www-form-urlencoded'}, + auth=(client_id, client_secret), + timeout=cls.timeout) + + try: + return res.json().get('access_token') + except Exception: # pylint: disable=broad-except + return None diff --git a/colin-api/src/colin_api/services/legal.py b/colin-api/src/colin_api/services/legal.py new file mode 100644 index 0000000000..38dad26178 --- /dev/null +++ b/colin-api/src/colin_api/services/legal.py @@ -0,0 +1,54 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This class provides the service for legal-api calls.""" +import requests +from flask import current_app + +from colin_api.services.account import AccountService + + +# pylint: disable=too-few-public-methods +class LegalApiService(): + """Provides service to call the legal-api.""" + + @staticmethod + def query_business(identifier: str, slim: bool = False): + """Return a JSON object with business information. + + Args: + identifier (str): The business identifier + slim (bool, optional): If True, requests minimal business data. Defaults to False. + """ + timeout = int(current_app.config.get('ACCOUNT_SVC_TIMEOUT')) + legal_api_url = current_app.config.get('LEGAL_API_URL') + token = AccountService.get_bearer_token() + + try: + url = f'{legal_api_url}/businesses/{identifier}' + if slim: + url += '?slim=true' + # Perform proxy call using the input identifier (e.g. 
BC 123456) + response = requests.get(url, + headers={'Content-Type': 'application/json', 'Authorization': 'Bearer ' + token}, + timeout=timeout + ) + # If the status code is 200 or 404, return the response + if response.status_code in (200, 404): + return response + response.raise_for_status() + + except Exception as err: # pylint: disable=broad-except: + current_app.logger.error(err, exc_info=True) + raise # re-throw the exception after logging diff --git a/jobs/email-reminder/email_reminder.py b/jobs/email-reminder/email_reminder.py index b87d5faab1..8436b1ba58 100644 --- a/jobs/email-reminder/email_reminder.py +++ b/jobs/email-reminder/email_reminder.py @@ -143,7 +143,7 @@ async def find_and_send_ar_reminder(app: Flask, qsm: QueueService): # pylint: d Business.LegalTypes.ULC_CONTINUE_IN.value, Business.LegalTypes.CCC_CONTINUE_IN.value,] # entity types to send ar reminder - if flags.is_on('enable-bc-ccc-ulc'): + if flags.is_on('enable-bc-ccc-ulc-email-reminder'): legal_types.extend( [Business.LegalTypes.COMP.value, Business.LegalTypes.BC_CCC.value, From e5748bd93f97c16cc85964e102b1b83d4124f37c Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 28 Jan 2025 11:15:01 -0800 Subject: [PATCH 034/133] 25412 - Update NoW Validation with hasTakenEffect and partOfPoa Properties (#3188) * 25412-Add-validation-for-new-properties * 25412-Add-unit-tests --- .../validations/notice_of_withdrawal.py | 14 +++++++++-- .../validations/test_notice_of_withdrawal.py | 23 ++++++++++++------- 2 files changed, 27 insertions(+), 10 deletions(-) diff --git a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py index 74d1d5f495..9a1f6d4014 100644 --- a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py +++ b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py @@ -20,7 +20,7 @@ from legal_api.errors import Error from legal_api.models import Filing from legal_api.models.db import db # noqa: I001 -from legal_api.services.utils import get_int +from legal_api.services.utils import get_bool, get_int from legal_api.utils.datetime import datetime as dt @@ -31,12 +31,22 @@ def validate(filing: Dict) -> Optional[Error]: msg = [] - withdrawn_filing_id_path: Final = '/filing/noticeOfWithdrawal/filingId' + base_path: Final = '/filing/noticeOfWithdrawal' + + withdrawn_filing_id_path: Final = f'{base_path}/filingId' withdrawn_filing_id = get_int(filing, withdrawn_filing_id_path) + + has_taken_effect = get_bool(filing, f'{base_path}/hasTakenEffect') + part_of_poa = get_bool(filing, f'{base_path}/partOfPoa') + if not withdrawn_filing_id: msg.append({'error': babel('Filing Id is required.'), 'path': withdrawn_filing_id_path}) return msg # cannot continue validation without the to be withdrawn filing id + if has_taken_effect and part_of_poa: + msg.append({'error': babel('Cannot file a Notice of Withdrawal as the filing has a POA in effect.')}) + return Error(HTTPStatus.BAD_REQUEST, msg) # cannot continue validation if the filing has a POA in effect + is_not_found, err_msg = validate_withdrawn_filing(withdrawn_filing_id) if is_not_found: return Error(HTTPStatus.NOT_FOUND, err_msg) diff --git a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py index b4ea68ee92..8bf91d71cb 100644 --- a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py +++ 
b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py @@ -31,6 +31,7 @@ # setup +FILING_HAS_POA_IN_EFFECT = {'error': 'Cannot file a Notice of Withdrawal as the filing has a POA in effect.'} FILING_NOT_EXIST_MSG = {'error': 'The filing to be withdrawn cannot be found.'} FILING_NOT_FED_MSG = {'error': 'Only filings with a future effective date can be withdrawn.'} FILING_NOT_PAID_MSG = {'error': 'Only paid filings with a future effective date can be withdrawn.'} @@ -40,16 +41,17 @@ # tests @pytest.mark.parametrize( - 'test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, expected_code, expected_msg',[ - ('EXIST_BUSINESS_SUCCESS', True, Filing.Status.PAID, True, True, None, None), - ('EXIST_BUSINESS_FAIL_NOT_PAID', True, Filing.Status.PENDING, True, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_PAID_MSG]), - ('EXIST_BUSINESS_FAIL_NOT_FED', True, Filing.Status.PAID, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG]), - ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, HTTPStatus.NOT_FOUND, [FILING_NOT_EXIST_MSG]), - ('EXIST_BUSINESS_FAIL_MISS_FILING_ID', True, Filing.Status.PAID, True, False, HTTPStatus.UNPROCESSABLE_ENTITY, ''), - ('EXIST_BUSINESS_FAIL_NOT_PAID_NOT_FED', True, Filing.Status.PENDING, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG, FILING_NOT_PAID_MSG]) + 'test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, has_taken_effect, part_of_poa, expected_code, expected_msg',[ + ('EXIST_BUSINESS_SUCCESS', True, Filing.Status.PAID, True, True, False, False, None, None), + ('EXIST_BUSINESS_FAIL_NOT_PAID', True, Filing.Status.PENDING, True, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_PAID_MSG]), + ('EXIST_BUSINESS_FAIL_NOT_FED', True, Filing.Status.PAID, False, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG]), + ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, None, None, HTTPStatus.NOT_FOUND, [FILING_NOT_EXIST_MSG]), + ('EXIST_BUSINESS_FAIL_MISS_FILING_ID', True, Filing.Status.PAID, True, False, None, None, HTTPStatus.UNPROCESSABLE_ENTITY, ''), + ('EXIST_BUSINESS_FAIL_NOT_PAID_NOT_FED', True, Filing.Status.PENDING, False, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG, FILING_NOT_PAID_MSG]), + ('EXIST_BUSINESS_FAIL_POA_IN_EFFECT', True, Filing.Status.PAID, True, True, True, True, HTTPStatus.BAD_REQUEST, [FILING_HAS_POA_IN_EFFECT]), ] ) -def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, expected_code, expected_msg): +def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, has_taken_effect, part_of_poa, expected_code, expected_msg): """Assert that notice of withdrawal flings can be validated""" today = datetime.utcnow().date() future_effective_date = today + timedelta(days=5) @@ -83,6 +85,11 @@ def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, with else: del filing_json['filing']['noticeOfWithdrawal']['filingId'] + if has_taken_effect is not None: + filing_json['filing']['noticeOfWithdrawal']['hasTakenEffect'] = has_taken_effect + if part_of_poa is not None: + filing_json['filing']['noticeOfWithdrawal']['partOfPoa'] = part_of_poa + err = validate(business, filing_json) if expected_code: assert err.code == expected_code From 507d7c006bcd0d76b0278d6198af03fea741f17a Mon Sep 17 00:00:00 2001 
From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Tue, 28 Jan 2025 11:16:03 -0800 Subject: [PATCH 035/133] bump up version for 24.1b (#3189) --- colin-api/src/colin_api/version.py | 2 +- legal-api/src/legal_api/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/colin-api/src/colin_api/version.py b/colin-api/src/colin_api/version.py index 6a26e494ec..0fbd4e4227 100644 --- a/colin-api/src/colin_api/version.py +++ b/colin-api/src/colin_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.137.0' # pylint: disable=invalid-name diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 09dd08c9d5..9800f9bbfa 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.136.0' # pylint: disable=invalid-name +__version__ = '2.137.0' # pylint: disable=invalid-name From f7a217b21e38ed578ed4aa46f46f398324c9d323 Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 28 Jan 2025 11:38:52 -0800 Subject: [PATCH 036/133] 25328 - Support Update and Save NoW Draft for Bootstrap Filing (#3183) * 25328-Add-special-case-for-NoW-when-getting-file-for-temp-reg-id * 25328-Update-requirements-for-schema-update * 25328-Clean-up-code * 25328-Optimize-query-for-temp-reg-filing --- legal-api/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 2 +- legal-api/src/legal_api/core/filing.py | 2 +- legal-api/src/legal_api/models/filing.py | 28 +++++++++++++------ .../v2/test_business_filings/test_filings.py | 13 +++++++++ 5 files changed, 36 insertions(+), 11 deletions(-) diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index 7d9965f9a8..7c1a3e152d 100755 --- a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 -git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/requirements/bcregistry-libraries.txt b/legal-api/requirements/bcregistry-libraries.txt index 94c67a8b48..80d739e0f5 100644 --- a/legal-api/requirements/bcregistry-libraries.txt +++ b/legal-api/requirements/bcregistry-libraries.txt @@ -1,2 +1,2 @@ -git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index 865ec1128d..e61490215d 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -273,7 +273,7 @@ def validate(): def get(identifier, filing_id=None) -> Optional[Filing]: """Return a Filing domain by the id.""" if identifier.startswith('T'): - storage = FilingStorage.get_temp_reg_filing(identifier) + storage = FilingStorage.get_temp_reg_filing(identifier, filing_id) else: storage = Business.get_filing_by_id(identifier, filing_id) diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index b636642496..0f98547e0e 100644 --- 
a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -861,14 +861,26 @@ def find_by_id(cls, filing_id: str = None): @staticmethod def get_temp_reg_filing(temp_reg_id: str, filing_id: str = None): - """Return a Filing by it's payment token.""" - q = db.session.query(Filing).filter(Filing.temp_reg == temp_reg_id) - - if filing_id: - q = q.filter(Filing.id == filing_id) - - filing = q.one_or_none() - return filing + """Return a filing by the temp id and filing id (if applicable).""" + if not filing_id: + return db.session.query(Filing).filter(Filing.temp_reg == temp_reg_id).one_or_none() + + return ( + db.session.query(Filing).filter( + db.or_( + db.and_( + Filing.id == filing_id, + Filing.temp_reg == temp_reg_id + ), + db.and_( # special case for NoW + Filing.id == filing_id, + Filing._filing_type == 'noticeOfWithdrawal', + Filing.withdrawn_filing_id == db.session.query(Filing.id) + .filter(Filing.temp_reg == temp_reg_id) + .scalar_subquery() + ) + ) + ).one_or_none()) @staticmethod def get_filing_by_payment_token(token: str): diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 0accdba82a..d7f2df42a4 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -1764,3 +1764,16 @@ def test_notice_of_withdrawal_filing(session, client, jwt, test_name, legal_type assert now_filing.withdrawal_pending == False if is_temp: assert now_filing.temp_reg == None + + # update and save notice of withdrawal draft filing + now_json_data['filing']['header']['certifiedBy'] = 'test123' + + rv_draft = client.put(f'/api/v2/businesses/{identifier}/filings/{now_filing.id}?draft=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate + assert rv_draft.status_code == HTTPStatus.ACCEPTED + assert rv_draft.json['filing']['header']['certifiedBy'] == 'test123' + + From 48550e2a2f6811f2471ce75b3fa45d8c43e9017c Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:43:19 +0800 Subject: [PATCH 037/133] 24988 now documents temp business (#3182) * 24988 now_documents_temp_business * add unit test * clean up * fix lint issue * clean up * fix lint issue * update busines name * 25418 show withdrawn filing outputs * fix lint issue * fix typo --- legal-api/src/legal_api/core/filing.py | 24 +++++- legal-api/src/legal_api/models/filing.py | 13 ++++ legal-api/src/legal_api/reports/report.py | 3 + .../business_filings/business_documents.py | 28 ++++--- .../test_filing_documents.py | 76 +++++++++++++++++++ 5 files changed, 131 insertions(+), 13 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index e61490215d..bc5af45c6d 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -37,7 +37,7 @@ # @dataclass(init=False, repr=False) -class Filing: +class Filing: # pylint: disable=too-many-public-methods """Domain class for Filings.""" class Status(str, Enum): @@ -284,6 +284,18 @@ def get(identifier, filing_id=None) -> Optional[Filing]: return None + @staticmethod + def get_by_withdrawn_filing_id(filing_id, withdrawn_filing_id, filing_type: str = None) -> Optional[Filing]: + """Return a Filing domain by the id, withdrawn_filing_id and filing_type.""" + storage = 
FilingStorage.get_temp_reg_filing_by_withdrawn_filing(filing_id, withdrawn_filing_id, filing_type) + + if storage: + filing = Filing() + filing._storage = storage # pylint: disable=protected-access + return filing + + return None + @staticmethod def find_by_id(filing_id) -> Optional[Filing]: """Return a Filing domain by the id.""" @@ -487,6 +499,10 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m base_url = current_app.config.get('LEGAL_API_BASE_URL') base_url = base_url[:base_url.find('/api')] identifier = business.identifier if business else filing.storage.temp_reg + if not identifier and filing.storage.withdrawn_filing_id: + withdrawn_filing = Filing.find_by_id(filing.storage.withdrawn_filing_id) + identifier = withdrawn_filing.storage.temp_reg + doc_url = url_for('API2.get_documents', **{'identifier': identifier, 'filing_id': filing.id, 'legal_filing_name': None}) @@ -508,15 +524,15 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m if filing.storage and filing.storage.payment_completion_date: documents['documents']['receipt'] = f'{base_url}{doc_url}/receipt' - no_legal_filings_in_paid_status = [ + no_legal_filings_in_paid_withdrawn_status = [ Filing.FilingTypes.REGISTRATION.value, Filing.FilingTypes.CONSENTCONTINUATIONOUT.value, Filing.FilingTypes.CONTINUATIONOUT.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, ] - if filing.status == Filing.Status.PAID and \ - not (filing.filing_type in no_legal_filings_in_paid_status + if filing.status in (Filing.Status.PAID, Filing.Status.WITHDRAWN) and \ + not (filing.filing_type in no_legal_filings_in_paid_withdrawn_status or (filing.filing_type == Filing.FilingTypes.DISSOLUTION.value and business.legal_type in [ Business.LegalTypes.SOLE_PROP.value, diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 0f98547e0e..4654eeb604 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -882,6 +882,19 @@ def get_temp_reg_filing(temp_reg_id: str, filing_id: str = None): ) ).one_or_none()) + @staticmethod + def get_temp_reg_filing_by_withdrawn_filing(filing_id: str, withdrawn_filing_id: str, filing_type: str = None): + """Return an temp reg Filing by withdrawn filing.""" + q = db.session.query(Filing). \ + filter(Filing.withdrawn_filing_id == withdrawn_filing_id). 
\ + filter(Filing.id == filing_id) + + if filing_type: + q = q.filter(Filing._filing_type == filing_type) + + filing = q.one_or_none() + return filing + @staticmethod def get_filing_by_payment_token(token: str): """Return a Filing by it's payment token.""" diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index 3c22066383..d24811e400 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -366,6 +366,9 @@ def _set_description(self, filing): if not legal_type and self._business: legal_type = self._business.legal_type + if not legal_type and self._filing.filing_type == 'noticeOfWithdrawal': + legal_type = self._filing.filing_json.get('filing').get('business', {}).get('legalType') + filing['numberedDescription'] = Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') filing['numberedLegalNameSuffix'] = Business.BUSINESSES.get(legal_type, {}).get('numberedLegalNameSuffix') diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index 59ce9c1937..bfb696598b 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -45,6 +45,7 @@ @cross_origin(origin='*') @jwt.requires_auth def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None, file_key: str = None): + # pylint: disable=too-many-branches """Return a JSON object with meta information about the Service.""" # basic checks if not authorized(identifier, jwt, ['view', ]): @@ -63,7 +64,15 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None message=get_error_message(ErrorCode.MISSING_BUSINESS, **{'identifier': identifier}) ), HTTPStatus.NOT_FOUND - if not (filing := Filing.get(identifier, filing_id)): + filing = Filing.get(identifier, filing_id) + if filing and identifier.startswith('T') and filing.id != filing_id: + withdrawn_filing = Filing.get_by_withdrawn_filing_id(filing_id=filing_id, + withdrawn_filing_id=filing.id, + filing_type=Filing.FilingTypes.NOTICEOFWITHDRAWAL) + if withdrawn_filing: + filing = withdrawn_filing + + if not filing: return jsonify( message=get_error_message(ErrorCode.FILING_NOT_FOUND, **{'filing_id': filing_id, 'identifier': identifier}) @@ -106,6 +115,7 @@ def _get_receipt(business: Business, filing: Filing, token): Filing.Status.COMPLETED, Filing.Status.CORRECTED, Filing.Status.PAID, + Filing.Status.WITHDRAWN ): return {}, HTTPStatus.BAD_REQUEST @@ -142,15 +152,15 @@ def _get_corp_name(business, filing): if business: return business.legal_name - name_request = (filing.filing_json - .get('filing') - .get(filing.filing_type) - .get('nameRequest', {})) - if name_request.get('legalName'): - return name_request.get('legalName') + filing_json = filing.filing_json.get('filing', {}) + name_request = filing_json.get(filing.filing_type, {}).get('nameRequest', {}) + + legal_name = name_request.get('legalName') or filing_json.get('business', {}).get('legalName') + if legal_name: + return legal_name - legal_type = name_request.get('legalType') + legal_type = name_request.get('legalType') or filing_json.get('business', {}).get('legal_type') if legal_type: - return Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') + return Business.BUSINESSES.get(legal_type, {}).get('numberedDescription', '') 
return '' diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py index 4ca9facb6c..2410e892bb 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py @@ -725,6 +725,17 @@ def test_unpaid_filing(session, client, jwt): }, HTTPStatus.OK, '2017-10-01' ), + ('bc_ia_completed', 'BC7654321', Business.LegalTypes.COMP.value, + 'incorporationApplication', INCORPORATION, None, None, Filing.Status.WITHDRAWN, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/receipt', + 'legalFilings': [ + {'incorporationApplication': + f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/incorporationApplication'}, + ] + } + }, + HTTPStatus.OK, '2017-10-01' + ), ('bc_annual_report_completed', 'BC7654321', Business.LegalTypes.COMP.value, 'annualReport', ANNUAL_REPORT, None, None, Filing.Status.COMPLETED, {'documents': {'receipt': f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/receipt', @@ -1468,6 +1479,14 @@ def filer_action(filing_name, filing_json, meta_data, business): ]}}, HTTPStatus.OK ), + ('ben_ia_paid', 'Tb31yQIuBw', None, Business.LegalTypes.BCOMP.value, + 'incorporationApplication', INCORPORATION, Filing.Status.WITHDRAWN, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/receipt', + 'legalFilings': [ + {'incorporationApplication': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/incorporationApplication'}, + ]}}, + HTTPStatus.OK + ), ('ben_ia_completed', 'Tb31yQIuBw', 'BC7654321', Business.LegalTypes.BCOMP.value, 'incorporationApplication', INCORPORATION, Filing.Status.COMPLETED, {'documents': {}}, HTTPStatus.OK @@ -1627,3 +1646,60 @@ def test_get_receipt_request_mock(session, client, jwt, requests_mock): assert rv.status_code == HTTPStatus.CREATED assert requests_mock.called_once + + +@pytest.mark.parametrize('test_name, temp_identifier, entity_type, expected_msg, expected_http_code', [ + ('now_ia_paid', 'Tb31yQIuBw', Business.LegalTypes.BCOMP.value, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/receipt', + 'legalFilings': [ + {'noticeOfWithdrawal': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/noticeOfWithdrawal'}, + ]}}, + HTTPStatus.OK + ) +]) +def test_temp_document_list_for_now(mocker, session, client, jwt, + test_name, + temp_identifier, + entity_type, + expected_msg, expected_http_code): + """Test document list for noticeOfWithdrawal states with temp identifier.""" + # Setup + + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['header']['name'] = 'incorporationApplication' + withdrawn_filing_json['filing']['business']['legalType'] = entity_type + withdrawn_filing_json['filing']['incorporationApplication'] = INCORPORATION + + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['header']['name'] = 'noticeOfWithdrawal' + filing_json['filing']['business']['legalType'] = entity_type + filing_json['filing']['noticeOfWithdrawal'] = MOCK_NOTICE_OF_WITHDRAWAL + + filing_date = datetime.utcnow() + + temp_reg = RegistrationBootstrap() + temp_reg._identifier = temp_identifier + temp_reg.save() + + business = None + withdrawn_filing = factory_filing(business, withdrawn_filing_json, filing_date=filing_date) + withdrawn_filing.temp_reg = 
temp_identifier + withdrawn_filing.save() + filing = factory_filing(business, filing_json, filing_date=filing_date) + filing.skip_status_listener = True + filing._status = Filing.Status.PAID + filing._payment_completion_date = '2017-10-01' + filing.temp_reg = None + filing.withdrawn_filing_id = withdrawn_filing.id + filing.save() + + mocker.patch('legal_api.core.filing.has_roles', return_value=True) + rv = client.get(f'/api/v2/businesses/{temp_identifier}/filings/{filing.id}/documents', + headers=create_header(jwt, [STAFF_ROLE], temp_identifier)) + + # remove the filing ID + rv_data = json.loads(re.sub("/\d+/", "/", rv.data.decode("utf-8")).replace("\n", "")) + expected = json.loads(re.sub("/\d+/", "/", json.dumps(expected_msg))) + + assert rv.status_code == expected_http_code + assert rv_data == expected From c37cc79fe3b1658eb0a1d2643bb7553ff3c50cd8 Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:02:14 +0800 Subject: [PATCH 038/133] 25405 fix_entity_description_filing_outputs (#3185) * 25405 fix_entity_description_filing_outputs * clean up --- legal-api/src/legal_api/reports/report.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index d24811e400..091ae2c763 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -354,21 +354,23 @@ def _set_tax_id(self, filing): def _set_description(self, filing): legal_type = None - if self._filing.filing_type == 'alteration': - legal_type = self._filing.filing_json.get('filing').get('alteration').get('business', {}).get('legalType') + filing_json = self._filing.filing_json.get('filing', {}) + filing_type = self._filing.filing_type + + # Check for alteration filing type + if filing_type == 'alteration': + legal_type = filing_json.get('alteration', {}).get('business', {}).get('legalType') else: - legal_type = (self._filing.filing_json - .get('filing') - .get(self._filing.filing_type) - .get('nameRequest', {}) - .get('legalType')) + legal_type = filing_json.get(filing_type, {}).get('nameRequest', {}).get('legalType') + # Fallback: Check the general business section + if not legal_type: + legal_type = filing_json.get('business', {}).get('legalType') + + # Final fallback: Check the _business object if not legal_type and self._business: legal_type = self._business.legal_type - if not legal_type and self._filing.filing_type == 'noticeOfWithdrawal': - legal_type = self._filing.filing_json.get('filing').get('business', {}).get('legalType') - filing['numberedDescription'] = Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') filing['numberedLegalNameSuffix'] = Business.BUSINESSES.get(legal_type, {}).get('numberedLegalNameSuffix') From 3bdaebc3f1ab63bb3ac8fc548f7503d38d4f1e51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Thu, 30 Jan 2025 09:48:24 -0800 Subject: [PATCH 039/133] Bumped Legal API version to 2.138.0 (#3192) --- legal-api/src/legal_api/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 9800f9bbfa..90f3a929fd 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.137.0' # pylint: disable=invalid-name +__version__ = '2.138.0' # pylint: disable=invalid-name From 
250191c8ec0a78a1e45e526d9ce35c4f3ff9e335 Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Thu, 30 Jan 2025 12:19:56 -0800 Subject: [PATCH 040/133] 24594 Tombstone pipeline - Implement conversion ledger (#3186) --- data-tool/flows/tombstone/tombstone_mappings.py | 5 +++++ data-tool/flows/tombstone/tombstone_queries.py | 10 +++++++--- data-tool/flows/tombstone/tombstone_utils.py | 9 +++++++++ 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 6c7c3c3b0b..b72dba851c 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -62,6 +62,9 @@ class EventFilings(str, Enum): FILE_CONTU = 'FILE_CONTU' FILE_CONTC = 'FILE_CONTC' + # Conversion + FILE_CONVL = 'FILE_CONVL' + # Correction FILE_CO_AR = 'FILE_CO_AR' FILE_CO_BC = 'FILE_CO_BC' @@ -192,6 +195,8 @@ def has_value(cls, value): EventFilings.FILE_CONTU: 'continuationIn', EventFilings.FILE_CONTC: 'continuationIn', + EventFilings.FILE_CONVL: 'conversionLedger', + EventFilings.FILE_CO_AR: 'correction', EventFilings.FILE_CO_BC: 'correction', EventFilings.FILE_CO_DI: 'correction', diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index c2e9a090a8..da61f9861a 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -109,7 +109,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- and c.corp_num = 'BC0043406' -- lots of directors -- and c.corp_num in ('BC0326163', 'BC0395512', 'BC0883637') -- and c.corp_num = 'BC0870626' -- lots of filings - IA, CoDs, ARs --- and c.corp_num = 'BC0004969' -- lots of filings - IA, ARs, transition, alteration, COD, COA +-- and c.corp_num = 'BC0004969' -- lots of filings - IA, ARs, transition, alteration, COD, COA -- and c.corp_num = 'BC0002567' -- lots of filings - IA, ARs, transition, COD -- and c.corp_num in ('BC0068889', 'BC0441359') -- test users mapping -- and c.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') @@ -119,6 +119,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0548839', 'BC0541207', 'BC0462424', 'BC0021973', -- restoration -- 'BC0034290', -- legacy other -- 'C0870179', 'C0870343', 'C0883424', -- continuation in (C, CCC, CUL) +-- 'BC0019921', 'BC0010385', -- conversion ledger -- 'BC0207097', 'BC0693625', 'BC0754041', 'BC0072008', 'BC0355241', 'BC0642237', 'BC0555891', 'BC0308683', -- correction -- 'BC0688906', 'BC0870100', 'BC0267106', 'BC0873461', -- alteration -- 'BC0536998', 'BC0574096', 'BC0663523' -- new mappings of CoA, CoD @@ -128,7 +129,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0747392' -- amalg - h -- TING -- 'BC0593394', -- amalg - r (with xpro) --- 'BC0805986', 'BC0561086', -- amalg - v +-- 'BC0805986', 'BC0561086', -- amalg - v -- 'BC0543231', 'BC0358476' -- amalg - h -- ) and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') @@ -582,10 +583,13 @@ def get_filings_query(corp_num): else upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) end as u_full_name, u.email_addr as u_email_addr, - u.role_typ_cd as u_role_typ_cd + u.role_typ_cd as u_role_typ_cd, + --- conversion ledger + cl.ledger_title_txt as cl_ledger_title_txt from event e 
left outer join filing f on e.event_id = f.event_id left outer join filing_user u on u.event_id = e.event_id + left outer join conv_ledger cl on cl.event_id = e.event_id where 1 = 1 and e.corp_num = '{corp_num}' -- and e.corp_num = 'BC0068889' diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index ae19f1f9f2..e8975f23ca 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -538,14 +538,23 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective def get_colin_display_name(data: dict) -> str: event_file_type = data['event_file_type'] name = EVENT_FILING_DISPLAY_NAME_MAPPING.get(event_file_type) + + # Annual Report if event_file_type == EventFilings.FILE_ANNBC.value: ar_dt_str = data['f_period_end_dt_str'] ar_dt = datetime.strptime(ar_dt_str, '%Y-%m-%d %H:%M:%S%z') suffix = ar_dt.strftime('%b %d, %Y').upper() name = f'{name} - {suffix}' + + # Change of Directors elif event_file_type == EventFilings.FILE_NOCDR.value: if not data['f_change_at_str']: name = f'{name} - Address Change or Name Correction Only' + + # Conversion Ledger + elif event_file_type == EventFilings.FILE_CONVL.value: + name = data['cl_ledger_title_txt'] + return name From 23655e1dfd77b2b314f2facc7a11bdf67866f8af Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Thu, 30 Jan 2025 12:35:09 -0800 Subject: [PATCH 041/133] 25419 - Tombstone - support staff comments (#3181) * 25419 - Tombstone - support staff comments Signed-off-by: Hongjing Chen * fix test Signed-off-by: Hongjing Chen * update summary output template - (temporary approach) Signed-off-by: Hongjing Chen * Fix 25508 at the same time - fix populated value of last ar date in pipeline Signed-off-by: Hongjing Chen * fix side issue - 25504 - fix IA date of XPRO Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- data-tool/flows/corps_tombstone_flow.py | 20 ++- .../flows/tombstone/tombstone_base_data.py | 34 +++-- .../flows/tombstone/tombstone_queries.py | 124 ++++++++++++++---- data-tool/flows/tombstone/tombstone_utils.py | 73 +++++++++-- .../common/businessDetails.html | 2 +- legal-api/src/legal_api/models/comment.py | 5 +- legal-api/tests/unit/models/test_comments.py | 2 +- 7 files changed, 213 insertions(+), 47 deletions(-) diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 8392a6ce07..762f723316 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -117,7 +117,7 @@ def clean_snapshot_filings_data(data: dict) -> dict: @task(name='3.1-Corp-Snapshot-Migrate-Task') -def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: +def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dict) -> int: """Migrate corp snapshot.""" # Note: The business info is partially loaded for businesses table now. And it will be fully # updated by the following placeholder historical filings migration. 
But it depends on the @@ -169,6 +169,13 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: resolution['business_id'] = business_id load_data(conn, 'resolutions', resolution) + for comment in tombstone_data['comments']: + comment['business_id'] = business_id + username = comment['staff_id'] + staff_id = users_mapper.get(username) + comment['staff_id'] = staff_id + load_data(conn, 'comments', comment) + return business_id @@ -202,6 +209,15 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id if amalgamation_data := data['amalgamations']: load_amalgamation_snapshot(conn, amalgamation_data, business_id, filing_id) + if comments_data := data['comments']: + for comment in comments_data: + comment['business_id'] = business_id + comment['filing_id'] = filing_id + username = comment['staff_id'] + staff_id = users_mapper.get(username) + comment['staff_id'] = staff_id + load_data(conn, 'comments', comment) + # load epoch filing epoch_filing_data = build_epoch_filing(business_id) load_data(conn, 'filings', epoch_filing_data) @@ -302,7 +318,7 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di with lear_engine.connect() as lear_conn: transaction = lear_conn.begin() try: - business_id = load_corp_snapshot(lear_conn, clean_data) + business_id = load_corp_snapshot(lear_conn, clean_data, users_mapper) load_placeholder_filings(lear_conn, clean_data, business_id, users_mapper) update_auth(lear_conn, config, corp_num, clean_data) transaction.commit() diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index f7700c31e6..907ecd1cb0 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -17,6 +17,28 @@ } +# ======== user ======== +USER = { + 'username': None, + 'firstname': None, + 'middlename': None, + 'lastname': None, + 'email': None, + 'creation_date': None +} + + +# ======== comment ======== +COMMENT = { + 'comment': None, + 'timestamp': None, + # FK + 'business_id': None, + 'staff_id': None, + 'filing_id': None +} + + # ======== address ======== ADDRESS = { 'address_type': None, # mailing or delivery @@ -146,15 +168,6 @@ # ======== filing ======== -USER = { - 'username': None, - 'firstname': None, - 'middlename': None, - 'lastname': None, - 'email': None, - 'creation_date': None -} - FILING_JSON = { 'filing': { 'header': {} @@ -185,7 +198,8 @@ 'submitter_roles': None, }, 'jurisdiction': None, # optional - 'amalgamations': None # optional + 'amalgamations': None, # optional + 'comments': None # optional } FILING_COMBINED = { diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index da61f9861a..cc6955bd5d 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -172,32 +172,50 @@ def get_corp_users_query(corp_nums: list): u_middle_name, u_last_name, to_char( - min(event_timerstamp::timestamp at time zone 'UTC'), + min(u_timestamp::timestamp at time zone 'UTC'), 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as earliest_event_dt_str, min(u_email_addr) as u_email_addr, u_role_typ_cd from ( - select - upper(u.user_id) as u_user_id, - u.last_name as u_last_name, - u.first_name as u_first_name, - u.middle_name as u_middle_name, - e.event_type_cd, - f.filing_type_cd, - e.event_timerstamp, - case - when u.first_name is null and u.middle_name is null and u.last_name is null then null - else 
upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) - end as u_full_name, - u.email_addr as u_email_addr, - u.role_typ_cd as u_role_typ_cd - from event e - left outer join filing f on e.event_id = f.event_id - left outer join filing_user u on u.event_id = e.event_id - where 1 = 1 --- and e.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') - and e.corp_num in ({corp_nums_str}) + select + upper(u.user_id) as u_user_id, + u.last_name as u_last_name, + u.first_name as u_first_name, + u.middle_name as u_middle_name, + e.event_type_cd, + f.filing_type_cd, + e.event_timerstamp as u_timestamp, + case + when u.first_name is null and u.middle_name is null and u.last_name is null then null + else upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) + end as u_full_name, + u.email_addr as u_email_addr, + u.role_typ_cd as u_role_typ_cd + from event e + left outer join filing f on e.event_id = f.event_id + left outer join filing_user u on u.event_id = e.event_id + where 1 = 1 + -- and e.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') + and e.corp_num in ({corp_nums_str}) + union + -- staff comment at business level + select + upper(cc.user_id) as u_user_id, + cc.last_nme as u_last_name, + cc.first_nme as u_first_name, + cc.middle_nme as u_middle_name, + 'STAFF' as event_type_cd, -- placeholder + 'COMMENT' as filing_type_cd, -- placeholder + comment_dts as u_timestamp, + case + when cc.first_nme is null and cc.middle_nme is null and cc.last_nme is null then null + else upper(concat_ws('_', nullif(trim(cc.first_nme),''), nullif(trim(cc.middle_nme),''), nullif(trim(cc.last_nme),''))) + end as u_full_name, + null as u_email_addr, + null as u_role_typ_cd + from corp_comments cc + where cc.corp_num in ({corp_nums_str}) ) sub group by sub.u_user_id, sub.u_full_name, sub.u_first_name, sub.u_middle_name, sub.u_last_name, sub.u_role_typ_cd order by sub.u_user_id; @@ -536,7 +554,9 @@ def get_jurisdictions_query(corp_num): j.xpro_typ_cd as j_xpro_typ_cd, j.home_company_nme as j_home_company_nme, j.home_juris_num as j_home_juris_num, - to_char(j.home_recogn_dt, 'YYYY-MM-DD') as j_home_recogn_dt, + to_char( + j.home_recogn_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + ) as j_home_recogn_dt, j.othr_juris_desc as j_othr_juris_desc, j.bc_xpro_num as j_bc_xpro_num from jurisdiction j @@ -636,6 +656,62 @@ def get_amalgamation_query(corp_num): return query +def get_business_comments_query(corp_num): + query = f""" + select + to_char( + cc.comment_dts::timestamp at time zone 'UTC', + 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + ) as cc_comments_dts_str, + cc.comments as cc_comments, + cc.accession_comments as cc_accession_comments, + upper(cc.user_id) as cc_user_id, + cc.first_nme as cc_first_name, + cc.last_nme as cc_last_name, + cc.middle_nme as cc_middle_name, + case + when cc.first_nme is null and cc.middle_nme is null and cc.last_nme is null then null + else upper(concat_ws('_', nullif(trim(cc.first_nme),''), nullif(trim(cc.middle_nme),''), nullif(trim(cc.last_nme),''))) + end as cc_full_name + from corp_comments cc + where corp_num = '{corp_num}'; + """ + return query + + +def get_filing_comments_query(corp_num): + query = f""" + select + e.event_id as e_event_id, + to_char( + lt.ledger_text_dts::timestamp at time zone 'UTC', + 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + ) as lt_ledger_text_dts_str, + lt.user_id as 
lt_user_id, + trim(lt.notation) as lt_notation, + null as cl_ledger_desc + from event e + join ledger_text lt on e.event_id = lt.event_id + join corporation c on e.corp_num = c.corp_num and c.corp_num = '{corp_num}' + where + nullif(trim(lt.notation), '') is not null + union + select + e.event_id as e_event_id, + null as lt_ledger_text_dts_str, + null as lt_user_id, + null as lt_notation, + trim(cl.ledger_desc) as cl_ledger_desc + from event e + join conv_ledger cl on e.event_id = cl.event_id + join corporation c on e.corp_num = c.corp_num and c.corp_num = '{corp_num}' + where + nullif(trim(cl.ledger_desc), '') is not null + ; + """ + return query + + def get_corp_snapshot_filings_queries(config, corp_num): queries = { 'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX), @@ -646,7 +722,9 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'resolutions': get_resolutions_query(corp_num), 'jurisdictions': get_jurisdictions_query(corp_num), 'filings': get_filings_query(corp_num), - 'amalgamations': get_amalgamation_query(corp_num) + 'amalgamations': get_amalgamation_query(corp_num), + 'business_comments': get_business_comments_query(corp_num), + 'filing_comments': get_filing_comments_query(corp_num) } return queries diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index e8975f23ca..006613c597 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -25,10 +25,11 @@ def format_business_data(data: dict) -> dict: state = business_data['state'] business_data['state'] = 'ACTIVE' if state == 'ACT' else 'HISTORICAL' - if not (last_ar_date := business_data['last_ar_date']): - last_ar_date = business_data['founding_date'] - - last_ar_year = int(last_ar_date.split('-')[0]) + if last_ar_date := business_data['last_ar_date']: + last_ar_year = int(last_ar_date.split('-')[0]) + else: + last_ar_date = None + last_ar_year = None formatted_business = { **business_data, @@ -318,10 +319,13 @@ def format_filings_data(data: dict) -> list[dict]: if filing_type == 'amalgamationApplication': amalgamation = format_amalgamations_data(data, x['e_event_id']) + comments = format_filing_comments_data(data, x['e_event_id']) + filing = { 'filings': filing_body, 'jurisdiction': jurisdiction, - 'amalgamations': amalgamation + 'amalgamations': amalgamation, + 'comments': comments } formatted_filings.append(filing) @@ -406,14 +410,66 @@ def format_amalgamating_businesses(ting_data: dict) -> dict: return formatted_ting +def format_filing_comments_data(data: dict, event_id: Decimal) -> list: + filing_comments_data = data['filing_comments'] + + matched_filing_comments = [ + item for item in filing_comments_data if item.get('e_event_id') == event_id + ] + + if not matched_filing_comments: + return None + + formatted_filing_comments = [] + for x in matched_filing_comments: + if c := x['lt_notation']: + timestamp = x['lt_ledger_text_dts_str'] + # Note that only a small number of lt_user_id is BCOMPS, + # others are None + # TODO: investigate BCOMPS related stuff + staff_id = x['lt_user_id'] + else: + c = x['cl_ledger_desc'] + timestamp = None + staff_id = None + comment = { + 'comment': c, + 'timestamp': timestamp, + 'staff_id': staff_id, # will be updated to real staff_id when loading data into db + } + + formatted_filing_comments.append(comment) + + return formatted_filing_comments + + +def format_business_comments_data(data: dict) -> list: + business_comments_data = data['business_comments'] + 
formatted_business_comments = [] + + for x in business_comments_data: + c = x['cc_comments'] if x['cc_comments'] else x['cc_accession_comments'] + if not (staff_id := x['cc_user_id']): + staff_id = x['cc_full_name'] if x['cc_full_name'] else None + comment = { + 'comment': c, + 'timestamp': x['cc_comments_dts_str'], + 'staff_id': staff_id, # will be updated to real staff_id when loading data into db + } + formatted_business_comments.append(comment) + + return formatted_business_comments + + def format_users_data(users_data: list) -> list: formatted_users = [] for x in users_data: user = copy.deepcopy(USER) event_file_types = x['event_file_types'].split(',') - # skip users if all event_file_type is unsupported - if not any(get_target_filing_type(ef)[0] for ef in event_file_types): + # skip users if all event_file_type is unsupported or not users for staff comments + if not any(get_target_filing_type(ef)[0] for ef in event_file_types)\ + and not any (ef == 'STAFF_COMMENT' for ef in event_file_types): continue if not (username := x['u_user_id']): @@ -458,7 +514,8 @@ def get_data_formatters() -> dict: 'share_classes': format_share_classes_data, 'aliases': format_aliases_data, 'resolutions': format_resolutions_data, - 'filings': format_filings_data + 'filings': format_filings_data, + 'comments': format_business_comments_data, # only for business level, filing level will be formatted ith filings } return ret diff --git a/legal-api/report-templates/template-parts/common/businessDetails.html b/legal-api/report-templates/template-parts/common/businessDetails.html index 03b0561663..1fc3e13b63 100644 --- a/legal-api/report-templates/template-parts/common/businessDetails.html +++ b/legal-api/report-templates/template-parts/common/businessDetails.html @@ -43,7 +43,7 @@
{{report_date_time}}
{{business.state}} - {% if business.state in ('HISTORICAL', 'LIQUIDATION') %} + {% if business.state in ('HISTORICAL', 'LIQUIDATION') and stateFilings %} - {% if business.legalType in ['GP', 'SP'] and business.state == 'HISTORICAL' %} Dissolved diff --git a/legal-api/src/legal_api/models/comment.py b/legal-api/src/legal_api/models/comment.py index a48da5ba6a..de2da52da2 100644 --- a/legal-api/src/legal_api/models/comment.py +++ b/legal-api/src/legal_api/models/comment.py @@ -52,15 +52,16 @@ class Comment(db.Model): @property def json(self): """Return the json repressentation of a comment.""" + from legal_api.core.constants import REDACTED_STAFF_SUBMITTER # pylint: disable=import-outside-toplevel user = User.find_by_id(self.staff_id) return { 'comment': { 'id': self.id, - 'submitterDisplayName': user.display_name if user else None, + 'submitterDisplayName': user.display_name if user else REDACTED_STAFF_SUBMITTER, 'comment': self.comment, 'filingId': self.filing_id, 'businessId': self.business_id, - 'timestamp': self.timestamp.isoformat() + 'timestamp': self.timestamp.isoformat() if self.timestamp else None } } diff --git a/legal-api/tests/unit/models/test_comments.py b/legal-api/tests/unit/models/test_comments.py index d03739da75..c90598b694 100644 --- a/legal-api/tests/unit/models/test_comments.py +++ b/legal-api/tests/unit/models/test_comments.py @@ -75,7 +75,7 @@ def test_filing_comment_dump_json(session): assert c.json == { 'comment': { 'id': c.id, - 'submitterDisplayName': None, + 'submitterDisplayName': 'Registry Staff', 'comment': 'a comment', 'filingId': f.id, 'businessId': None, From b1d82aef5a66ade31afc3f8d819931cd3ab18159 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Thu, 30 Jan 2025 14:16:52 -0800 Subject: [PATCH 042/133] 25393 - Suppress emails for system file BEN Corrections Statement (#3190) * 25393 - supress emails for system file BEN Corrections Statement * 25393 - fix lint issues * 25393 - fix lint issues * 25393 - fix lint issues * 25393 - too-many-local disable * 25393 - update unit test * 25393 - fix lint issues --- .../entity-filer/src/entity_filer/worker.py | 37 +++++++++---- .../unit/test_worker/test_correction_bcia.py | 54 +++++++++++++++++++ 2 files changed, 80 insertions(+), 11 deletions(-) diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 1de4fb1c26..944ed7849e 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -199,7 +199,8 @@ async def publish_mras_email(filing: Filing): ) -async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable=too-many-branches,too-many-statements +async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches,too-many-statements,too-many-locals + flask_app: Flask): """Render the filings contained in the submission. 
Start the migration to using core/Filing @@ -400,16 +401,20 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable if filing_type != FilingCore.FilingTypes.CHANGEOFNAME: business_profile.update_business_profile(business, filing_submission, filing_type) - try: - await publish_email_message( - qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, filing_submission.status) - except Exception as err: # pylint: disable=broad-except, unused-variable # noqa F841; - # mark any failure for human review - capture_message( - f'Queue Error: Failed to place email for filing:{filing_submission.id}' - f'on Queue with error:{err}', - level='error' - ) + # This will be True only in the case where filing is filed by Jupyter notebook for BEN corrections + is_system_filed_correction = is_correction and is_system_filed_filing(filing_submission) + + if not is_system_filed_correction: + try: + await publish_email_message( + qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, filing_submission.status) + except Exception as err: # pylint: disable=broad-except, unused-variable # noqa F841; + # mark any failure for human review + capture_message( + f'Queue Error: Failed to place email for filing:{filing_submission.id}' + f'on Queue with error:{err}', + level='error' + ) try: await publish_event(business, filing_submission) @@ -434,6 +439,16 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable ) +def is_system_filed_filing(filing_submission) -> bool: + """Check if filing is filed by system. + + Filing filed using Jupyter Notebook will have 'certified_by' field = 'system'. + + """ + certified_by = filing_submission.json['filing']['header']['certifiedBy'] + return certified_by == 'system' if certified_by else False + + async def cb_subscription_handler(msg: nats.aio.client.Msg): """Use Callback to process Queue Msg objects.""" try: diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py b/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py index 9585f70d18..6cf3cf4d69 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py @@ -213,6 +213,26 @@ } } +BC_CORRECTION_SHORT = { + 'filing': { + 'header': { + 'name': 'correction', + 'date': '2025-01-01', + 'certifiedBy': 'system' + }, + 'business': { + 'identifier': 'BC1234567', + 'legalType': 'BC' + }, + 'correction': { + 'details': 'First correction', + 'correctedFilingId': '123456', + 'correctedFilingType': 'incorporationApplication', + 'comment': 'Correction for Incorporation Application filed on 2025-01-01 by system' + } + } +} + BC_CORRECTION_APPLICATION = BC_CORRECTION naics_response = { @@ -828,3 +848,37 @@ async def test_worker_share_class_and_series_change(app, session, mocker, test_n assert business.share_classes.all()[0].par_value == share_class_json2['parValue'] assert business.share_classes.all()[0].currency == share_class_json2['currency'] assert [item.json for item in business.share_classes.all()[0].series] == share_class_json2['series'] + + +async def test_correction_ben_statement(app, session, mocker): + """Assert the worker process calls the BEN correction statement correctly.""" + + identifier = 'BC1234567' + business = create_entity(identifier, 'BEN', 'ABC test inc.') + business.save() + business_id = business.id + + filing = copy.deepcopy(BC_CORRECTION_SHORT) + + corrected_filing_id = 
factory_completed_filing(business, BC_CORRECTION_SHORT).id + filing['filing']['correction']['correctedFilingId'] = corrected_filing_id + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + filing_id = (create_filing(payment_id, filing, business_id=business_id)).id + filing_msg = {'filing': {'id': filing_id}} + + # mock out the email sender and event publishing + mocker.patch('entity_filer.worker.publish_event', return_value=None) + mocker.patch('entity_filer.filing_processors.filing_components.name_request.consume_nr', return_value=None) + mocker.patch('entity_filer.filing_processors.filing_components.business_profile.update_business_profile', + return_value=None) + mocker.patch('legal_api.services.bootstrap.AccountService.update_entity', return_value=None) + + await process_filing(filing_msg, app) + + final_filing = Filing.find_by_id(filing_id) + + filing_comments = final_filing.comments.all() + assert len(filing_comments) == 1 + assert filing_comments[0].comment == filing['filing']['correction']['comment'] From 852fec125af0c2f1770b68af4f4968ee66e5ed5a Mon Sep 17 00:00:00 2001 From: meawong Date: Thu, 30 Jan 2025 14:25:42 -0800 Subject: [PATCH 043/133] 25578 - Expose WithdrawalPending Property in CommonLedgerItems for FE (#3193) * 25578 - Expose withdrawalPending in commonLedgerItems for FE use * 25578-Fix-unit-test * 25578-Add-missing-assert-for-withdrawalPending --- legal-api/src/legal_api/core/filing.py | 1 + legal-api/tests/unit/core/test_filing_ledger.py | 7 ++++++- .../v2/test_business_filings/test_filings_ledger.py | 3 ++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index bc5af45c6d..4ece6c6a9b 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -454,6 +454,7 @@ def common_ledger_items(business_identifier: str, filing_storage: FilingStorage) filing_storage.filing_type not in no_output_filing_types else None, 'filingLink': f'{base_url}/{business_identifier}/filings/{filing_storage.id}', 'isFutureEffective': filing.is_future_effective, + 'withdrawalPending': filing_storage.withdrawal_pending } @staticmethod diff --git a/legal-api/tests/unit/core/test_filing_ledger.py b/legal-api/tests/unit/core/test_filing_ledger.py index 2ddd5a9977..dd01f67033 100644 --- a/legal-api/tests/unit/core/test_filing_ledger.py +++ b/legal-api/tests/unit/core/test_filing_ledger.py @@ -70,7 +70,7 @@ def test_simple_ledger_search(session): alteration = next((f for f in ledger if f.get('name') == 'alteration'), None) assert alteration - assert 16 == len(alteration.keys()) + assert 17 == len(alteration.keys()) assert 'availableOnPaperOnly' in alteration assert 'effectiveDate' in alteration assert 'filingId' in alteration @@ -80,6 +80,7 @@ def test_simple_ledger_search(session): assert 'submittedDate' in alteration assert 'submitter' in alteration assert 'displayLedger' in alteration + assert 'withdrawalPending' in alteration # assert alteration['commentsLink'] # assert alteration['correctionLink'] # assert alteration['filingLink'] @@ -119,3 +120,7 @@ def test_common_ledger_items(session): factory_completed_filing(business, filing, filing_date=founding_date + datedelta.datedelta(months=1), filing_type='adminFreeze') common_ledger_items = CoreFiling.common_ledger_items(identifier, completed_filing) assert common_ledger_items['displayLedger'] is False + + completed_filing.withdrawal_pending = True + common_ledger_items = 
CoreFiling.common_ledger_items(identifier, completed_filing) + assert common_ledger_items['withdrawalPending'] is True diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py index dfbcef6cd5..1f36d4c26f 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py @@ -125,7 +125,7 @@ def test_ledger_search(session, client, jwt): alteration = next((f for f in ledger['filings'] if f.get('name') == 'alteration'), None) assert alteration - assert 16 == len(alteration.keys()) + assert 17 == len(alteration.keys()) assert 'availableOnPaperOnly' in alteration assert 'effectiveDate' in alteration assert 'filingId' in alteration @@ -135,6 +135,7 @@ def test_ledger_search(session, client, jwt): assert 'submittedDate' in alteration assert 'submitter' in alteration assert 'displayLedger' in alteration + assert 'withdrawalPending' in alteration # assert alteration['commentsLink'] # assert alteration['correctionLink'] # assert alteration['filingLink'] From 4007264425c3151b4f0496d0eda5c838ed05ac81 Mon Sep 17 00:00:00 2001 From: meawong Date: Thu, 30 Jan 2025 14:27:21 -0800 Subject: [PATCH 044/133] 25583 - Return Documents for NoW when Status is COMPLETED (#3191) * 25583-Add-special-case-for-NoW-in-completed-status * 25583-Update-unit-tests * 25583-Fix typo --- legal-api/src/legal_api/core/filing.py | 4 +++- .../business_filings/business_documents.py | 3 ++- .../test_business_filings/test_filing_documents.py | 14 ++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index 4ece6c6a9b..f2768d133d 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -532,7 +532,9 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, ] - if filing.status in (Filing.Status.PAID, Filing.Status.WITHDRAWN) and \ + if (filing.status in (Filing.Status.PAID, Filing.Status.WITHDRAWN) or + (filing.status == Filing.Status.COMPLETED and + filing.filing_type == Filing.FilingTypes.NOTICEOFWITHDRAWAL.value)) and \ not (filing.filing_type in no_legal_filings_in_paid_withdrawn_status or (filing.filing_type == Filing.FilingTypes.DISSOLUTION.value and business.legal_type in [ diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index bfb696598b..5bc0e81c72 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -79,7 +79,8 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None ), HTTPStatus.NOT_FOUND if not legal_filing_name and not file_key: - if identifier.startswith('T') and filing.status == Filing.Status.COMPLETED: + if identifier.startswith('T') and filing.status == Filing.Status.COMPLETED and \ + filing.filing_type != Filing.FilingTypes.NOTICEOFWITHDRAWAL: return {'documents': {}}, HTTPStatus.OK return _get_document_list(business, filing) diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py 
b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py index 2410e892bb..21c9dcef45 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py @@ -1703,3 +1703,17 @@ def test_temp_document_list_for_now(mocker, session, client, jwt, assert rv.status_code == expected_http_code assert rv_data == expected + + filing._status = Filing.Status.COMPLETED + filing.save() + + mocker.patch('legal_api.core.filing.has_roles', return_value=True) + rv = client.get(f'/api/v2/businesses/{temp_identifier}/filings/{filing.id}/documents', + headers=create_header(jwt, [STAFF_ROLE], temp_identifier)) + + # remove the filing ID + rv_data = json.loads(re.sub("/\d+/", "/", rv.data.decode("utf-8")).replace("\n", "")) + expected = json.loads(re.sub("/\d+/", "/", json.dumps(expected_msg))) + + assert rv.status_code == expected_http_code + assert rv_data == expected From 71d7dde8ff81c7a1c35b995278d0867dfb25476b Mon Sep 17 00:00:00 2001 From: flutistar Date: Fri, 31 Jan 2025 14:19:46 -0800 Subject: [PATCH 045/133] updated get document api --- .../src/legal_api/resources/v2/document.py | 24 +++++++++++++++---- .../filings/validations/continuation_in.py | 4 ++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index ea21ccd9ef..38115fd5da 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -13,6 +13,7 @@ # limitations under the License. """Module for handling Minio document operations.""" +import re from http import HTTPStatus from flask import Blueprint, current_app, jsonify @@ -95,10 +96,25 @@ def delete_document(document_service_id: str): return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK -@bp.route('/drs//', methods=['GET']) +@bp.route('/drs//', methods=['GET']) @cross_origin(origins='*') @jwt.requires_auth -def get_document(document_class: str, document_service_id: str): - """Get document file from Document Record Service.""" +def get_document(document_class: str, document_key: str): + """Get document file from Minio or Document Record Service.""" + drs_id_pattern = r"^DS\d{10}$" - return DocumentRecordService.get_document(document_class, document_service_id), HTTPStatus.OK \ No newline at end of file + try: + if re.match(drs_id_pattern, document_key): + return DocumentRecordService.get_document(document_class, document_key), HTTPStatus.OK + else: + response = MinioService.get_file(document_key) + return current_app.response_class( + response=response.data, + status=response.status, + mimetype='application/pdf' + ) + except Exception as e: + current_app.logger.error(f'Error getting file {document_key}: {e}') + return jsonify( + message=f'Error getting file {document_key}.' 
+ ), HTTPStatus.INTERNAL_SERVER_ERROR \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 40b43f01cd..245278b3e7 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -128,7 +128,7 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' if file_key := foreign_jurisdiction.get('affidavitFileKey'): - if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, affidavit_file_key_path): + if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, affidavit_file_key_path): msg.extend(err) else: msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) @@ -158,7 +158,7 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, file_key_path): + if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, file_key_path): msg.extend(err) return msg From 5b486248c8ab70dfcbd9e296edad011c27d2640a Mon Sep 17 00:00:00 2001 From: meawong Date: Mon, 3 Feb 2025 13:10:51 -0800 Subject: [PATCH 046/133] 25641-Embed-NoW-once-available-and-update-unit-tests (#3194) --- .../business_filings/business_filings.py | 2 +- .../v2/test_business_filings/test_filings.py | 17 ++++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index b81f0b4747..c98bb13ed3 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -307,7 +307,7 @@ def get_single_filing(identifier: str, filing_id: int): filing_json = rv.json if rv.status == Filing.Status.PENDING.value: ListFilingResource.get_payment_update(filing_json) - if rv.status == Filing.Status.WITHDRAWN.value and identifier.startswith('T'): + if (rv.status == Filing.Status.WITHDRAWN.value or rv.storage.withdrawal_pending) and identifier.startswith('T'): now_filing = ListFilingResource.get_notice_of_withdrawal(filing_json['filing']['header']['filingId']) filing_json['filing']['noticeOfWithdrawal'] = now_filing.json elif (rv.status in [Filing.Status.CHANGE_REQUESTED.value, diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index d7f2df42a4..c198cd8689 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -117,7 +117,7 @@ def test_get_temp_business_filing(session, client, jwt, legal_type, filing_type, assert rv.json['filing'][filing_type] == filing_json def test_get_withdrawn_temp_business_filing(session, client, jwt): - """Assert that a FE withdrawn temp business returns the filing with the NoW 
embedded when the status is WITHDRAWN.""" + """Assert that a withdrawn FE temp business returns the filing with the NoW embedded once available.""" # set-up withdrawn boostrap FE filing today = datetime.utcnow().date() @@ -157,23 +157,26 @@ def test_get_withdrawn_temp_business_filing(session, client, jwt): now_filing = factory_filing(None, now_json_data) now_filing.withdrawn_filing_id = withdrawn_filing_id now_filing.save() + new_business_filing.withdrawal_pending = True + new_business_filing.save() - # fetch filings when withdrawn filing status is PAID + # fetch filings once the NoW has been submitted rv = client.get(f'/api/v2/businesses/{identifier}/filings', headers=create_header(jwt, [STAFF_ROLE], identifier)) - # validate that the NoW is not embedded in the withdrawn filing - assert 'noticeOfWithdrawal' not in rv.json['filing'] + # validate that the NoW is embedded in the withdrawn filing + assert 'noticeOfWithdrawal' in rv.json['filing'] - # set status to WITHDRAWN + # withdraw bootstrap filing new_business_filing._status = Filing.Status.WITHDRAWN.value + new_business_filing.withdrawal_pending = False new_business_filing.save() - # fetch filings when withdrawn filing status is WITHDRAWN + # fetch filings after the bootstrap filing has been withdrawn rv = client.get(f'/api/v2/businesses/{identifier}/filings', headers=create_header(jwt, [STAFF_ROLE], identifier)) - # validate that the NoW is now embedded in the withdrawn filing + # validate that the NoW is still embedded in the withdrawn filing assert 'noticeOfWithdrawal' in rv.json['filing'] assert rv.json['filing']['noticeOfWithdrawal'] is not None From f6546a783cd3ff48600e89b267bda6e655b7e459 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Mon, 3 Feb 2025 15:57:47 -0800 Subject: [PATCH 047/133] 23351 - Notice of Withdrawal Emailer (#3195) * add NoW email template * update tracker * add NoW email processor * update unit tests --- .../entity-emailer/requirements.txt | 2 +- .../notice_of_withdrawal_notification.py | 180 ++++++++++++++++++ .../email_templates/NOW-COMPLETED.html | 63 ++++++ .../email_templates/common/style.html | 5 + .../entity_emailer/message_tracker/tracker.py | 2 +- .../src/entity_emailer/worker.py | 4 + .../entity-emailer/tests/unit/__init__.py | 83 +++++++- .../test_notice_of_withdrawal_notification.py | 82 ++++++++ .../entity-emailer/tests/unit/test_tracker.py | 8 + 9 files changed, 426 insertions(+), 3 deletions(-) create mode 100644 queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py create mode 100644 queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html create mode 100644 queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py diff --git a/queue_services/entity-emailer/requirements.txt b/queue_services/entity-emailer/requirements.txt index d8eeae2c8c..bcbae5a182 100644 --- a/queue_services/entity-emailer/requirements.txt +++ b/queue_services/entity-emailer/requirements.txt @@ -78,7 +78,7 @@ webcolors==1.13 Werkzeug==1.0.1 yarl==1.8.2 zipp==3.15.0 -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git 
a/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py new file mode 100644 index 0000000000..c36f6dac76 --- /dev/null +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py @@ -0,0 +1,180 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Email processing rules and actions for Notice of Withdrawal notifications.""" +import base64 +import re +from http import HTTPStatus +from pathlib import Path + +import requests +from entity_queue_common.service_utils import logger +from flask import current_app +from jinja2 import Template +from legal_api.core.meta.filing import FilingMeta +from legal_api.models import Business, Filing + +from entity_emailer.email_processors import ( + get_filing_document, + get_filing_info, + get_recipient_from_auth, + substitute_template_parts, +) + + +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals + """Build the email for Notice of Withdrawal notification.""" + logger.debug('notice_of_withdrawal_notification: %s', email_info) + # get template and fill in parts + filing_type = email_info['type'] + + # get template variables from filing + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info['filingId']) + + # display company name only for existing businesses + if business.get('identifier').startswith('T'): + company_name = None + else: + company_name = business.get('legalName') + # record to be withdrawn --> withdrawn filing display name + withdrawn_filing = Filing.find_by_id(filing.withdrawn_filing_id) + withdrawn_filing_display_name = FilingMeta.get_display_name( + business['legalType'], + withdrawn_filing.filing_type, + withdrawn_filing.filing_sub_type + ) + template = Path( + f'{current_app.config.get("TEMPLATE_PATH")}/NOW-COMPLETED.html' + ).read_text() + filled_template = substitute_template_parts(template) + # render template with vars + jnja_template = Template(filled_template, autoescape=True) + filing_data = (filing.json)['filing'][f'{filing_type}'] + filing_name = filing.filing_type[0].upper() + ' '.join(re.findall('[a-zA-Z][^A-Z]*', filing.filing_type[1:])) + html_out = jnja_template.render( + business=business, + filing=filing_data, + header=(filing.json)['filing']['header'], + company_name=company_name, + filing_date_time=leg_tmz_filing_date, + effective_date_time=leg_tmz_effective_date, + withdrawnFilingType=withdrawn_filing_display_name, + entity_dashboard_url=current_app.config.get('DASHBOARD_URL') + + (filing.json)['filing']['business'].get('identifier', ''), + email_header=filing_name.upper(), + filing_type=filing_type + ) + + # get attachments + pdfs = _get_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) + + # get recipients + identifier = filing.filing_json['filing']['business']['identifier'] + 
recipients = _get_contacts(identifier, token, withdrawn_filing) + recipients = list(set(recipients)) + recipients = ', '.join(filter(None, recipients)).strip() + + # assign subject + subject = 'Notice of Withdrawal filed Successfully' + + legal_name = business.get('legalName', None) + legal_name = 'Numbered Company' if legal_name.startswith(identifier) else legal_name + if not identifier.startswith('T'): + subject = f'{legal_name} - {subject}' if legal_name else subject + + return { + 'recipients': recipients, + 'requestBy': 'BCRegistries@gov.bc.ca', + 'content': { + 'subject': subject, + 'body': f'{html_out}', + 'attachments': pdfs + } + } + + +def _get_pdfs( + token: str, + business: dict, + filing: Filing, + filing_date_time: str, + effective_date: str) -> list: + """Get the PDFs for the Notice of Withdrawal output.""" + pdfs = [] + attach_order = 1 + headers = { + 'Accept': 'application/pdf', + 'Authorization': f'Bearer {token}' + } + + # add filing PDF + filing_pdf_type = 'noticeOfWithdrawal' + filing_pdf_encoded = get_filing_document(business['identifier'], filing.id, filing_pdf_type, token) + if filing_pdf_encoded: + pdfs.append( + { + 'fileName': 'Notice of Withdrawal.pdf', + 'fileBytes': filing_pdf_encoded.decode('utf-8'), + 'fileUrl': '', + 'attachOrder': str(attach_order) + } + ) + attach_order += 1 + + # add receipt PDF + corp_name = business.get('legalName') + if business.get('identifier').startswith('T'): + business_data = None + else: + business_data = Business.find_by_internal_id(filing.business_id) + receipt = requests.post( + f'{current_app.config.get("PAY_API_URL")}/{filing.payment_token}/receipts', + json={ + 'corpName': corp_name, + 'filingDateTime': filing_date_time, + 'effectiveDateTime': effective_date if effective_date else '', + 'filingIdentifier': str(filing.id), + 'businessNumber': business_data.tax_id if business_data and business_data.tax_id else '' + }, headers=headers) + + if receipt.status_code != HTTPStatus.CREATED: + logger.error('Failed to get receipt pdf for filing: %s', filing.id) + else: + receipt_encoded = base64.b64encode(receipt.content) + pdfs.append( + { + 'fileName': 'Receipt.pdf', + 'fileBytes': receipt_encoded.decode('utf-8'), + 'fileUrl': '', + 'attachOrder': str(attach_order) + }) + attach_order += 1 + return pdfs + + +def _get_contacts(identifier, token, withdrawn_filing): + recipients = [] + if identifier.startswith('T'): + # get from withdrawn filing (FE new business filing) + filing_type = withdrawn_filing.filing_type + recipients.append(withdrawn_filing.filing_json['filing'][filing_type]['contactPoint']['email']) + + for party in withdrawn_filing.filing_json['filing'][filing_type]['parties']: + for role in party['roles']: + if role['roleType'] == 'Completing Party': + recipients.append(party['officer'].get('email')) + break + else: + recipients.append(get_recipient_from_auth(identifier, token)) + + return recipients diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html new file mode 100644 index 0000000000..2ad556acfb --- /dev/null +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html @@ -0,0 +1,63 @@ + + + + + + + + + + Notice of Withdrawal + [[style.html]] + + + + + + + + + + + \ No newline at end of file diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html 
b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html index e435deeb2c..faa26d9eb7 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html @@ -89,4 +89,9 @@ .continuation-application-details .value { line-height: 24px; } + +.now-filing-info-title { + font-weight: 700; + margin-bottom: 4px; +} diff --git a/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py b/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py index 6d76dc475f..2c3b4c4b4f 100644 --- a/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py +++ b/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py @@ -97,7 +97,7 @@ def get_message_context_properties(queue_msg: nats.aio.client.Msg): message_id = f'{etype}_{option}_{ar_year}_{business_id}' return create_message_context_properties(etype, message_id, None, None, False) - if etype in ('agmLocationChange', 'agmExtension') \ + if etype in ('agmLocationChange', 'agmExtension', 'noticeOfWithdrawal') \ and (option := email.get('option', None)) \ and option == 'COMPLETED' \ and (filing_id := email.get('filingId', None)): diff --git a/queue_services/entity-emailer/src/entity_emailer/worker.py b/queue_services/entity-emailer/src/entity_emailer/worker.py index 308880d609..b440e0137a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/worker.py +++ b/queue_services/entity-emailer/src/entity_emailer/worker.py @@ -58,6 +58,7 @@ involuntary_dissolution_stage_1_notification, mras_notification, name_request, + notice_of_withdrawal_notification, nr_notification, registration_notification, restoration_notification, @@ -215,6 +216,9 @@ def process_email(email_msg: dict, flask_app: Flask): # pylint: disable=too-man elif etype == 'continuationIn': email = continuation_in_notification.process(email_msg['email'], token) send_email(email, token) + elif etype == 'noticeOfWithdrawal' and option == Filing.Status.COMPLETED.value: + email = notice_of_withdrawal_notification.process(email_msg['email'], token) + send_email(email, token) elif etype in filing_notification.FILING_TYPE_CONVERTER.keys(): if etype == 'annualReport' and option == Filing.Status.COMPLETED.value: logger.debug('No email to send for: %s', email_msg) diff --git a/queue_services/entity-emailer/tests/unit/__init__.py b/queue_services/entity-emailer/tests/unit/__init__.py index 0f80379fe6..b233e2cb4b 100644 --- a/queue_services/entity-emailer/tests/unit/__init__.py +++ b/queue_services/entity-emailer/tests/unit/__init__.py @@ -14,7 +14,7 @@ """The Unit Tests and the helper routines.""" import copy import json -from datetime import datetime +from datetime import datetime, timedelta from random import randrange from unittest.mock import Mock @@ -41,6 +41,7 @@ FILING_HEADER, FILING_TEMPLATE, INCORPORATION_FILING_TEMPLATE, + NOTICE_OF_WITHDRAWAL, REGISTRATION, RESTORATION, ) @@ -698,6 +699,86 @@ def prep_continuation_in_filing(session, identifier, payment_id, option): return filing +def prep_notice_of_withdraw_filing( + identifier, + payment_id, + legal_type, + legal_name, + business_id, + withdrawn_filing): + """Return a new Notice of Withdrawal filing prepped for email notification.""" + filing_template = copy.deepcopy(FILING_HEADER) + filing_template['filing']['header']['name'] = 'noticeOfWithdrawal' + + filing_template['filing']['noticeOfWithdrawal'] = 
copy.deepcopy(NOTICE_OF_WITHDRAWAL) + filing_template['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + filing_template['filing']['business'] = { + 'identifier': identifier, + 'legalType': legal_type, + 'legalName': legal_name + } + + # create NoW filing + filing = create_filing( + token=payment_id, + filing_json=filing_template, + business_id=business_id, + ) + # populate NoW related properties + filing.withdrawn_filing_id = withdrawn_filing.id + filing.save() + withdrawn_filing.withdrawal_pending = True + withdrawn_filing.save() + + return filing + + +def create_future_effective_filing( + identifier, + legal_type, + legal_name, + filing_type, + filing_json, + is_temp, + business_id=None): + """Create a future effective filing.""" + filing_template = copy.deepcopy(FILING_HEADER) + filing_template['filing']['header']['name'] = filing_type + future_effective_date = EPOCH_DATETIME + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + if is_temp: + del filing_template['filing']['business'] + new_business_filing_json = copy.deepcopy(filing_json) + new_business_filing_json['nameRequest']['legalType'] = legal_type + filing_template['filing'][filing_type] = new_business_filing_json + filing_template['filing'][filing_type]['contactPoint']['email'] = 'recipient@email.com' + else: + filing_template['filing']['business']['identifier'] = identifier + filing_template['filing']['business'] = { + 'identifier': identifier, + 'legalType': legal_type, + 'legalName': legal_name + } + fe_filing_json = copy.deepcopy(filing_json) + filing_template['filing'][filing_type] = fe_filing_json + + fe_filing = Filing() + fe_filing.filing_date = EPOCH_DATETIME + fe_filing.filing_json = filing_template + fe_filing.save() + fe_filing.payment_token = '123' + fe_filing.payment_completion_date = EPOCH_DATETIME.isoformat() + if is_temp: + fe_filing.temp_reg = identifier + else: + fe_filing.business_id = business_id + fe_filing.effective_date = future_effective_date + fe_filing.save() + + return fe_filing + + class Obj: """Make a custom object hook used by dict_to_obj.""" diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py new file mode 100644 index 0000000000..1aefc71915 --- /dev/null +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py @@ -0,0 +1,82 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for Notice of Withdrawal email processor.""" +from unittest.mock import patch + +import pytest +from legal_api.models import RegistrationBootstrap +from registry_schemas.example_data import ( + ALTERATION_FILING_TEMPLATE, + AMALGAMATION_APPLICATION, + CHANGE_OF_ADDRESS, + CONTINUATION_IN, + DISSOLUTION, + INCORPORATION, +) + +from entity_emailer.email_processors import notice_of_withdrawal_notification +from tests.unit import create_business, create_future_effective_filing, prep_notice_of_withdraw_filing + + +@pytest.mark.parametrize( + 'status, legal_name, legal_type, withdrawn_filing_type, withdrawn_filing_json, is_temp', [ + ('COMPLETED', 'test business', 'BC', 'incorporationApplication', INCORPORATION, True), + ('COMPLETED', '1234567 B.C. INC.', 'BEN', 'continuationIn', CONTINUATION_IN, True), + ('COMPLETED', 'test business', 'CBEN', 'amalgamationApplication', AMALGAMATION_APPLICATION, True), + ('COMPLETED', 'test business', 'BC', 'changeOfAddress', CHANGE_OF_ADDRESS, False), + ('COMPLETED', '1234567 B.C. INC.', 'BEN', 'alteration', ALTERATION_FILING_TEMPLATE, False), + ('COMPLETED', '1234567 B.C. INC.', 'CBEN', 'dissolution', DISSOLUTION, False) + ] +) +def test_notice_of_withdrawal_notification( + app, session, status, legal_name, legal_type, withdrawn_filing_type, withdrawn_filing_json, is_temp): + """Assert that the notice of withdrawal email processor works as expected.""" + business = None + if is_temp: + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + else: + identifier = 'BC1234567' + business = create_business(identifier, legal_type, legal_name) + + business_id = business.id if business else None + # setup withdrawn filing (FE filing) for NoW + fe_filing = create_future_effective_filing( + identifier, legal_type, legal_name, withdrawn_filing_type, withdrawn_filing_json, is_temp, business_id) + now_filing = prep_notice_of_withdraw_filing(identifier, '1', legal_type, legal_name, business_id, fe_filing) + token = 'token' + + # test NoW email processor + with patch.object(notice_of_withdrawal_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: + with patch.object(notice_of_withdrawal_notification, 'get_recipient_from_auth', + return_value='recipient@email.com'): + email = notice_of_withdrawal_notification.process( + {'filingId': now_filing.id, 'type': 'noticeOfWithdrawal', 'option': status}, token + ) + + if is_temp: + assert email['content']['subject'] == 'Notice of Withdrawal filed Successfully' + else: + assert email['content']['subject'] == f'{legal_name} - Notice of Withdrawal filed Successfully' + + assert 'recipient@email.com' in email['recipients'] + assert email['content']['body'] + assert email['content']['attachments'] == [] + assert mock_get_pdfs.call_args[0][0] == token + assert mock_get_pdfs.call_args[0][1]['identifier'] == identifier + assert mock_get_pdfs.call_args[0][1]['legalName'] == legal_name + assert mock_get_pdfs.call_args[0][1]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][2] == now_filing diff --git a/queue_services/entity-emailer/tests/unit/test_tracker.py b/queue_services/entity-emailer/tests/unit/test_tracker.py index 88f837d96b..72b7f82241 100644 --- a/queue_services/entity-emailer/tests/unit/test_tracker.py +++ b/queue_services/entity-emailer/tests/unit/test_tracker.py @@ -355,6 +355,14 @@ 'option': 'COMPLETED', 'filingId': '111222335' } + }), + ('noticeOfWithdrawal_COMPLETED_111222335', + { + 'email': { + 'type': 'noticeOfWithdrawal', 
+ 'option': 'COMPLETED', + 'filingId': '111222335' + } }) ] ) From 17d62cabaf77b91847a990807c2d9f7fd3b73cc8 Mon Sep 17 00:00:00 2001 From: meawong Date: Wed, 5 Feb 2025 08:53:51 -0800 Subject: [PATCH 048/133] 25783 - Add NoW case to Delete Filings Endpoint (#3202) * 25783 - Add NoW case to Delete Filings endpoint and update tests * 25783 - Update unit tests * 25783 - Verify NoW is no longer embedded in unit test * 25783-Update doc string for unit test --- .../business_filings/business_filings.py | 2 +- .../v2/test_business_filings/test_filings.py | 59 +++++++++++++++++++ 2 files changed, 60 insertions(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index c98bb13ed3..c2c0d70013 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -208,7 +208,7 @@ def delete_filings(identifier, filing_id=None): with suppress(Exception): ListFilingResource.delete_from_minio(filing_type, filing_json) - if identifier.startswith('T'): + if identifier.startswith('T') and filing.filing_type != Filing.FILINGS['noticeOfWithdrawal']['name']: bootstrap = RegistrationBootstrap.find_by_identifier(identifier) if bootstrap: deregister_status = RegistrationBootstrapService.deregister_bootstrap(bootstrap) diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index c198cd8689..4be4566a3d 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -808,6 +808,65 @@ def test_delete_filing_in_draft(session, client, jwt): assert rv.status_code == HTTPStatus.OK +def test_delete_draft_now_filing(session, client, jwt): + """Assert that when a NoW from a temporary business is deleted, the business is unlinked and not deleted.""" + # set-up withdrawn boostrap FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + identifier = 'T1Li6MzdrK' + headers = create_header(jwt, [STAFF_ROLE], identifier) + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = 'incorporationApplication' + del json_data['filing']['business'] + temp_bus_filing_json = copy.deepcopy(INCORPORATION) + temp_bus_filing_json['nameRequest']['legalType'] = 'BEN' + json_data['filing']['incorporationApplication'] = temp_bus_filing_json + temp_filing = factory_pending_filing(None, json_data) + temp_filing.temp_reg = identifier + temp_filing.effective_date = future_effective_date + temp_filing.payment_completion_date = datetime.utcnow().isoformat() + temp_filing._status = Filing.Status.DRAFT.value + temp_filing.skip_status_listener = True + temp_filing.save() + withdrawn_filing_id = temp_filing.id + + # set-up notice of withdrawal filing + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + "identifier": identifier, + "legalType": 'BEN' + } + now_json_data['filing']['noticeOfWithdrawal'] = 
copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + now_filing = factory_filing(None, now_json_data) + now_filing.withdrawn_filing_id = withdrawn_filing_id + now_filing.save() + temp_filing.withdrawal_pending = True + temp_filing.save() + + rv = client.delete(f'/api/v2/businesses/{identifier}/filings/{now_filing.id}', + headers=headers + ) + + # validate that the withdrawl_pending flag is set back to False + assert rv.status_code == HTTPStatus.OK + assert temp_filing.withdrawal_pending == False + + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate that no NoW is embedded + assert rv.status_code == HTTPStatus.OK + assert 'noticeOfWithdrawal' not in rv.json['filing'] + def test_delete_coop_ia_filing_in_draft_with_file_in_minio(session, client, jwt, minio_server): """Assert that a draft filing can be deleted.""" From 9ac76f6752552d1390e28c24e5c477509065f605 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Wed, 5 Feb 2025 14:44:52 -0800 Subject: [PATCH 049/133] 25467 colin sync - effective date should be same as filing date if not future effective (#3204) --- colin-api/src/colin_api/models/filing.py | 12 +---- colin-api/src/colin_api/resources/filing.py | 7 ++- colin-api/src/colin_api/services/legal.py | 54 ------------------- legal-api/src/legal_api/models/filing.py | 9 ++++ .../resources/v2/business/colin_sync.py | 1 + 5 files changed, 16 insertions(+), 67 deletions(-) delete mode 100644 colin-api/src/colin_api/services/legal.py diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index c0c7e0e4f6..dd67c92361 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -46,7 +46,6 @@ ) # noqa: I001 from colin_api.resources.db import DB from colin_api.services import flags -from colin_api.services.legal import LegalApiService from colin_api.utils import convert_to_json_date, convert_to_json_datetime, convert_to_pacific_time, convert_to_snake @@ -1402,7 +1401,7 @@ def add_filing(cls, con, filing: Filing, lear_identifier: str) -> int: is_frozen_condition = ( flags.is_on('enable-bc-ccc-ulc') and business['business']['legalType'] != Business.TypeCodes.COOP.value and - cls.is_business_in_lear(lear_identifier) + filing_source == cls.FilingSource.LEAR.value ) current_app.logger.debug(f'Business {lear_identifier}, is_frozen_condition:{is_frozen_condition}') @@ -1418,15 +1417,6 @@ def add_filing(cls, con, filing: Filing, lear_identifier: str) -> int: current_app.logger.error(err.with_traceback(None)) raise err - @classmethod - def is_business_in_lear(cls, lear_identifier: str) -> bool: - """Check if business is in lear.""" - response = LegalApiService.query_business(lear_identifier, slim=True) - - if response.status_code == HTTPStatus.OK: - return True - return False - @classmethod def _get_last_ar_filed_date(cls, header: dict, business: dict): filing_year = header.get('filingYear') diff --git a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index 0fb1a355a9..7b63c39c18 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -216,8 +216,11 @@ def _add_filings(con, json_data: dict, filing_list: list, identifier: str, lear_ filing_body = filing_list[filing_type] filing.filing_sub_type = 
Filing.get_filing_sub_type(filing_type, filing_body) filing.body = filing_body - # get utc lear effective date and convert to pacific time for insert into oracle - filing.effective_date = convert_to_pacific_time(filing.header['learEffectiveDate']) + if filing.header['isFutureEffective']: + # get utc lear effective date and convert to pacific time for insert into oracle + filing.effective_date = convert_to_pacific_time(filing.header['learEffectiveDate']) + else: + filing.effective_date = filing.filing_date if filing_type in ['amalgamationApplication', 'continuationIn', 'incorporationApplication']: filing.business = Business.create_corporation(con, json_data) diff --git a/colin-api/src/colin_api/services/legal.py b/colin-api/src/colin_api/services/legal.py deleted file mode 100644 index 38dad26178..0000000000 --- a/colin-api/src/colin_api/services/legal.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright © 2025 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This class provides the service for legal-api calls.""" -import requests -from flask import current_app - -from colin_api.services.account import AccountService - - -# pylint: disable=too-few-public-methods -class LegalApiService(): - """Provides service to call the legal-api.""" - - @staticmethod - def query_business(identifier: str, slim: bool = False): - """Return a JSON object with business information. - - Args: - identifier (str): The business identifier - slim (bool, optional): If True, requests minimal business data. Defaults to False. - """ - timeout = int(current_app.config.get('ACCOUNT_SVC_TIMEOUT')) - legal_api_url = current_app.config.get('LEGAL_API_URL') - token = AccountService.get_bearer_token() - - try: - url = f'{legal_api_url}/businesses/{identifier}' - if slim: - url += '?slim=true' - # Perform proxy call using the input identifier (e.g. 
BC 123456) - response = requests.get(url, - headers={'Content-Type': 'application/json', 'Authorization': 'Bearer ' + token}, - timeout=timeout - ) - # If the status code is 200 or 404, return the response - if response.status_code in (200, 404): - return response - response.raise_for_status() - - except Exception as err: # pylint: disable=broad-except: - current_app.logger.error(err, exc_info=True) - raise # re-throw the exception after logging diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 4654eeb604..6b95210e79 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -11,6 +11,7 @@ """Filings are legal documents that alter the state of a business.""" # pylint: disable=too-many-lines import copy +from contextlib import suppress from datetime import date, datetime, timezone from enum import Enum from http import HTTPStatus @@ -769,6 +770,14 @@ def _raise_default_lock_exception(): status_code=HTTPStatus.FORBIDDEN ) + @property + def is_future_effective(self) -> bool: + """Return True if the effective date is in the future.""" + with suppress(AttributeError, TypeError): + if self.effective_date > self.payment_completion_date: + return True + return False + @property def is_corrected(self): """Has this filing been corrected.""" diff --git a/legal-api/src/legal_api/resources/v2/business/colin_sync.py b/legal-api/src/legal_api/resources/v2/business/colin_sync.py index 7eae8525c5..5339500f08 100644 --- a/legal-api/src/legal_api/resources/v2/business/colin_sync.py +++ b/legal-api/src/legal_api/resources/v2/business/colin_sync.py @@ -67,6 +67,7 @@ def get_completed_filings_for_colin(): filing_json['filing']['header']['source'] = Filing.Source.LEAR.value filing_json['filing']['header']['date'] = filing.filing_date.isoformat() filing_json['filing']['header']['learEffectiveDate'] = filing.effective_date.isoformat() + filing_json['filing']['header']['isFutureEffective'] = filing.is_future_effective if not filing_json['filing'].get('business'): if filing.transaction_id: From 65f5d0b7086efb452dbcbc8c5329dd7ff6ae5171 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Thu, 6 Feb 2025 09:19:45 -0800 Subject: [PATCH 050/133] 25660 - updates for date and statement (#3197) --- .../add_corrections.ipynb | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/jobs/correction-ben-statement/add_corrections.ipynb b/jobs/correction-ben-statement/add_corrections.ipynb index c64f271382..5bd2322186 100644 --- a/jobs/correction-ben-statement/add_corrections.ipynb +++ b/jobs/correction-ben-statement/add_corrections.ipynb @@ -69,16 +69,14 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Correction created successfully for BC0871147\n", - "Correction created successfully for BC0871183\n", - "Correction created successfully for BC0871186\n" + "Correction created successfully for BC0887565 and correction filing_id is 169063\n" ] } ], @@ -87,6 +85,13 @@ "from corrections_output import correction_businesses\n", "\n", "current_date = datetime.now().date().isoformat()\n", + "formatted_current_date = datetime.now().date().strftime('%B %d, %Y')\n", + "correction_statement = (\"BC benefit company statement contained in notice of articles as required under section \" \n", + "\"51.992 of the Business Corporations Act corrected from 
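The new Filing.is_future_effective property above (exposed to COLIN as the isFutureEffective header in colin_sync.py) is just a guarded comparison: a filing is future effective only when its effective date is strictly later than its payment completion date, and a missing date on either side falls through to False. A self-contained sketch of the same behaviour, using a stub in place of the real model:

from contextlib import suppress
from datetime import datetime, timedelta


class FilingStub:
    """Stand-in for legal_api.models.Filing with only the two fields the property compares."""

    def __init__(self, effective_date, payment_completion_date):
        self.effective_date = effective_date
        self.payment_completion_date = payment_completion_date

    @property
    def is_future_effective(self) -> bool:
        # Same logic as the property in the diff: a None/missing date means "not future effective".
        with suppress(AttributeError, TypeError):
            if self.effective_date > self.payment_completion_date:
                return True
        return False


paid_at = datetime(2025, 2, 5, 18, 0)
assert FilingStub(paid_at + timedelta(days=10), paid_at).is_future_effective   # later -> True
assert not FilingStub(paid_at, paid_at).is_future_effective                    # same instant -> False
assert not FilingStub(None, paid_at).is_future_effective                       # TypeError suppressed -> False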
“This company is a benefit company and, as such, has purposes \"\n", + "\"that include conducting its business in a responsible and sustainable manner and promoting one or more public \"\n", + "\"benefits” to “This company is a benefit company and, as such, is committed to conducting its business in a \"\n", + "\"responsible and sustainable manner and promoting one or more public benefits”\")\n", + "\n", "headers = {\n", " 'Content-Type': 'application/json',\n", " 'Authorization': 'Bearer ' + token\n", @@ -108,16 +113,11 @@ " \"legalType\": \"BC\"\n", " },\n", " \"correction\": {\n", - " \"details\": \"First correction\",\n", + " \"details\": \"BEN Correction statement\",\n", " \"correctedFilingId\": filind_id,\n", " \"correctedFilingType\": \"incorporationApplication\",\n", - " \"comment\": f\"\"\"Correction for Incorporation Application filed on {current_date} \\n\n", - " BC benefit company statement contained in notice of articles as required under section \n", - " 51.992 of the Business Corporations Act corrected from “This company is a benefit company \n", - " and, as such, has purposes that include conducting its business in a responsible and \n", - " sustainable manner and promoting one or more public benefits” to \n", - " “This company is a benefit company and, as such, is committed to conducting its business in \n", - " a responsible and sustainable manner and promoting one or more public benefits”.\"\"\"\n", + " \"comment\": f\"\"\"Correction for Incorporation Application filed on {formatted_current_date}\n", + " {correction_statement}\"\"\"\n", " }\n", " }\n", " }\n", @@ -127,9 +127,10 @@ "\n", " # Check the status code of the response\n", " if rv.status_code == 201:\n", - " print(f\"Correction created successfully for {identifier}\")\n", + " correction_filing_id = rv.json()[\"filing\"][\"header\"][\"filingId\"]\n", + " print(f\"Correction created successfully for {identifier} and correction filing_id is {correction_filing_id}\")\n", " else:\n", - " print(f\"Failed to make POST request. Status code: {rv.status_code}\")\n", + " print(f\"Failed to make POST request. 
Status code: {rv.status_code}: {rv.text}\")\n", " print(rv.text) # Print the error message if the request fails\n", " \n" ] From 93be1ece0b9235c090fdb8cd003827a8b90511ce Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Fri, 7 Feb 2025 07:30:31 +0800 Subject: [PATCH 051/133] 23352 notice_of_withdrawal_filer (#3198) * 23352 notice_of_withdrawal_filer * fix lint issue * fix the skip NoW filing * fix lint issue * fix lint issue * fix typo --- .../filing_processors/notice_of_withdrawal.py | 42 ++++++++++ .../entity-filer/src/entity_filer/worker.py | 10 ++- .../test_notice_of_withdrawal.py | 78 +++++++++++++++++++ 3 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py create mode 100644 queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py new file mode 100644 index 0000000000..cbb92134ed --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py @@ -0,0 +1,42 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""File processing rules and actions for the Notice of Withdrawal filing.""" +import datetime +from typing import Dict + +from legal_api.models import Filing + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components import filings + + +def process( + filing_submission: Filing, + filing: Dict, + filing_meta: FilingMeta +): # pylint: disable=W0613, R0914 + """Render the notice_of_withdrawal onto the model objects.""" + now_filing = filing.get('noticeOfWithdrawal') + + if court_order := now_filing.get('courtOrder'): + filings.update_filing_court_order(filing_submission, court_order) + filing_meta.notice_of_withdrawal = {**filing_meta.notice_of_withdrawal, + 'withdrawnDate': datetime.datetime.utcnow()} + + withdrawn_filing_id = now_filing.get('filingId') + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + + withdrawn_filing._status = Filing.Status.WITHDRAWN.value # pylint: disable=protected-access + withdrawn_filing.withdrawal_pending = False + withdrawn_filing.save_to_session() diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 944ed7849e..1a0ab22ecd 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -66,6 +66,7 @@ court_order, dissolution, incorporation_filing, + notice_of_withdrawal, put_back_off, put_back_on, registrars_notation, @@ -217,10 +218,14 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, filing_submission = filing_core_submission.storage - if filing_core_submission.status == Filing.Status.COMPLETED: + if filing_core_submission.status in [Filing.Status.COMPLETED, Filing.Status.WITHDRAWN]: logger.warning('QueueFiler: Attempting to reprocess business.id=%s, filing.id=%s filing=%s', filing_submission.business_id, filing_submission.id, filing_msg) return None, None + if filing_submission.withdrawal_pending: + logger.warning('QueueFiler: NoW pending for this filing business.id=%s, filing.id=%s filing=%s', + filing_submission.business_id, filing_submission.id, filing_msg) + raise QueueException # convenience flag to set that the envelope is a correction is_correction = filing_core_submission.filing_type == FilingCore.FilingTypes.CORRECTION @@ -322,6 +327,9 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, elif filing.get('agmExtension'): agm_extension.process(filing, filing_meta) + elif filing.get('noticeOfWithdrawal'): + notice_of_withdrawal.process(filing_submission, filing, filing_meta) + elif filing.get('amalgamationApplication'): business, filing_submission, filing_meta = amalgamation_application.process( business, diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py new file mode 100644 index 0000000000..c1637d8921 --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py @@ -0,0 +1,78 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
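Between the guard added to the worker and the new processor above, the Notice of Withdrawal flow follows two rules: a filing that is already COMPLETED or WITHDRAWN is never reprocessed, and a filing whose withdrawal_pending flag is still set is bounced back onto the queue until the NoW itself has been dealt with. A condensed, illustrative sketch of that decision (Filing.Status is the real enum; the exception class and helper name are stand-ins so the snippet is self-contained):

from legal_api.models import Filing


class QueueException(Exception):
    """Stand-in for the worker's own QueueException."""


def classify_incoming_filing(filing_submission, core_status):
    """Illustrative only: mirrors the guards added to process_filing in the worker."""
    if core_status in (Filing.Status.COMPLETED, Filing.Status.WITHDRAWN):
        return 'skip'             # already finished or withdrawn: log a warning and drop the message
    if filing_submission.withdrawal_pending:
        raise QueueException()    # a NoW is pending on this filing: fail the message so it is retried
    return 'process'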
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Unit Tests for the Notice Of Withdrawal filing.""" +import copy +import random +import pytest + +from legal_api.models import Business, Filing +from registry_schemas.example_data import FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import notice_of_withdrawal +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name, withdrawal_pending,withdrawn_filing_status', [ + ('Process the Filing', False, False), + ('Dont process the Filing', False, True), + ('Dont process the Filing', True, False), + ('Dont process the Filing', True, True), +]) +def test_worker_notice_of_withdrawal(session, test_name, withdrawal_pending, withdrawn_filing_status): + """Assert that the notice of withdrawal filing processes correctly.""" + # Setup + identifier = 'BC1234567' + business = create_business(identifier, legal_type='BC') + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + # Create IA filing + ia_filing_json = copy.deepcopy(FILING_HEADER) + ia_filing_json['filing']['business']['identifier'] = identifier + ia_filing_json['filing']['incorporationApplication'] = copy.deepcopy(INCORPORATION) + ia_filing = create_filing(payment_id, ia_filing_json, business_id=business.id) + ia_filing.withdrawal_pending = withdrawal_pending + if withdrawn_filing_status: + ia_filing._status = Filing.Status.WITHDRAWN.value + else: + ia_filing._status = 'PENDING' + ia_filing.skip_status_listener = True + ia_filing.save() + + now_filing_json = copy.deepcopy(FILING_HEADER) + now_filing_json['filing']['business']['identifier'] = identifier + now_filing_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = ia_filing.id + now_filing = create_filing(payment_id, now_filing_json, business_id=business.id) + now_filing.withdrawn_filing_id = ia_filing.id + now_filing.save() + + filing_meta = FilingMeta() + filing_meta.notice_of_withdrawal = {} + + # Test + notice_of_withdrawal.process(now_filing, now_filing_json['filing'], filing_meta) + business.save() + + # Check results + final_ia_filing = Filing.find_by_id(ia_filing.id) + final_now_filing = Filing.find_by_id(now_filing.id) + + assert now_filing_json['filing']['noticeOfWithdrawal']['courtOrder']['orderDetails'] == final_now_filing.order_details + if withdrawal_pending or withdrawn_filing_status: + assert final_ia_filing.status == ia_filing.status + assert final_ia_filing.withdrawal_pending == ia_filing.withdrawal_pending + else: + assert final_ia_filing.status == Filing.Status.WITHDRAWN.value + assert final_ia_filing.withdrawal_pending == False From da984f04d153b281995296eea6236b5a1f138506 Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Fri, 7 Feb 2025 08:11:54 +0800 Subject: [PATCH 052/133] 23352 fix_now_filer_meta_issue (#3206) * 23352 fix_now_filer_meta_issue * fix lint issue --- .../src/entity_filer/filing_processors/notice_of_withdrawal.py | 1 + 
.../tests/unit/filing_processors/test_notice_of_withdrawal.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py index cbb92134ed..ed370b9c17 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py @@ -28,6 +28,7 @@ def process( ): # pylint: disable=W0613, R0914 """Render the notice_of_withdrawal onto the model objects.""" now_filing = filing.get('noticeOfWithdrawal') + filing_meta.notice_of_withdrawal = {} if court_order := now_filing.get('courtOrder'): filings.update_filing_court_order(filing_submission, court_order) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py index c1637d8921..ec36da38e5 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py @@ -59,7 +59,6 @@ def test_worker_notice_of_withdrawal(session, test_name, withdrawal_pending, wit now_filing.save() filing_meta = FilingMeta() - filing_meta.notice_of_withdrawal = {} # Test notice_of_withdrawal.process(now_filing, now_filing_json['filing'], filing_meta) From b8ae6cfa4f0ddd360a21bbd3cbdc66b2bb8f9bfc Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Sat, 8 Feb 2025 02:39:33 +0800 Subject: [PATCH 053/133] 23352 update_filing_status_listen (#3207) * 23352 update_filing_status_listen * clean up * fix unit test * revert filer updates --- legal-api/src/legal_api/models/filing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 6b95210e79..3be6b8d8f6 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -1219,7 +1219,8 @@ def receive_before_change(mapper, connection, target): # pylint: disable=unused # pylint: disable=protected-access if (filing._status in [Filing.Status.AWAITING_REVIEW.value, Filing.Status.CHANGE_REQUESTED.value, - Filing.Status.REJECTED.value] or + Filing.Status.REJECTED.value, + Filing.Status.WITHDRAWN.value] or (filing._status == Filing.Status.APPROVED.value and not filing.payment_token)): return # should not override status in the review process From 20321ae3bdbd7dcd25dbbb4598ddae84127f723c Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Fri, 7 Feb 2025 11:53:09 -0800 Subject: [PATCH 054/133] 25231, 25287 - update put back off sync for limited restoration expiration and update correction output (#3205) * 25231 - update put back off sync for limited restoration ecpiration * 25231 - fix lint * 25872 - fix BEN correction statement output --- colin-api/src/colin_api/models/business.py | 1 + colin-api/src/colin_api/models/filing.py | 9 +++++++++ colin-api/src/colin_api/resources/filing.py | 16 +++++++++++++++- .../add_corrections.ipynb | 7 +++---- .../resources/v2/business/colin_sync.py | 1 + 5 files changed, 29 insertions(+), 5 deletions(-) diff --git a/colin-api/src/colin_api/models/business.py b/colin-api/src/colin_api/models/business.py index c26a54da87..a6509d5889 
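The one-line model change above (commit 053) adds WITHDRAWN to the statuses the filings before-change listener refuses to overwrite, so the status written by the Notice of Withdrawal processor survives later saves of the same row. A minimal sketch of the guard's resulting shape (the helper name is invented; the statuses and payment_token field are the ones in the diff):

from legal_api.models import Filing

LOCKED_STATUSES = {
    Filing.Status.AWAITING_REVIEW.value,
    Filing.Status.CHANGE_REQUESTED.value,
    Filing.Status.REJECTED.value,
    Filing.Status.WITHDRAWN.value,   # newly protected so a withdrawal is never clobbered
}


def listener_should_skip(filing) -> bool:
    """Illustrative only: True when receive_before_change should leave filing._status alone."""
    if filing._status in LOCKED_STATUSES:
        return True
    # An approved filing that has no payment token yet (review flow) is also left untouched.
    return filing._status == Filing.Status.APPROVED.value and not filing.payment_token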
100644 --- a/colin-api/src/colin_api/models/business.py +++ b/colin-api/src/colin_api/models/business.py @@ -69,6 +69,7 @@ class CorpStateTypes(Enum): INVOLUNTARY_DISSOLUTION_NO_AR = 'HDF' # this corp state is also used for Put back off INVOLUNTARY_DISSOLUTION_NO_TR = 'HDT' LIMITED_RESTORATION = 'LRS' + RESTORATION_EXPIRATION = 'EXR' VOLUNTARY_DISSOLUTION = 'HDV' CORPS = [TypeCodes.BCOMP.value, TypeCodes.BC_COMP.value, diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index dd67c92361..8ad22e597f 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -1238,6 +1238,15 @@ def add_involuntary_dissolution_event(cls, con, corp_num, filing_dt, filing_body return None + @classmethod + def add_limited_restoration_expiration_event(cls, con, corp_num, filing_dt) -> int: + """Add limited restoration expiration event .""" + cursor = con.cursor() + event_id = cls._get_event_id(cursor=cursor, corp_num=corp_num, filing_dt=filing_dt, event_type='SYSDL') + Business.update_corp_state(cursor, event_id, corp_num, + Business.CorpStateTypes.RESTORATION_EXPIRATION.value) + return event_id + # pylint: disable=too-many-locals,too-many-statements,too-many-branches,too-many-nested-blocks; @classmethod def add_filing(cls, con, filing: Filing, lear_identifier: str) -> int: diff --git a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index 7b63c39c18..cd4654781f 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -92,7 +92,8 @@ def get(legal_type, identifier, filing_type, filing_sub_type=None): @jwt.requires_roles([COLIN_SVC_ROLE]) def post(legal_type, identifier, **kwargs): """Create a new filing.""" - # pylint: disable=unused-argument,too-many-branches; filing_type is only used for the get + # pylint: disable=too-many-return-statements,unused-argument,too-many-branches; + # filing_type is only used for the get try: if legal_type not in [x.value for x in Business.TypeCodes]: return jsonify({'message': 'Must provide a valid legal type.'}), HTTPStatus.BAD_REQUEST @@ -180,6 +181,19 @@ def post(legal_type, identifier, **kwargs): } }), HTTPStatus.CREATED + # filing will not be created for Limited restoration expiration-Put back off (make business Historical) + # Create an event and update corp state. + if ('putBackOff' in filing_list and json_data['header']['hideInLedger'] is True): + filing_dt = convert_to_pacific_time(json_data['header']['date']) + event_id = Filing.add_limited_restoration_expiration_event(con, identifier, filing_dt) + + con.commit() + return jsonify({ + 'filing': { + 'header': {'colinIds': [event_id]} + } + }), HTTPStatus.CREATED + filings_added = FilingInfo._add_filings(con, json_data, filing_list, identifier, lear_identifier) # success! 
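With hideInLedger joining the isFutureEffective flag in the colin_sync payload, legal-api now sends COLIN both pieces of context the resource above branches on: an expired-limited-restoration putBackOff (hidden in the ledger) becomes an event-only SYSDL/EXR corp-state update, while everything else is written as a real COLIN filing. A hypothetical trimmed header block, with invented values, might look like:

# Hypothetical example of the header now sent to COLIN -- every value below is invented.
header = {
    'source': 'LEAR',
    'date': '2025-02-07T10:15:00-08:00',             # the filing date
    'learEffectiveDate': '2025-02-07T18:15:00+00:00',
    'isFutureEffective': False,                       # not future effective -> COLIN uses the filing date
    'hideInLedger': True,                             # putBackOff for an expired limited restoration -> event-only sync
}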
commit the db changes diff --git a/jobs/correction-ben-statement/add_corrections.ipynb b/jobs/correction-ben-statement/add_corrections.ipynb index 5bd2322186..63136e0e70 100644 --- a/jobs/correction-ben-statement/add_corrections.ipynb +++ b/jobs/correction-ben-statement/add_corrections.ipynb @@ -69,14 +69,14 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Correction created successfully for BC0887565 and correction filing_id is 169063\n" + "Correction created successfully for BC0887594 and correction filing_id is 170235\n" ] } ], @@ -116,8 +116,7 @@ " \"details\": \"BEN Correction statement\",\n", " \"correctedFilingId\": filind_id,\n", " \"correctedFilingType\": \"incorporationApplication\",\n", - " \"comment\": f\"\"\"Correction for Incorporation Application filed on {formatted_current_date}\n", - " {correction_statement}\"\"\"\n", + " \"comment\": f\"\"\"Correction for Incorporation Application filed on {formatted_current_date} \\n{correction_statement}\"\"\"\n", " }\n", " }\n", " }\n", diff --git a/legal-api/src/legal_api/resources/v2/business/colin_sync.py b/legal-api/src/legal_api/resources/v2/business/colin_sync.py index 5339500f08..f87358f6c8 100644 --- a/legal-api/src/legal_api/resources/v2/business/colin_sync.py +++ b/legal-api/src/legal_api/resources/v2/business/colin_sync.py @@ -68,6 +68,7 @@ def get_completed_filings_for_colin(): filing_json['filing']['header']['date'] = filing.filing_date.isoformat() filing_json['filing']['header']['learEffectiveDate'] = filing.effective_date.isoformat() filing_json['filing']['header']['isFutureEffective'] = filing.is_future_effective + filing_json['filing']['header']['hideInLedger'] = filing.hide_in_ledger if not filing_json['filing'].get('business'): if filing.transaction_id: From a0f774d4548f631a037bb7e97f370471e68d9dba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 7 Feb 2025 14:23:58 -0800 Subject: [PATCH 055/133] Update version.py (#3209) --- legal-api/src/legal_api/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 90f3a929fd..afaf5d5c3f 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.138.0' # pylint: disable=invalid-name +__version__ = '2.139.0' # pylint: disable=invalid-name From cfdb0e1891106f8b342de31cc912dba0b25d9753 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 7 Feb 2025 14:24:49 -0800 Subject: [PATCH 056/133] Update version.py (#3210) --- colin-api/src/colin_api/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/colin-api/src/colin_api/version.py b/colin-api/src/colin_api/version.py index 0fbd4e4227..e139b6fe29 100644 --- a/colin-api/src/colin_api/version.py +++ b/colin-api/src/colin_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.137.0' # pylint: disable=invalid-name +__version__ = '2.139.0' # pylint: disable=invalid-name From ff678645ed408644928d6f25cf50c4d9c61cdb2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 7 Feb 2025 14:30:05 -0800 Subject: [PATCH 057/133] Update version.py (#3211) --- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 2fdc34f03b..e139b6fe29 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.136.0' # pylint: disable=invalid-name +__version__ = '2.139.0' # pylint: disable=invalid-name From 9cb623d11b69e7404582fef1b823d2f27eb631fd Mon Sep 17 00:00:00 2001 From: Kial Date: Mon, 10 Feb 2025 10:17:15 -0500 Subject: [PATCH 058/133] 22411 btr lear filing integration (#3208) * Backend - transparency register filing Signed-off-by: Kial Jinnah * TR filing PM test Signed-off-by: Kial Jinnah * PR comments + lint Signed-off-by: Kial Jinnah * test fixes Signed-off-by: Kial Jinnah * Test tweaks Signed-off-by: Kial Jinnah * Test tweaks Signed-off-by: Kial Jinnah * test tweak - load_ledger Signed-off-by: Kial Jinnah * Update reqs schema version Signed-off-by: Kial Jinnah --------- Signed-off-by: Kial Jinnah --- legal-api/requirements.txt | 2 +- legal-api/src/legal_api/core/filing.py | 6 + legal-api/src/legal_api/core/meta/filing.py | 50 +- legal-api/src/legal_api/models/filing.py | 59 +- .../business_filings/business_filings.py | 3 + legal-api/src/legal_api/services/authz.py | 23 + .../validations/transparency_register.py | 43 ++ .../filings/validations/validation.py | 4 + .../postman/legal-api.postman_collection.json | 725 +++++++++++------- .../tests/unit/core/test_filing_ledger.py | 5 + .../tests/unit/services/test_authorization.py | 122 ++- .../src/business_pay/resources/pay_filer.py | 2 +- 12 files changed, 718 insertions(+), 326 deletions(-) create mode 100644 legal-api/src/legal_api/services/filings/validations/transparency_register.py diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index 7c1a3e152d..3760bb7eac 100755 --- a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 -git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index f2768d133d..1ad8757471 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -97,6 +97,7 @@ class FilingTypes(str, Enum): RESTORATIONAPPLICATION = 'restorationApplication' SPECIALRESOLUTION = 'specialResolution' TRANSITION = 'transition' + TRANSPARENCY_REGISTER = 'transparencyRegister' class FilingTypesCompact(str, Enum): """Render enum for filing types with sub-types.""" @@ -110,6 +111,9 @@ class FilingTypesCompact(str, Enum): AMALGAMATION_APPLICATION_REGULAR = 'amalgamationApplication.regular' AMALGAMATION_APPLICATION_VERTICAL = 'amalgamationApplication.vertical' AMALGAMATION_APPLICATION_HORIZONTAL = 'amalgamationApplication.horizontal' + TRANSPARENCY_REGISTER_ANNUAL = 'transparencyRegister.annual' + TRANSPARENCY_REGISTER_CHANGE = 'transparencyRegister.change' + TRANSPARENCY_REGISTER_INITIAL = 'transparencyRegister.initial' NEW_BUSINESS_FILING_TYPES: Final = [ FilingTypes.AMALGAMATIONAPPLICATION, @@ -531,6 +535,7 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m Filing.FilingTypes.CONTINUATIONOUT.value, 
Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, + Filing.FilingTypes.TRANSPARENCY_REGISTER.value, ] if (filing.status in (Filing.Status.PAID, Filing.Status.WITHDRAWN) or (filing.status == Filing.Status.COMPLETED and @@ -568,6 +573,7 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m Filing.FilingTypes.CONTINUATIONOUT.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, + Filing.FilingTypes.TRANSPARENCY_REGISTER.value, ] if filing.filing_type not in no_legal_filings: documents['documents']['legalFilings'] = \ diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index a1f0899a14..f7f67b14c3 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -645,7 +645,55 @@ class FilingTitles(str, Enum): 'outputs': ['noticeOfArticles'] }, ] - } + }, + 'transparencyRegister': { + 'name': 'transparencyRegister', + 'annual': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register - Annual Filing', + 'displayName': 'Transparency Register - Annual Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'change': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'displayName': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'initial': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'displayName': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + } + }, } diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 3be6b8d8f6..5b07081807 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -64,10 +64,12 @@ class Status(str, Enum): class Source(Enum): """Render an Enum of the Filing Sources.""" + BTR = 'BTR' COLIN = 'COLIN' LEAR = 'LEAR' # TODO: get legal types from defined class once table is made (getting it from Business causes circ import) + # TODO: add filing types for btr FILINGS = { 'affidavit': { 'name': 'affidavit', @@ -446,6 +448,51 @@ class Source(Enum): 'CCC': 'TRANS' } }, + 'transparencyRegister': { + 'name': 'transparencyRegister', + 'annual': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register - Annual Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'change': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'initial': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + } 
+ }, # changing the structure of fee code in courtOrder/registrarsNotation/registrarsOrder # for all the business the fee code remain same as NOFEE (Staff) @@ -462,7 +509,8 @@ class Source(Enum): # breaking and more testing was req'd so did not make refactor when introducing this dictionary. 'dissolution': 'dissolutionType', 'restoration': 'type', - 'amalgamationApplication': 'type' + 'amalgamationApplication': 'type', + 'transparencyRegister': 'type' } __tablename__ = 'filings' @@ -1051,7 +1099,14 @@ def get_most_recent_legal_filing(business_id: str, filing_type: str = None): def get_completed_filings_for_colin(limit=20, offset=0): """Return the filings based on limit and offset.""" from .business import Business # noqa: F401; pylint: disable=import-outside-toplevel - excluded_filings = ['lear_epoch', 'adminFreeze', 'courtOrder', 'registrarsNotation', 'registrarsOrder'] + excluded_filings = [ + 'lear_epoch', + 'adminFreeze', + 'courtOrder', + 'registrarsNotation', + 'registrarsOrder', + 'transparencyRegister' + ] excluded_businesses = [Business.LegalTypes.SOLE_PROP.value, Business.LegalTypes.PARTNERSHIP.value] filings = db.session.query(Filing).join(Business). \ filter( diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index c2c0d70013..c2213e978f 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -901,6 +901,9 @@ def create_invoice(business: Business, # pylint: disable=too-many-locals,too-ma corp_type = business.legal_type if business.legal_type else \ filing.json['filing']['business'].get('legalType') + if filing.filing_type == Filing.FILINGS['transparencyRegister']['name']: + corp_type = 'BTR' + payload = { 'businessInfo': { 'businessIdentifier': f'{business.identifier}', diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index a1275cf654..4d9589f6a2 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -482,6 +482,29 @@ def get_allowable_filings_dict(): }, 'transition': { 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] + }, + CoreFiling.FilingTypes.TRANSPARENCY_REGISTER.value: { + 'annual': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + }, + 'change': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + }, + 'initial': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + } } }, Business.State.HISTORICAL: {} diff --git a/legal-api/src/legal_api/services/filings/validations/transparency_register.py b/legal-api/src/legal_api/services/filings/validations/transparency_register.py new file mode 100644 index 0000000000..8989dac050 --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/transparency_register.py @@ -0,0 +1,43 @@ +# Copyright © 2025 Province of 
British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""Validation for the Transparency Register filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Transparency Register filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/validation.py b/legal-api/src/legal_api/services/filings/validations/validation.py index 3faaab272e..3681b5d93e 100644 --- a/legal-api/src/legal_api/services/filings/validations/validation.py +++ b/legal-api/src/legal_api/services/filings/validations/validation.py @@ -49,6 +49,7 @@ from .restoration import validate as restoration_validate from .schemas import validate_against_schema from .special_resolution import validate as special_resolution_validate +from .transparency_register import validate as transparency_register_validate def validate(business: Business, # pylint: disable=too-many-branches,too-many-statements @@ -190,6 +191,9 @@ def validate(business: Business, # pylint: disable=too-many-branches,too-many-s elif k == Filing.FILINGS['putBackOff'].get('name'): err = put_back_off_validate(business, filing_json) + elif k == Filing.FILINGS['transparencyRegister'].get('name'): + err = transparency_register_validate(filing_json) # pylint: disable=assignment-from-none + if err: return err diff --git a/legal-api/tests/postman/legal-api.postman_collection.json b/legal-api/tests/postman/legal-api.postman_collection.json index 5097bef2bf..e0d5419fbe 100644 --- a/legal-api/tests/postman/legal-api.postman_collection.json +++ b/legal-api/tests/postman/legal-api.postman_collection.json @@ -1,10 +1,11 @@ { "info": { - "_postman_id": "109b8aad-eda5-442c-a7e3-981b00ada6f8", + "_postman_id": 
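The Transparency Register pieces above fit together as follows: all three sub-types (initial, change, annual) share the REGSIGIN fee code, the filing type is excluded from the COLIN sync query, and create_invoice swaps the corp type sent to the payment service to 'BTR' for this one filing. A small sketch of that override (the helper name is invented; the dictionary lookups are the entries added in this commit):

from legal_api.models import Filing


def invoice_corp_type(business_legal_type: str, filing_type: str) -> str:
    """Illustrative only: transparencyRegister filings are billed under the BTR corp type."""
    if filing_type == Filing.FILINGS['transparencyRegister']['name']:
        return 'BTR'
    return business_legal_type


# Every corp class maps to the same fee code for each sub-type:
assert Filing.FILINGS['transparencyRegister']['annual']['codes']['BEN'] == 'REGSIGIN'
assert invoice_corp_type('BEN', 'transparencyRegister') == 'BTR'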
"9e1398e3-eb5e-4cb5-b4a8-9f077cf1b284", "name": "legal-api", "description": "version=2.8 - Legal API Postman Tests", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "484083" + "_exporter_id": "6835935", + "_collection_link": "https://warped-escape-616276.postman.co/workspace/bc-registries~8ef8e652-492a-4d19-b978-d4f0da255b2c/collection/6835935-9e1398e3-eb5e-4cb5-b4a8-9f077cf1b284?action=share&source=collection_link&creator=6835935" }, "item": [ { @@ -196,9 +197,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -255,9 +255,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -314,9 +313,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -373,9 +371,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -432,9 +429,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -491,9 +487,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -541,9 +536,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "url": { @@ -582,9 +576,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "url": { @@ -2332,9 +2325,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -2380,9 +2372,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -2637,14 +2629,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -2684,14 +2675,14 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -3244,7 +3235,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -3281,8 +3271,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4029,9 +4019,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": 
"application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -4066,9 +4055,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -4623,7 +4612,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -4660,8 +4648,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4833,9 +4821,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -4875,8 +4862,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4983,9 +4970,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5025,8 +5011,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -5541,7 +5527,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -5578,8 +5563,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -5747,9 +5732,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5789,8 +5773,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -5897,9 +5881,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5939,8 +5922,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6058,9 +6041,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6095,8 +6077,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6216,9 +6198,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6253,8 +6234,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6374,9 +6355,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6411,8 +6391,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6530,9 +6510,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ 
-6567,8 +6546,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -7430,7 +7409,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -7519,7 +7497,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -7799,8 +7776,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -7892,8 +7869,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -7972,8 +7949,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8062,8 +8039,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8253,9 +8230,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8303,9 +8279,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -8684,9 +8660,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8734,9 +8709,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -8860,9 +8835,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8910,9 +8884,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9034,9 +9008,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -9089,9 +9062,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9213,14 +9186,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -9266,14 +9238,14 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -9491,9 +9463,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -9541,9 +9512,9 @@ "header": [ { "key": 
"Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9958,7 +9929,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -9995,8 +9965,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -10122,9 +10092,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "url": { @@ -10156,8 +10125,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -10298,9 +10267,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -10401,9 +10369,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -10473,7 +10440,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" }, @@ -10582,7 +10548,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10676,7 +10641,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10713,8 +10677,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -10856,7 +10820,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10893,8 +10856,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -11036,7 +10999,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11130,7 +11092,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11224,7 +11185,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11316,7 +11276,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11410,7 +11369,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11447,7 +11405,7 @@ "name": "Special Resolution", "item": [ { - "name": "post - success - CP3490248", + "name": "post - fail, missing resolution text - CP3490248", "event": [ { "listen": "test", @@ -11458,8 +11416,8 @@ "", "var jsonData = pm.response.json();", "", - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", + "pm.test(\"Status code is 400\", function () {", + " pm.response.to.have.status(400);", "});", "", "pm.test('should return JSON', function () {", @@ -11467,11 +11425,15 @@ "});", "", "", - "pm.test(\"Returns special resolution filing.\", () => {", + "pm.test(\"Returns dissolution filing.\", () => {", " pm.expect(jsonData.filing).to.exist", " pm.expect(jsonData.filing.business).to.exist", " pm.expect(jsonData.filing.specialResolution).to.exist", " 
pm.expect(jsonData.filing.header).to.exist", + " pm.expect(jsonData.errors).to.exist", + " pm.expect(jsonData.errors.length).to.eq(1)", + " pm.expect(jsonData.errors[0].error).to.eq('Resolution must be provided.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolution')", "});", "", "" @@ -11505,14 +11467,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11537,7 +11498,7 @@ "response": [] }, { - "name": "post - fail, missing resolution text - CP3490248", + "name": "post - fail, missing resolution date - CP3490248 Copy", "event": [ { "listen": "test", @@ -11564,8 +11525,8 @@ " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Resolution must be provided.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolution')", + " pm.expect(jsonData.errors[0].error).to.eq('Resolution date is required.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolutionDate')", "});", "", "" @@ -11599,14 +11560,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"signingDate\": 
\"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11631,13 +11591,14 @@ "response": [] }, { - "name": "post - fail, missing resolution date - CP3490248 Copy", + "name": "post - fail, resolution date future - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", + "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11657,9 +11618,8 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Resolution date is required.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolutionDate')", + " pm.expect(jsonDataText).to.include('Resolution date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", "});", "", "" @@ -11672,7 +11632,11 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", + "", + "var future = new Date()", + "future.setDate(future.getDate() + 10)", + "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11693,14 +11657,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"{{future}}\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11725,7 +11688,7 @@ "response": [] }, { - "name": "post - fail, resolution date future - CP3490248", + "name": "post - fail, resolution date < incorp date - CP3490248", "event": [ { "listen": "test", @@ -11752,7 +11715,7 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " 
pm.expect(jsonDataText).to.include('Resolution date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('Resolution date cannot be earlier than the incorporation date.')", " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", "});", "", @@ -11791,14 +11754,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"{{future}}\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"1970-01-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11823,14 +11785,13 @@ "response": [] }, { - "name": "post - fail, resolution date < incorp date - CP3490248", + "name": "post - fail, missing signing date - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", - "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11850,8 +11811,9 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Resolution date cannot be earlier than the incorporation date.')", - " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", + " pm.expect(jsonData.errors.length).to.eq(1)", + " pm.expect(jsonData.errors[0].error).to.eq('Signing date is required.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/signingDate')", "});", "", "" @@ -11864,11 +11826,7 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", - "", - "var future = new Date()", - "future.setDate(future.getDate() + 10)", - "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11889,14 +11847,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n 
\"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"1970-01-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11921,13 +11878,14 @@ "response": [] }, { - "name": "post - fail, missing signing date - CP3490248", + "name": "post - fail, signing date future - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", + "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11947,9 +11905,8 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Signing date is required.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/signingDate')", + " pm.expect(jsonDataText).to.include('Signing date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", "});", "", "" @@ -11962,7 +11919,11 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", + "", + "var future = new Date()", + "future.setDate(future.getDate() + 10)", + "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11983,14 +11944,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n 
\"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"{{future}}\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -12015,7 +11975,7 @@ "response": [] }, { - "name": "post - fail, signing date future - CP3490248", + "name": "post - fail, signing date < resolution date - CP3490248", "event": [ { "listen": "test", @@ -12042,7 +12002,7 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Signing date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('Signing date cannot be before the resolution date.')", " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", "});", "", @@ -12081,105 +12041,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "value": "application/json", - "type": "text" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"{{future}}\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" - }, - "url": { - "raw": "{{url}}/api/v1/businesses/:id/filings", - "host": [ - "{{url}}" - ], - "path": [ - "api", - "v1", - "businesses", - ":id", - "filings" - ], - "variable": [ - { - "key": "id", - "value": "CP3490248" - } - ] - } - }, - "response": [] - }, - { - "name": "post - fail, signing date < resolution date - CP3490248", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "var jsonData = pm.response.json()", - "const jsonDataText = pm.response.text()", - "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", - "", - "var jsonData = pm.response.json();", - "", - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", - "});", - "", - "pm.test('should return JSON', function () {", - " pm.response.to.have.header('Content-Type', 'application/json');", - "});", - "", - "", - "pm.test(\"Returns dissolution filing.\", () => {", - " pm.expect(jsonData.filing).to.exist", - " pm.expect(jsonData.filing.business).to.exist", - " pm.expect(jsonData.filing.specialResolution).to.exist", - " pm.expect(jsonData.filing.header).to.exist", - " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Signing date cannot be before the resolution date.')", - " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", - "});", - "", - "" - ], - "type": "text/javascript" - } - }, - { - "listen": "prerequest", - "script": { - "exec": [ - "var today = new Date()", - "pm.environment.set(\"today\", 
today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", - "", - "var future = new Date()", - "future.setDate(future.getDate() + 10)", - "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" - ], - "type": "text/javascript" - } - } - ], - "request": { - "auth": { - "type": "bearer", - "bearer": [ - { - "key": "token", - "value": "{{token}}", - "type": "string" - } - ] - }, - "method": "POST", - "header": [ - { - "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -12278,7 +12139,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -12377,7 +12237,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -17436,7 +17295,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -17473,8 +17331,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -18103,6 +17961,281 @@ "response": [] } ] + }, + { + "name": "Transparency Register", + "item": [ + { + "name": "initial", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:31:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"initial\",\n \"ledgerReferenceNumber\": \"1234rtfjf44544fkk\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + "{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + }, + { + "name": "annual", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + 
"pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:40:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"annual\",\n \"ledgerReferenceNumber\": \"2234rtfjf44544fka\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + "{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + }, + { + "name": "change", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": 
\"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:50:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"change\",\n \"ledgerReferenceNumber\": \"3234rtfjf44544fkb\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + "{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + } + ] } ], "event": [ @@ -18125,4 +18258,4 @@ } } ] -} +} \ No newline at end of file diff --git a/legal-api/tests/unit/core/test_filing_ledger.py b/legal-api/tests/unit/core/test_filing_ledger.py index dd01f67033..c8fefa5b8c 100644 --- a/legal-api/tests/unit/core/test_filing_ledger.py +++ b/legal-api/tests/unit/core/test_filing_ledger.py @@ -42,6 +42,11 @@ def load_ledger(business, founding_date): elif filing_meta['name'] == 'amalgamationApplication': filing['filing']['amalgamationApplication'] = {} filing['filing']['amalgamationApplication']['type'] = 'regular' + elif filing_meta['name'] == 'transparencyRegister': + filing['filing']['transparencyRegister'] = { + 'type': 'initial', + 'ledgerReferenceNumber': '123ewd2' + } f = factory_completed_filing(business, filing, filing_date=founding_date + datedelta.datedelta(months=i)) for c in range(i): comment = Comment() diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index e8dc133a7e..71b9c3e229 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -161,6 +161,9 @@ class FilingKey(str, Enum): AMALGAMATION_VERTICAL = 'AMALGAMATION_VERTICAL' AMALGAMATION_HORIZONTAL = 'AMALGAMATION_HORIZONTAL' NOTICE_OF_WITHDRAWAL = 'NOTICE_OF_WITHDRAWAL' + TRANSPARENCY_REGISTER_ANNUAL = 'TRANSPARENCY_REGISTER_ANNUAL' + TRANSPARENCY_REGISTER_CHANGE = 'TRANSPARENCY_REGISTER_CHANGE' + TRANSPARENCY_REGISTER_INITIAL = 'TRANSPARENCY_REGISTER_INITIAL' EXPECTED_DATA = { @@ -236,7 +239,10 @@ class FilingKey(str, Enum): FilingKey.AMALGAMATION_REGULAR: {'name': 'amalgamationApplication', 'type': 'regular', 'displayName': 'Amalgamation Application (Regular)', 'feeCode': 'AMALR'}, FilingKey.AMALGAMATION_VERTICAL: {'name': 'amalgamationApplication', 'type': 'vertical', 'displayName': 'Amalgamation Application Short-form (Vertical)', 'feeCode': 'AMALV'}, FilingKey.AMALGAMATION_HORIZONTAL: {'name': 'amalgamationApplication', 'type': 'horizontal', 'displayName': 'Amalgamation Application Short-form (Horizontal)', 'feeCode': 'AMALH'}, - FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', 'name': 'noticeOfWithdrawal'} + FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', 'name': 'noticeOfWithdrawal'}, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, } EXPECTED_DATA_CONT_IN = { @@ -317,7 +323,10 @@ class FilingKey(str, Enum): 
'displayName': 'Amalgamation Application Short-form (Horizontal)', 'feeCode': None}, FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', - 'name': 'noticeOfWithdrawal'} + 'name': 'noticeOfWithdrawal'}, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, } BLOCKER_FILING_STATUSES = factory_incomplete_statuses() @@ -571,11 +580,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('user_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], ['agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', - {'dissolution': ['voluntary']}, 'incorporationApplication', 'transition']), + {'dissolution': ['voluntary']}, 'incorporationApplication', 'transition', {'transparencyRegister': ['annual','change','initial']}]), ('user_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], ['agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationIn', - {'dissolution': ['voluntary']}, 'transition']), + {'dissolution': ['voluntary']}, 'transition', {'transparencyRegister': ['annual','change','initial']}]), ('user_active_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('user_active_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], ['changeOfRegistration', {'dissolution': ['voluntary']}, 'registration']), @@ -1012,7 +1021,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, @@ -1026,7 +1038,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, @@ -1307,7 +1322,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + 
FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, @@ -1321,7 +1339,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, @@ -1612,13 +1633,19 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION])), @@ -1721,10 +1748,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me # active business - general user ('general_user_cp', Business.State.ACTIVE, ['CP'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], - BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.TRANSITION, ])), + BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, - expected_lookup_continue_in_corps([FilingKey.TRANSITION, ])), + expected_lookup_continue_in_corps([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), @@ -1841,10 +1874,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, []), ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, - expected_lookup([FilingKey.TRANSITION])), + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, 
+ FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continu_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, - expected_lookup_continue_in_corps([FilingKey.TRANSITION])), + expected_lookup_continue_in_corps([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, @@ -1950,10 +1989,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me # active business - general user ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, - expected_lookup([FilingKey.TRANSITION])), + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_usere_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, - expected_lookup([FilingKey.TRANSITION])) + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ] ) def test_allowed_filings_blocker_filing_amalgamations(monkeypatch, app, session, jwt, test_name, state, @@ -2078,7 +2123,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, FilingKey.AGM_LOCATION_CHANGE, @@ -2091,7 +2139,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], []), @@ -2299,7 +2350,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_corps_unaffected2', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], ['restoration', 'restoration'], ['limitedRestoration', 'limitedRestorationExtension'], @@ -2313,7 +2367,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; 
mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps_unaffected', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], [None, 'restoration'], @@ -2329,7 +2386,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps_unaffected2', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], ['restoration', 'restoration'], @@ -2344,7 +2404,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc_unaffected', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], ['restoration', 'restoration', None, 'restoration'], ['limitedRestoration', 'limitedRestorationExtension', None, 'fullRestoration'], []), @@ -2682,7 +2745,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION])), @@ -2837,13 +2903,19 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], None, expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], None, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], None, expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, diff --git a/queue_services/business-pay/src/business_pay/resources/pay_filer.py b/queue_services/business-pay/src/business_pay/resources/pay_filer.py index 30fb3d66d6..b439d6f90d 100644 --- a/queue_services/business-pay/src/business_pay/resources/pay_filer.py +++ b/queue_services/business-pay/src/business_pay/resources/pay_filer.py @@ -109,7 +109,7 @@ async def worker(): logger.debug(f"Removed From Queue: no payment info in ce: {str(ce)}") return {}, 
HTTPStatus.OK - if payment_token.corp_type_code in ["MHR", "BTR"]: + if payment_token.corp_type_code in ["MHR"]: logger.debug( f"ignoring message for corp_type_code:{payment_token.corp_type_code}, {str(ce)}") return {}, HTTPStatus.OK From 9add09d30f18f5f5b34e46e93ba00522baf83a15 Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Tue, 11 Feb 2025 00:32:54 +0800 Subject: [PATCH 059/133] 23352 update fe filing meta (#3212) * 23352 update_fe_filing_meta * add logger * fix line issue * update the test * clean up --- .../filing_processors/notice_of_withdrawal.py | 9 ++- .../test_notice_of_withdrawal.py | 71 +++++++++---------- .../tests/unit/test_worker/test_worker.py | 45 +++++++++++- 3 files changed, 85 insertions(+), 40 deletions(-) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py index ed370b9c17..5451e3d789 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py @@ -15,6 +15,7 @@ import datetime from typing import Dict +from entity_queue_common.service_utils import logger from legal_api.models import Filing from entity_filer.filing_meta import FilingMeta @@ -28,16 +29,18 @@ def process( ): # pylint: disable=W0613, R0914 """Render the notice_of_withdrawal onto the model objects.""" now_filing = filing.get('noticeOfWithdrawal') - filing_meta.notice_of_withdrawal = {} + logger.debug('start notice_of_withdrawal filing process, noticeOfWithdrawal: %s', now_filing) if court_order := now_filing.get('courtOrder'): filings.update_filing_court_order(filing_submission, court_order) - filing_meta.notice_of_withdrawal = {**filing_meta.notice_of_withdrawal, - 'withdrawnDate': datetime.datetime.utcnow()} withdrawn_filing_id = now_filing.get('filingId') withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + logger.debug('withdrawn_filing_id: %s', withdrawn_filing.id) withdrawn_filing._status = Filing.Status.WITHDRAWN.value # pylint: disable=protected-access withdrawn_filing.withdrawal_pending = False + withdrawn_filing_meta_data = withdrawn_filing.meta_data if withdrawn_filing.meta_data else {} + withdrawn_filing._meta_data = {**withdrawn_filing_meta_data, # pylint: disable=protected-access + 'withdrawnDate': f'{datetime.datetime.utcnow()}'} withdrawn_filing.save_to_session() diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py index ec36da38e5..68dcb1cb3a 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py @@ -13,65 +13,64 @@ # limitations under the License. 
"""The Unit Tests for the Notice Of Withdrawal filing.""" import copy +import datetime import random import pytest -from legal_api.models import Business, Filing -from registry_schemas.example_data import FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL +from legal_api.models import Filing +from legal_api.services import RegistrationBootstrapService +from registry_schemas.example_data import ALTERATION, FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import notice_of_withdrawal from tests.unit import create_business, create_filing -@pytest.mark.parametrize('test_name, withdrawal_pending,withdrawn_filing_status', [ - ('Process the Filing', False, False), - ('Dont process the Filing', False, True), - ('Dont process the Filing', True, False), - ('Dont process the Filing', True, True), +@pytest.mark.parametrize('test_name,filing_type,filing_template,identifier', [ + ('IA Withdrawn Filing', 'incorporationApplication', INCORPORATION, 'TJO4XI2qMo'), + ('alteration Withdrawn Filing', 'alteration', ALTERATION, 'BC1234567') ]) -def test_worker_notice_of_withdrawal(session, test_name, withdrawal_pending, withdrawn_filing_status): +def test_worker_notice_of_withdrawal(session, test_name, filing_type, filing_template, identifier): """Assert that the notice of withdrawal filing processes correctly.""" # Setup - identifier = 'BC1234567' - business = create_business(identifier, legal_type='BC') payment_id = str(random.SystemRandom().getrandbits(0x58)) - # Create IA filing - ia_filing_json = copy.deepcopy(FILING_HEADER) - ia_filing_json['filing']['business']['identifier'] = identifier - ia_filing_json['filing']['incorporationApplication'] = copy.deepcopy(INCORPORATION) - ia_filing = create_filing(payment_id, ia_filing_json, business_id=business.id) - ia_filing.withdrawal_pending = withdrawal_pending - if withdrawn_filing_status: - ia_filing._status = Filing.Status.WITHDRAWN.value + # Create withdrawn_filing + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['business']['legalType'] = 'BC' + withdrawn_filing_json['filing']['business']['identifier'] = identifier + withdrawn_filing_json['filing'][filing_type] = copy.deepcopy(filing_template) + if identifier.startswith('T'): + business = RegistrationBootstrapService.create_bootstrap(account=28) + withdrawn_filing = create_filing(token=payment_id, json_filing=withdrawn_filing_json, bootstrap_id=business.identifier) else: - ia_filing._status = 'PENDING' - ia_filing.skip_status_listener = True - ia_filing.save() + business = create_business(identifier, legal_type='BC') + withdrawn_filing = create_filing(payment_id, withdrawn_filing_json, business_id=business.id) + withdrawn_filing.payment_completion_date = datetime.datetime.utcnow() # for setting the filing status PAID + withdrawn_filing._meta_data = {} + withdrawn_filing.save() + # Create NoW filing now_filing_json = copy.deepcopy(FILING_HEADER) - now_filing_json['filing']['business']['identifier'] = identifier + now_filing_json['filing']['business']['identifier'] = business.identifier now_filing_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) - now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = ia_filing.id - now_filing = create_filing(payment_id, now_filing_json, business_id=business.id) - now_filing.withdrawn_filing_id = ia_filing.id + now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + now_filing = 
create_filing(payment_id, now_filing_json) + now_filing.withdrawn_filing_id = withdrawn_filing.id now_filing.save() - filing_meta = FilingMeta() - + + assert withdrawn_filing.status == Filing.Status.PAID.value + # Test notice_of_withdrawal.process(now_filing, now_filing_json['filing'], filing_meta) - business.save() - + withdrawn_filing.save() + # Check results - final_ia_filing = Filing.find_by_id(ia_filing.id) + final_withdrawn_filing = Filing.find_by_id(withdrawn_filing.id) final_now_filing = Filing.find_by_id(now_filing.id) assert now_filing_json['filing']['noticeOfWithdrawal']['courtOrder']['orderDetails'] == final_now_filing.order_details - if withdrawal_pending or withdrawn_filing_status: - assert final_ia_filing.status == ia_filing.status - assert final_ia_filing.withdrawal_pending == ia_filing.withdrawal_pending - else: - assert final_ia_filing.status == Filing.Status.WITHDRAWN.value - assert final_ia_filing.withdrawal_pending == False + assert final_withdrawn_filing.status == Filing.Status.WITHDRAWN.value + assert final_withdrawn_filing.withdrawal_pending == False + assert final_withdrawn_filing.meta_data.get('withdrawnDate') diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py index 1b2b656da8..5a2188fd3e 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py @@ -14,6 +14,8 @@ """The Test Suites to ensure that the worker is operating correctly.""" import copy import datetime +from datetime import timezone +from http import HTTPStatus import random from unittest.mock import patch @@ -26,12 +28,14 @@ from registry_schemas.example_data import ( ANNUAL_REPORT, CHANGE_OF_ADDRESS, + CONTINUATION_IN_FILING_TEMPLATE, CORRECTION_AR, FILING_HEADER, INCORPORATION_FILING_TEMPLATE, ) -from entity_filer.filing_processors.filing_components import create_party, create_role +from entity_queue_common.service_utils import QueueException +from entity_filer.filing_processors.filing_components import business_info, business_profile, create_party, create_role from entity_filer.worker import process_filing from tests.unit import ( COD_FILING, @@ -467,3 +471,42 @@ async def test_publish_event(): } mock_publish.publish.assert_called_with('entity.events', payload) + + +@pytest.mark.parametrize('test_name,withdrawal_pending,filing_status', [ + ('Process the Filing', False, 'PAID'), + ('Dont process the Filing', False, 'WITHDRAWN'), + ('Dont process the Filing', True, 'PAID'), + ('Dont process the Filing', True, 'WITHDRAWN'), +]) +async def test_process_filing_completed(app, session, mocker, test_name, withdrawal_pending, filing_status): + """Assert that an filling can be processed.""" + # vars + filing_type = 'continuationIn' + nr_identifier = 'NR 1234567' + next_corp_num = 'C0001095' + + filing = copy.deepcopy(CONTINUATION_IN_FILING_TEMPLATE) + filing['filing'][filing_type]['nameRequest']['nrNumber'] = nr_identifier + filing['filing'][filing_type]['nameTranslations'] = [{'name': 'ABCD Ltd.'}] + filing_rec = create_filing('123', filing) + effective_date = datetime.datetime.now(timezone.utc) + filing_rec.effective_date = effective_date + filing_rec._status = filing_status + filing_rec.withdrawal_pending = withdrawal_pending + filing_rec.save() + + # test + filing_msg = {'filing': {'id': filing_rec.id}} + + with patch.object(business_info, 'get_next_corp_num', return_value=next_corp_num): + with 
patch.object(business_profile, 'update_business_profile', return_value=HTTPStatus.OK): + if withdrawal_pending and filing_status != 'WITHDRAWN': + with pytest.raises(QueueException): + await process_filing(filing_msg, app) + else: + await process_filing(filing_msg, app) + + business = Business.find_by_identifier(next_corp_num) + if not withdrawal_pending and filing_status == 'PAID': + assert business.state == Business.State.ACTIVE From c1549ea0c8e83a51429193f44e6f276c5a4c76f9 Mon Sep 17 00:00:00 2001 From: flutistar Date: Mon, 10 Feb 2025 10:02:50 -0800 Subject: [PATCH 060/133] update validation --- legal-api/src/legal_api/constants.py | 8 ++++++-- .../legal_api/services/filings/validations/dissolution.py | 6 ++++-- .../filings/validations/incorporation_application.py | 7 ++++--- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 403cc32e13..25c41b17e2 100644 --- a/legal-api/src/legal_api/constants.py +++ b/legal-api/src/legal_api/constants.py @@ -19,8 +19,12 @@ BOB_DATE = '2019-03-08' class DocumentClassEnum(Enum): - CORP = 'CORP' + CORP = 'CORP' + COOP = 'COOP' class DocumentTypeEnum(Enum): CNTO = 'CNTO', - DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT' \ No newline at end of file + DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT' + CORP_AFFIDAVIT = 'CORP_AFFIDAVIT' + COOP_MEMORANDUM = 'COOP_MEMORANDUM' + COOP_RULES = 'COOP_RULES' \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/dissolution.py b/legal-api/src/legal_api/services/filings/validations/dissolution.py index 21bc9b2575..f762766756 100644 --- a/legal-api/src/legal_api/services/filings/validations/dissolution.py +++ b/legal-api/src/legal_api/services/filings/validations/dissolution.py @@ -22,7 +22,9 @@ from legal_api.errors import Error from legal_api.models import Address, Business, PartyRole -from .common_validations import validate_court_order, validate_pdf +from .common_validations import validate_court_order, validate_file_on_drs +from legal_api.constants import DocumentClassEnum + from ...utils import get_str # noqa: I003; needed as the linter gets confused from the babel override above. 
@@ -243,7 +245,7 @@ def validate_affidavit(filing_json, legal_type, dissolution_type) -> Optional[li return [{'error': _('A valid affidavit key is required.'), 'path': affidavit_file_key_path}] - return validate_pdf(affidavit_file_key, affidavit_file_key_path) + return validate_file_on_drs(DocumentClassEnum.CORP.value, affidavit_file_key, affidavit_file_key_path) return None diff --git a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py index dc33ed88eb..d540a68347 100644 --- a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py +++ b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py @@ -24,12 +24,13 @@ from legal_api.models import Business from legal_api.services.utils import get_str from legal_api.utils.datetime import datetime as dt +from legal_api.constants import DocumentClassEnum from .common_validations import ( # noqa: I001 validate_court_order, validate_name_request, validate_parties_names, - validate_pdf, + validate_file_on_drs, validate_share_structure, ) @@ -294,13 +295,13 @@ def validate_cooperative_documents(incorporation_json: dict): rules_file_key = cooperative['rulesFileKey'] rules_file_key_path = '/filing/incorporationApplication/cooperative/rulesFileKey' - rules_err = validate_pdf(rules_file_key, rules_file_key_path) + rules_err = validate_file_on_drs(DocumentClassEnum.COOP.value, rules_file_key, rules_file_key_path) if rules_err: msg.extend(rules_err) memorandum_file_key = cooperative['memorandumFileKey'] memorandum_file_key_path = '/filing/incorporationApplication/cooperative/memorandumFileKey' - memorandum_err = validate_pdf(memorandum_file_key, memorandum_file_key_path) + memorandum_err = validate_file_on_drs(DocumentClassEnum.COOP.value, memorandum_file_key, memorandum_file_key_path) if memorandum_err: msg.extend(memorandum_err) From 7172f81e42cdc70556337aba5cb5d12ddb245246 Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Tue, 11 Feb 2025 03:06:43 +0800 Subject: [PATCH 061/133] 23352 fix_withdawn_date_value (#3213) * 23352 fix_withdawn_date_value * fix lint issue * update version --- colin-api/src/colin_api/version.py | 2 +- legal-api/src/legal_api/version.py | 2 +- .../src/entity_filer/filing_processors/notice_of_withdrawal.py | 3 +-- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/colin-api/src/colin_api/version.py b/colin-api/src/colin_api/version.py index e139b6fe29..81c66b5c24 100644 --- a/colin-api/src/colin_api/version.py +++ b/colin-api/src/colin_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.139.0' # pylint: disable=invalid-name +__version__ = '2.140.0' # pylint: disable=invalid-name diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index afaf5d5c3f..d7d51345a7 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.139.0' # pylint: disable=invalid-name +__version__ = '2.140.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py index 5451e3d789..682a48bc3a 100644 --- 
a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. """File processing rules and actions for the Notice of Withdrawal filing.""" -import datetime from typing import Dict from entity_queue_common.service_utils import logger @@ -42,5 +41,5 @@ def process( withdrawn_filing.withdrawal_pending = False withdrawn_filing_meta_data = withdrawn_filing.meta_data if withdrawn_filing.meta_data else {} withdrawn_filing._meta_data = {**withdrawn_filing_meta_data, # pylint: disable=protected-access - 'withdrawnDate': f'{datetime.datetime.utcnow()}'} + 'withdrawnDate': f'{filing_submission.effective_date.isoformat()}'} withdrawn_filing.save_to_session() diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index e139b6fe29..81c66b5c24 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.139.0' # pylint: disable=invalid-name +__version__ = '2.140.0' # pylint: disable=invalid-name From 66620526a9efbee63fffd39203caf7b2c44dca0d Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Mon, 10 Feb 2025 15:50:28 -0800 Subject: [PATCH 062/133] 23352 fix_get_business_type (#3216) --- queue_services/entity-filer/src/entity_filer/worker.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 1a0ab22ecd..5874a9d375 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -349,7 +349,8 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, filing_submission.transaction_id = transaction.id - business_type = business.legal_type if business else filing_submission['business']['legal_type'] + business_type = business.legal_type if business \ + else filing_submission.filing_json.get('filing', {}).get('business', {}).get('legalType') filing_submission.set_processed(business_type) business.last_modified = filing_submission.completion_date From 914b9989bde52af77e3475402d36d588b8a7e99c Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Tue, 11 Feb 2025 09:59:51 -0800 Subject: [PATCH 063/133] 24593 Tombstone pipeline - implement conversion (exclude liquidation, invol dissolution, cont out) (#3199) * 24593 - Tombstone - implement conversion (exclude liquidation, invol dissolution, cont out) Signed-off-by: Hongjing Chen * update legal-api for tombstone conversion Signed-off-by: Hongjing Chen * fix linting Signed-off-by: Hongjing Chen * fix linting Signed-off-by: Hongjing Chen * fix unit test Signed-off-by: Hongjing Chen * update processing status for corps skipped due to data collection error Signed-off-by: Hongjing Chen * update conversion filing_json and meta_data for changeOfName Signed-off-by: Hongjing Chen * add detailed name change info into conversion filing_json and meta_data Signed-off-by: Hongjing Chen * add comments Signed-off-by: Hongjing Chen * populate meta_data of name change and legal type change for alteration Signed-off-by: Hongjing 
Chen * fix linting Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- data-tool/flows/corps_tombstone_flow.py | 6 + .../flows/tombstone/tombstone_base_data.py | 3 +- .../flows/tombstone/tombstone_mappings.py | 56 ++++++--- .../flows/tombstone/tombstone_queries.py | 12 +- data-tool/flows/tombstone/tombstone_utils.py | 117 +++++++++++++----- ...b10576924c_alter_amalgamation_type_enum.py | 50 ++++++++ .../src/legal_api/models/amalgamation.py | 4 +- legal-api/src/legal_api/models/business.py | 3 +- legal-api/src/legal_api/models/filing.py | 17 +++ .../src/legal_api/models/jurisdiction.py | 12 +- .../legal_api/reports/business_document.py | 70 +++++++---- .../tests/unit/models/test_amalgamation.py | 14 ++- 12 files changed, 292 insertions(+), 72 deletions(-) create mode 100644 legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 762f723316..633421ac23 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -397,6 +397,12 @@ def tombstone_flow(): ) else: skipped += 1 + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.FAILED, + error="Migration failed - Skip due to data collection error" + ) print(f'❗ Skip migrating {corp_num} due to data collection error.') wait(corp_futures) diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index 907ecd1cb0..757b0925dd 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -189,7 +189,8 @@ 'source': 'COLIN', 'colin_only': False, 'deletion_locked': False, - # TODO: new column - hide_in_ledger + 'hide_in_ledger': False, # TODO: double check when doing cleanup - dissolution (invol, admin) + # TODO: new columns for NoW # FK 'business_id': None, 'transaction_id': None, diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index b72dba851c..1aff238381 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -35,7 +35,6 @@ class EventFilings(str, Enum): FILE_AMLRC = 'FILE_AMLRC' FILE_AMLVC = 'FILE_AMLVC' - CONVAMAL_NULL = 'CONVAMAL_NULL' # TODO: re-map # Annual Report FILE_ANNBC = 'FILE_ANNBC' @@ -62,9 +61,25 @@ class EventFilings(str, Enum): FILE_CONTU = 'FILE_CONTU' FILE_CONTC = 'FILE_CONTC' - # Conversion + # Conversion Ledger FILE_CONVL = 'FILE_CONVL' + # Conversion + CONVAMAL_NULL = 'CONVAMAL_NULL' + CONVCIN_NULL = 'CONVCIN_NULL' + CONVCOUT_NULL = 'CONVCOUT_NULL' + CONVDS_NULL = 'CONVDS_NULL' + CONVDSF_NULL = 'CONVDSF_NULL' + CONVDSL_NULL = 'CONVDSL_NULL' + CONVDSO_NULL = 'CONVDSO_NULL' + CONVICORP_NULL = 'CONVICORP_NULL' + CONVID1_NULL = 'CONVID1_NULL' + CONVID2_NULL = 'CONVID2_NULL' + CONVILIQ_NULL = 'CONVILIQ_NULL' + CONVLRSTR_NULL = 'CONVLRSTR_NULL' + CONVNC_NULL = 'CONVNC_NULL' + CONVRSTR_NULL = 'CONVRSTR_NULL' + # Correction FILE_CO_AR = 'FILE_CO_AR' FILE_CO_BC = 'FILE_CO_BC' @@ -103,9 +118,6 @@ class EventFilings(str, Enum): FILE_ICORP = 'FILE_ICORP' FILE_ICORU = 'FILE_ICORU' FILE_ICORC = 'FILE_ICORC' - CONVICORP_NULL = 'CONVICORP_NULL' # TODO: re-map - - # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported FILE_AM_PF = 'FILE_AM_PF' @@ -115,7 +127,7 @@ class EventFilings(str, Enum): # TODO: Liquidation - unsupported # FILE_ADCOL = 'FILE_ADCOL' - # Notice of Withdrawal + # TODO: 
Notice of Withdrawal - unsupported FILE_NWITH = 'FILE_NWITH' # Registrar's Notation @@ -140,7 +152,7 @@ class EventFilings(str, Enum): # TODO: # Other COLIN events: - # CONV*, Adim Corp (ADCORP, BNUPD, ADMIN), XPRO filing + # Adim Corp (ADCORP, BNUPD, ADMIN), XPRO filing # SYSDL, SYST # more legacyOther filings @@ -175,7 +187,6 @@ def has_value(cls, value): EventFilings.FILE_AMLHC: ['amalgamationApplication', 'horizontal'], EventFilings.FILE_AMLRC: ['amalgamationApplication', 'regular'], EventFilings.FILE_AMLVC: ['amalgamationApplication', 'vertical'], - EventFilings.CONVAMAL_NULL: ['amalgamationApplication', 'regular'], # TODO: re-map EventFilings.FILE_ANNBC: 'annualReport', @@ -197,6 +208,21 @@ def has_value(cls, value): EventFilings.FILE_CONVL: 'conversionLedger', + EventFilings.CONVAMAL_NULL: ['conversion', ('amalgamationApplication', 'unknown')], + EventFilings.CONVCIN_NULL: ['conversion', 'continuationIn'], + EventFilings.CONVCOUT_NULL: ['conversion', 'continuationOut'], # TODO: continuation out + EventFilings.CONVDS_NULL: ['conversion', ('dissolution', 'voluntary')], + EventFilings.CONVDSF_NULL: ['conversion', ('dissolution', 'involuntary')], + EventFilings.CONVDSL_NULL: 'conversion', # TODO: liquidation + EventFilings.CONVDSO_NULL: ['conversion', ('dissolution', 'unknown')], + EventFilings.CONVICORP_NULL: 'conversion', + EventFilings.CONVID1_NULL: 'conversion', # TODO: related to invol dissolution + EventFilings.CONVID2_NULL: 'conversion', # TODO: related to invol dissolution + EventFilings.CONVILIQ_NULL: 'conversion', # TODO: liquidation + EventFilings.CONVLRSTR_NULL: ['conversion', ('restoration', 'limitedRestoration')], + EventFilings.CONVNC_NULL: ['conversion', 'changeOfName'], + EventFilings.CONVRSTR_NULL: ['conversion', ('restoration', 'fullRestoration')], + EventFilings.FILE_CO_AR: 'correction', EventFilings.FILE_CO_BC: 'correction', EventFilings.FILE_CO_DI: 'correction', @@ -228,9 +254,7 @@ def has_value(cls, value): EventFilings.FILE_ICORP: 'incorporationApplication', EventFilings.FILE_ICORU: 'incorporationApplication', EventFilings.FILE_ICORC: 'incorporationApplication', - EventFilings.CONVICORP_NULL: 'incorporationApplication', # TODO: re-map - # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported EventFilings.FILE_AM_PF: 'legacyOther', EventFilings.FILE_AM_PO: 'legacyOther', @@ -285,7 +309,6 @@ def has_value(cls, value): EventFilings.FILE_AMLHC: 'Amalgamation Application Short Form (Horizontal) for a Community Contribution Company', EventFilings.FILE_AMLRC: 'Amalgamation Application (Regular) for a Community Contribution Company', EventFilings.FILE_AMLVC: 'Amalgamation Application Short Form (Vertical) for a Community Contribution Company', - EventFilings.CONVAMAL_NULL: None, # TODO: re-map EventFilings.FILE_ANNBC: 'BC Annual Report', # has suffix of date, dynamically add it during formatting @@ -338,9 +361,7 @@ def has_value(cls, value): EventFilings.FILE_ICORP: 'Incorporation Application', EventFilings.FILE_ICORU: 'Incorporation Application for a BC Unlimited Liability Company', EventFilings.FILE_ICORC: 'Incorporation Application for a Community Contribution Company', - EventFilings.CONVICORP_NULL: None, # TODO: re-map - # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported EventFilings.FILE_AM_PF: 'Amendment - Put Back Off', EventFilings.FILE_AM_PO: 'Amendment - Put Back On', @@ -393,7 +414,7 @@ def has_value(cls, value): 'changeOfDirectors': ['last_cod_date'], 'agmExtension': ['last_agm_date'], 'amalgamationApplication': ['last_coa_date', 
'last_cod_date'], - # TODO: 'dissolution_date' - Amalgamating business, continuation out + # TODO: 'dissolution_date' - continuation out # TODO: 'continuation_out_date' - continuation out 'continuationIn': ['last_coa_date', 'last_cod_date'], 'dissolution': ['dissolution_date'], @@ -412,3 +433,10 @@ def has_value(cls, value): # ingore the following since we won't map to them # 'dissolved', 'restorationApplication', 'continuedOut' ] + + +LEGAL_TYPE_CHANGE_FILINGS = { + EventFilings.FILE_NOALB: ['ULC', 'BC'], + EventFilings.FILE_NOALU: ['BC', 'ULC'], + EventFilings.FILE_NOALC: ['BC', 'CC'], +} diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index cc6955bd5d..56cb190355 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -605,17 +605,25 @@ def get_filings_query(corp_num): u.email_addr as u_email_addr, u.role_typ_cd as u_role_typ_cd, --- conversion ledger - cl.ledger_title_txt as cl_ledger_title_txt + cl.ledger_title_txt as cl_ledger_title_txt, + -- conv event + to_char(ce.effective_dt at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as ce_effective_dt_str, + -- corp name change + cn_old.corp_name as old_corp_name, + cn_new.corp_name as new_corp_name from event e left outer join filing f on e.event_id = f.event_id left outer join filing_user u on u.event_id = e.event_id left outer join conv_ledger cl on cl.event_id = e.event_id + left outer join conv_event ce on e.event_id = ce.event_id + left outer join corp_name cn_old on e.event_id = cn_old.end_event_id + left outer join corp_name cn_new on e.event_id = cn_new.start_event_id where 1 = 1 and e.corp_num = '{corp_num}' -- and e.corp_num = 'BC0068889' -- and e.corp_num = 'BC0449924' -- AR, ADCORP -- and e.trigger_dts is not null - order by e.event_timerstamp + order by e.event_timerstamp, e.event_id ; """ return query diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 006613c597..5faf20a1ec 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -1,5 +1,4 @@ import copy -from decimal import Decimal import json from datetime import datetime, timezone from decimal import Decimal @@ -7,14 +6,16 @@ import pandas as pd import pytz from sqlalchemy import Connection, text -from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, FILING_JSON, - JURISDICTION, OFFICE, +from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, + FILING_JSON, JURISDICTION, OFFICE, PARTY, PARTY_ROLE, RESOLUTION, SHARE_CLASSES, USER) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, EVENT_FILING_LEAR_TARGET_MAPPING, LEAR_FILING_BUSINESS_UPDATE_MAPPING, - LEAR_STATE_FILINGS, EventFilings) + LEAR_STATE_FILINGS, + LEGAL_TYPE_CHANGE_FILINGS, + EventFilings) unsupported_event_file_types = set() @@ -248,8 +249,8 @@ def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict: formatted_jurisdiction['country'] = None formatted_jurisdiction['region'] = None - can_jurisdiction_code = jurisdiction_info['j_can_jur_typ_cd'] - other_jurisdiction_desc = jurisdiction_info['j_othr_juris_desc'] + can_jurisdiction_code = jurisdiction_info['j_can_jur_typ_cd'] or '' + other_jurisdiction_desc = jurisdiction_info['j_othr_juris_desc'] or '' # when canadian jurisdiction, ignore othr_juris_desc if can_jurisdiction_code != 'OT': @@ -278,19 +279,29 @@ def format_filings_data(data: dict) -> 
list[dict]: for x in filings_data: event_file_type = x['event_file_type'] # TODO: build a new complete filing event mapper (WIP) - filing_type, filing_subtype = get_target_filing_type(event_file_type) + raw_filing_type, raw_filing_subtype = get_target_filing_type(event_file_type) # skip the unsupported ones - if not filing_type: + if not raw_filing_type: print(f'❗ Skip event filing type: {event_file_type}') unsupported_event_file_types.add(event_file_type) continue + + # get converted filing_type and filing_subtype + if raw_filing_type == 'conversion': + if isinstance(raw_filing_subtype, tuple): + filing_type, filing_subtype = raw_filing_subtype + else: + filing_type = raw_filing_subtype + filing_subtype = None + raw_filing_subtype = None + else: + filing_type = raw_filing_type + filing_subtype = raw_filing_subtype - effective_date = x['f_effective_dt_str'] - if not effective_date: - effective_date = x['e_event_dt_str'] + effective_date = x['ce_effective_dt_str'] or x['f_effective_dt_str'] or x['e_event_dt_str'] trigger_date = x['e_trigger_dt_str'] - filing_json, meta_data = build_filing_json_meta_data(filing_type, filing_subtype, + filing_json, meta_data = build_filing_json_meta_data(raw_filing_type, filing_type, filing_subtype, effective_date, x) filing_body = copy.deepcopy(FILING['filings']) @@ -301,23 +312,31 @@ def format_filings_data(data: dict) -> list[dict]: if not (user_id := x['u_user_id']): user_id = x['u_full_name'] if x['u_full_name'] else None + if raw_filing_type == 'conversion' or raw_filing_subtype == 'involuntary': + hide_in_ledger = True + else: + hide_in_ledger = False + filing_body = { **filing_body, 'filing_date': effective_date, - 'filing_type': filing_type, - 'filing_sub_type': filing_subtype, + 'filing_type': raw_filing_type, + 'filing_sub_type': raw_filing_subtype, 'completion_date': effective_date, 'effective_date': effective_date, 'filing_json': filing_json, 'meta_data': meta_data, + 'hide_in_ledger': hide_in_ledger, 'submitter_id': user_id, # will be updated to real user_id when loading data into db } + # conversion still need to populate create-new-business info + # based on converted filing type if filing_type == 'continuationIn': jurisdiction = format_jurisdictions_data(data, x['e_event_id']) if filing_type == 'amalgamationApplication': - amalgamation = format_amalgamations_data(data, x['e_event_id']) + amalgamation = format_amalgamations_data(data, x['e_event_id'], effective_date, filing_subtype) comments = format_filing_comments_data(data, x['e_event_id']) @@ -348,7 +367,7 @@ def format_filings_data(data: dict) -> list[dict]: } -def format_amalgamations_data(data: dict, event_id: Decimal) -> dict: +def format_amalgamations_data(data: dict, event_id: Decimal, amalgamation_date: str, amalgamation_type: str) -> dict: amalgamations_data = data['amalgamations'] matched_amalgamations = [ @@ -359,18 +378,12 @@ def format_amalgamations_data(data: dict, event_id: Decimal) -> dict: return None formatted_amalgmation = copy.deepcopy(AMALGAMATION) - amalgmation_info = matched_amalgamations[0] + amalgamation_info = matched_amalgamations[0] - amalgmation_date = amalgmation_info['f_effective_dt_str'] - if not amalgmation_date: - amalgmation_date = amalgmation_info['e_event_dt_str'] - formatted_amalgmation['amalgamations']['amalgamation_date'] = amalgmation_date - formatted_amalgmation['amalgamations']['court_approval'] = bool(amalgmation_info['f_court_approval']) - - event_file_type = amalgmation_info['event_file_type'] - _, filing_subtype = 
get_target_filing_type(event_file_type) + formatted_amalgmation['amalgamations']['amalgamation_date'] = amalgamation_date + formatted_amalgmation['amalgamations']['court_approval'] = bool(amalgamation_info['f_court_approval']) - formatted_amalgmation['amalgamations']['amalgamation_type'] = filing_subtype + formatted_amalgmation['amalgamations']['amalgamation_type'] = amalgamation_type formatted_tings = formatted_amalgmation['amalgamating_businesses'] for ting in matched_amalgamations: formatted_tings.append(format_amalgamating_businesses(ting)) @@ -546,9 +559,11 @@ def get_business_update_value(key: str, effective_date: str, trigger_date: str, return value -def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective_date: str, data: dict) -> tuple[dict, dict]: +def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_subtype: str, effective_date: str, data: dict) -> tuple[dict, dict]: filing_json = copy.deepcopy(FILING_JSON) - filing_json['filing'][filing_type] = {} + filing_json['filing'][raw_filing_type] = {} + if raw_filing_type != filing_type: + filing_json['filing'][filing_type] = {} meta_data = { 'colinFilingInfo': { @@ -560,6 +575,38 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective 'colinDisplayName': get_colin_display_name(data) } + if raw_filing_type == 'conversion': + # will populate state filing info for conversion in the following steps + # based on converted filing type and converted filing subtype + if filing_type in LEAR_STATE_FILINGS: + state_change = True + else: + state_change = False + if filing_type == 'changeOfName': + name_change = True + filing_json['filing']['changeOfName'] = { + 'fromLegalName': data['old_corp_name'], + 'toLegalName': data['new_corp_name'], + } + meta_data['changeOfName'] = { + 'fromLegalName': data['old_corp_name'], + 'toLegalName': data['new_corp_name'], + } + else: + name_change = False + filing_json['filing']['conversion'] = { + 'convFilingType': filing_type, + 'convFilingSubType': filing_subtype, + 'stateChange': state_change, + 'nameChange': name_change, + } + meta_data['conversion'] = { + 'convFilingType': filing_type, + 'convFilingSubType': filing_subtype, + 'stateChange': state_change, + 'nameChange': name_change, + } + if filing_type == 'annualReport': meta_data['annualReport'] = { 'annualReportFilingYear': int(effective_date[:4]), @@ -587,6 +634,20 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective **filing_json['filing']['restoration'], 'type': filing_subtype, } + elif filing_type == 'alteration': + meta_data['alteration'] = {} + if (event_file_type := data['event_file_type']) in LEGAL_TYPE_CHANGE_FILINGS.keys(): + meta_data['alteration'] = { + **meta_data['alteration'], + 'fromLegalType': LEGAL_TYPE_CHANGE_FILINGS[event_file_type][0], + 'toLegalType': LEGAL_TYPE_CHANGE_FILINGS[event_file_type][1], + } + if (old_corp_name := data['old_corp_name']) and (new_corp_name := data['new_corp_name']): + meta_data['alteration'] = { + **meta_data['alteration'], + 'fromLegalName': old_corp_name, + 'toLegalName': new_corp_name, + } # TODO: populate meta_data for correction to display correct filing name return filing_json, meta_data diff --git a/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py b/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py new file mode 100644 index 0000000000..eeb169f797 --- /dev/null +++ b/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py @@ 
-0,0 +1,50 @@ +"""alter_amalgamation_type_enum + +Revision ID: d0b10576924c +Revises: d9254d3cbbf4 +Create Date: 2025-02-03 21:47:05.061172 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'd0b10576924c' +down_revision = 'd9254d3cbbf4' +branch_labels = None +depends_on = None + + +amalgamation_type_old_enum = postgresql.ENUM('regular', + 'vertical', + 'horizontal', + name='amalgamation_type_old') + + +def upgrade(): + with op.get_context().autocommit_block(): + op.execute("ALTER TYPE amalgamation_type ADD VALUE 'unknown'") + + +def downgrade(): + op.execute("UPDATE amalgamations SET amalgamation_type = 'regular' WHERE amalgamation_type = 'unknown'") + op.execute("UPDATE amalgamations_version SET amalgamation_type = 'regular' WHERE amalgamation_type = 'unknown'") + + amalgamation_type_old_enum.create(op.get_bind(), checkfirst=True) + + op.execute(""" + ALTER TABLE amalgamations + ALTER COLUMN amalgamation_type + TYPE amalgamation_type_old + USING amalgamation_type::text::amalgamation_type_old + """) + op.execute(""" + ALTER TABLE amalgamations_version + ALTER COLUMN amalgamation_type + TYPE amalgamation_type_old + USING amalgamation_type::text::amalgamation_type_old + """) + + op.execute("DROP TYPE amalgamation_type") + op.execute("ALTER TYPE amalgamation_type_old RENAME TO amalgamation_type") diff --git a/legal-api/src/legal_api/models/amalgamation.py b/legal-api/src/legal_api/models/amalgamation.py index 0b2ecc0ad3..d5eabc237b 100644 --- a/legal-api/src/legal_api/models/amalgamation.py +++ b/legal-api/src/legal_api/models/amalgamation.py @@ -37,6 +37,7 @@ class AmalgamationTypes(BaseEnum): regular = auto() vertical = auto() horizontal = auto() + unknown = auto() __versioned__ = {} __tablename__ = 'amalgamations' @@ -124,7 +125,8 @@ def get_revision_json(cls, transaction_id, business_id, tombstone=False): if tombstone: return { 'identifier': 'Not Available', - 'legalName': 'Not Available' + 'legalName': 'Not Available', + 'amalgamationDate': 'Not Available' } amalgamation = Amalgamation.get_revision(transaction_id, business_id) diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 0ca1500cc9..552266febb 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -790,7 +790,8 @@ def get_amalgamated_into(self) -> dict: else: return { 'identifier': 'Not Available', - 'legalName': 'Not Available' + 'legalName': 'Not Available', + 'amalgamationDate': 'Not Available' } return None diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 5b07081807..fed9ffea6d 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -996,6 +996,23 @@ def get_filings_by_types(business_id: int, filing_types): all() return filings + @staticmethod + def get_conversion_filings_by_conv_types(business_id: int, filing_types: list): + """Return the conversion filings of a particular conv type. + + Records only exist in some legacy corps imported from COLIN. + """ + filings = db.session.query(Filing). \ + filter(Filing.business_id == business_id). \ + filter(Filing._filing_type == 'conversion'). \ + filter( + Filing._meta_data.op('->')('conversion').op('->>')('convFilingType').in_(filing_types) + ). \ + order_by(desc(Filing.transaction_id)). 
\ + all() + + return filings + @staticmethod def get_incomplete_filings_by_types(business_id: int, filing_types: list, excluded_statuses: list = None): """Return the filings of particular types and statuses. diff --git a/legal-api/src/legal_api/models/jurisdiction.py b/legal-api/src/legal_api/models/jurisdiction.py index 1705fc4188..b7f7934d65 100644 --- a/legal-api/src/legal_api/models/jurisdiction.py +++ b/legal-api/src/legal_api/models/jurisdiction.py @@ -15,6 +15,7 @@ from __future__ import annotations from sql_versioning import Versioned +from sqlalchemy import and_, or_ from .db import db from .filing import Filing @@ -61,6 +62,15 @@ def get_continuation_in_jurisdiction(cls, business_id) -> Jurisdiction: # pylint: disable=protected-access jurisdiction = (db.session.query(Jurisdiction).join(Filing). filter(Jurisdiction.business_id == business_id). - filter(Filing._filing_type == 'continuationIn'). + filter( + or_( + Filing._filing_type == 'continuationIn', + and_( + Filing._filing_type == 'conversion', + Filing._meta_data.op('->')('conversion'). + op('->>')('convFilingType') == 'continuationIn' + ) + ) + ). one_or_none()) return jurisdiction diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index e806d380c6..c31801d6b8 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -327,6 +327,12 @@ def _set_business_state_changes(self, business: dict): 'continuationOut']): state_filings.append(self._format_state_filing(filing)) + # TODO: add conv liquidation etc. in the future work + for filing in Filing.get_conversion_filings_by_conv_types(self._business.id, ['dissolution', + 'continuationOut', + 'restoration']): + state_filings.append(self._format_state_filing(filing)) + # If it has linked amalgamating businesses # set placeholder info if this business is tombstone tombstone = self._business.is_tombstone @@ -404,40 +410,58 @@ def _set_business_changes(self, business: dict): 'Not Available') name_change_info['filingDateTime'] = filing.filing_date.isoformat() name_changes.append(name_change_info) + + # get name change info from conversion filing + for filing in Filing.get_conversion_filings_by_conv_types(self._business.id, ['changeOfName']): + filing_meta = filing.meta_data + name_change_info = {} + name_change_info['fromLegalName'] = filing_meta.get('changeOfName').get('fromLegalName', + 'Not Available') + name_change_info['toLegalName'] = filing_meta.get('changeOfName').get('toLegalName', + 'Not Available') + name_change_info['filingDateTime'] = filing.filing_date.isoformat() + name_changes.append(name_change_info) + business['nameChanges'] = name_changes business['alterations'] = alterations def _format_state_filing(self, filing: Filing) -> dict: """Format state change filing data.""" filing_info = {} + filing_meta = filing.meta_data + if filing.filing_type == 'conversion': + filing_type = filing_meta.get('conversion').get('convFilingType') + filing_sub_type = filing_meta.get('conversion').get('convFilingSubType') + else: + filing_type = filing.filing_type + filing_sub_type = filing.filing_sub_type - filing_info['filingType'] = filing.filing_type - filing_info['filingSubType'] = filing.filing_sub_type + filing_info['filingType'] = filing_type + filing_info['filingSubType'] = filing_sub_type filing_info['filingDateTime'] = filing.filing_date.isoformat() filing_info['effectiveDateTime'] = filing.effective_date.isoformat() - filing_meta = 
filing.meta_data - if filing.filing_type == 'dissolution': + if filing_type == 'dissolution': filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, + _get_summary_display_name(filing_type, filing_meta['dissolution']['dissolutionType'], self._business.legal_type) if self._business.legal_type in ['SP', 'GP'] and filing_meta['dissolution']['dissolutionType'] == \ 'voluntary': filing_info['dissolution_date_str'] = LegislationDatetime.as_legislation_timezone_from_date_str( filing.filing_json['filing']['dissolution']['dissolutionDate']).strftime(OUTPUT_DATE_FORMAT) - elif filing.filing_type == 'restoration': + elif filing_type == 'restoration': filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, - filing.filing_sub_type, + _get_summary_display_name(filing_type, + filing_sub_type, self._business.legal_type) - if filing.filing_sub_type in ['limitedRestoration', 'limitedRestorationExtension']: + if filing_sub_type in ['limitedRestoration', 'limitedRestorationExtension']: expiry_date = filing_meta['restoration']['expiry'] expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date) expiry_date = expiry_date.replace(minute=1) filing_info['limitedRestorationExpiryDate'] = LegislationDatetime.format_as_report_string(expiry_date) - elif filing.filing_type == 'continuationOut': - filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing.filing_type, None, None) + elif filing_type == 'continuationOut': + filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing_type, None, None) country_code = filing_meta['continuationOut']['country'] region_code = filing_meta['continuationOut']['region'] @@ -453,13 +477,15 @@ def _format_state_filing(self, filing: Filing) -> dict: filing_info['continuationOutDate'] = continuation_out_date.strftime(OUTPUT_DATE_FORMAT) else: filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, None, None) + _get_summary_display_name(filing_type, None, None) return filing_info def _set_amalgamation_details(self, business: dict): """Set amalgamation filing data.""" amalgamated_businesses = [] - filings = Filing.get_filings_by_types(self._business.id, ['amalgamationApplication']) + # get amalgamation info from either general filing or conversion filing + filings = Filing.get_filings_by_types(self._business.id, ['amalgamationApplication']) or \ + Filing.get_conversion_filings_by_conv_types(self._business.id, ['amalgamationApplication']) if filings: amalgamation_application = filings[0] business['business']['amalgamatedEntity'] = True @@ -519,7 +545,9 @@ def _set_liquidation_details(self, business: dict): def _set_continuation_in_details(self, business: dict): """Set continuation in filing data.""" continuation_in_info = {} - continuation_in_filing = Filing.get_filings_by_types(self._business.id, ['continuationIn']) + # get continuation in info from either general filing or conversion filing + continuation_in_filing = Filing.get_filings_by_types(self._business.id, ['continuationIn']) or \ + Filing.get_conversion_filings_by_conv_types(self._business.id, ['continuationIn']) if continuation_in_filing: continuation_in_filing = continuation_in_filing[0] jurisdiction = Jurisdiction.get_continuation_in_jurisdiction(continuation_in_filing.business_id) @@ -541,21 +569,16 @@ def _set_continuation_in_details(self, business: dict): jurisdiction_info = { 'id': jurisdiction.id, 'jurisdiction': location_jurisdiction, - 
'identifier': jurisdiction.identifier, - 'legal_name': jurisdiction.legal_name, + 'identifier': jurisdiction.identifier or 'Not Available', + 'legal_name': jurisdiction.legal_name or 'Not Available', 'tax_id': jurisdiction.tax_id, 'incorporation_date': formatted_incorporation_date, 'expro_identifier': jurisdiction.expro_identifier, - 'expro_legal_name': jurisdiction.expro_legal_name, + 'expro_legal_name': jurisdiction.expro_legal_name or 'Not Available', 'business_id': jurisdiction.business_id, 'filing_id': jurisdiction.filing_id, } - # Imported from COLIN - if self._business.is_tombstone: - jurisdiction_info['expro_identifier'] = 'Not Available' - jurisdiction_info['expro_legal_name'] = 'Not Available' - continuation_in_info['foreignJurisdiction'] = jurisdiction_info business['continuationIn'] = continuation_in_info @@ -620,7 +643,8 @@ def _get_legal_type_description(legal_type: str) -> str: 'GP': 'Dissolution Application' }, 'involuntary': 'Involuntary Dissolution', - 'administrative': 'Administrative Dissolution' + 'administrative': 'Administrative Dissolution', + 'unknown': 'Dissolved' }, 'restorationApplication': 'Restoration Application', 'restoration': { diff --git a/legal-api/tests/unit/models/test_amalgamation.py b/legal-api/tests/unit/models/test_amalgamation.py index 52bc204737..7f2e0dda5d 100644 --- a/legal-api/tests/unit/models/test_amalgamation.py +++ b/legal-api/tests/unit/models/test_amalgamation.py @@ -104,11 +104,23 @@ def test_valid_amalgamation_save(session): amalgamation_3.save() + amalgamation_4 = Amalgamation( + amalgamation_type=Amalgamation.AmalgamationTypes.unknown, + business_id=b.id, + filing_id=filing.id, + amalgamation_date=datetime.utcnow(), + court_approval=True + ) + + amalgamation_4.save() + # verify assert amalgamation_1.id assert amalgamation_2.id assert amalgamation_3.id + assert amalgamation_4.id for type in Amalgamation.AmalgamationTypes: assert type in [Amalgamation.AmalgamationTypes.horizontal, Amalgamation.AmalgamationTypes.vertical, - Amalgamation.AmalgamationTypes.regular] + Amalgamation.AmalgamationTypes.regular, + Amalgamation.AmalgamationTypes.unknown] From f1f0a00cca2b6dda404200514e5cd322f31d31ce Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Tue, 11 Feb 2025 11:07:07 -0800 Subject: [PATCH 064/133] 23352 update now filer email trigger (#3217) * 23352 fix_get_business_type * 23352 update_now_filer_email_trigger * fix missing * add the test for temp business filing * clean up --- .../entity-filer/src/entity_filer/worker.py | 39 ++++-- .../test_worker/test_notice_of_withdrawal.py | 123 ++++++++++++++++++ .../tests/unit/test_worker/test_worker.py | 2 +- 3 files changed, 151 insertions(+), 13 deletions(-) create mode 100644 queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 5874a9d375..4d724378d5 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -108,6 +108,13 @@ def get_filing_types(legal_filings: dict): async def publish_event(business: Business, filing: Filing): """Publish the filing message onto the NATS filing subject.""" + temp_reg = filing.temp_reg + if filing.filing_type == FilingCore.FilingTypes.NOTICEOFWITHDRAWAL and filing.withdrawn_filing: + logger.debug('publish_event - notice of withdrawal filing: %s, withdrawn_filing: %s', + filing, 
filing.withdrawn_filing) + temp_reg = filing.withdrawn_filing.temp_reg + business_identifier = business.identifier if business else temp_reg + try: payload = { 'specversion': '1.x-wip', @@ -115,25 +122,25 @@ async def publish_event(business: Business, filing: Filing): 'source': ''.join([ APP_CONFIG.LEGAL_API_URL, '/business/', - business.identifier, + business_identifier, '/filing/', str(filing.id)]), 'id': str(uuid.uuid4()), 'time': datetime.utcnow().isoformat(), 'datacontenttype': 'application/json', - 'identifier': business.identifier, + 'identifier': business_identifier, 'data': { 'filing': { 'header': {'filingId': filing.id, 'effectiveDate': filing.effective_date.isoformat() }, - 'business': {'identifier': business.identifier}, + 'business': {'identifier': business_identifier}, 'legalFilings': get_filing_types(filing.filing_json) } } } - if filing.temp_reg: - payload['tempidentifier'] = filing.temp_reg + if temp_reg: + payload['tempidentifier'] = temp_reg subject = APP_CONFIG.ENTITY_EVENT_PUBLISH_OPTIONS['subject'] await qsm.service.publish(subject, payload) @@ -144,6 +151,13 @@ async def publish_event(business: Business, filing: Filing): def publish_gcp_queue_event(business: Business, filing: Filing): """Publish the filing message onto the GCP-QUEUE filing subject.""" + temp_reg = filing.temp_reg + if filing.filing_type == FilingCore.FilingTypes.NOTICEOFWITHDRAWAL and filing.withdrawn_filing: + logger.debug('publish_event - notice of withdrawal filing: %s, withdrawan_filing: %s', + filing, filing.withdrawn_filing) + temp_reg = filing.withdrawn_filing.temp_reg + business_identifier = business.identifier if business else temp_reg + try: subject = APP_CONFIG.BUSINESS_EVENTS_TOPIC data = { @@ -152,20 +166,20 @@ def publish_gcp_queue_event(business: Business, filing: Filing): 'filingId': filing.id, 'effectiveDate': filing.effective_date.isoformat() }, - 'business': {'identifier': business.identifier}, + 'business': {'identifier': business_identifier}, 'legalFilings': get_filing_types(filing.filing_json) }, - 'identifier': business.identifier + 'identifier': business_identifier } - if filing.temp_reg: - data['tempidentifier'] = filing.temp_reg + if temp_reg: + data['tempidentifier'] = temp_reg ce = SimpleCloudEvent( id=str(uuid.uuid4()), source=''.join([ APP_CONFIG.LEGAL_API_URL, '/business/', - business.identifier, + business_identifier, '/filing/', str(filing.id)]), subject=subject, @@ -352,13 +366,14 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, business_type = business.legal_type if business \ else filing_submission.filing_json.get('filing', {}).get('business', {}).get('legalType') filing_submission.set_processed(business_type) - business.last_modified = filing_submission.completion_date + if business: + business.last_modified = filing_submission.completion_date + db.session.add(business) filing_submission._meta_data = json.loads( # pylint: disable=W0212 json.dumps(filing_meta.asjson, default=json_serial) ) - db.session.add(business) db.session.add(filing_submission) db.session.commit() diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py new file mode 100644 index 0000000000..9e8b669771 --- /dev/null +++ b/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py @@ -0,0 +1,123 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Unit Tests for the Notice Of Withdrawal filing.""" +import copy +import datetime +import random +import pytest +from unittest.mock import patch +from freezegun import freeze_time + +from legal_api.models import Filing, Business +from legal_api.services import RegistrationBootstrapService +from registry_schemas.example_data import ALTERATION, FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import notice_of_withdrawal +from entity_filer.worker import process_filing, APP_CONFIG, get_filing_types, publish_event, qsm +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name,filing_type,filing_template,identifier', [ + ('IA Withdrawn Filing', 'incorporationApplication', INCORPORATION, 'TJO4XI2qMo'), + ('alteration Withdrawn Filing', 'alteration', ALTERATION, 'BC1234567') +]) +async def test_worker_notice_of_withdrawal(app, session, test_name, filing_type, filing_template, identifier): + """Assert that the notice of withdrawal filing processes correctly.""" + import uuid + from unittest.mock import AsyncMock + from legal_api.utils.datetime import datetime as legal_datatime + # Setup + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + # Create withdrawn_filing + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['business']['legalType'] = 'BC' + withdrawn_filing_json['filing']['business']['identifier'] = identifier + withdrawn_filing_json['filing'][filing_type] = copy.deepcopy(filing_template) + if identifier.startswith('T'): + business = RegistrationBootstrapService.create_bootstrap(account=28) + withdrawn_filing = create_filing(token=payment_id, json_filing=withdrawn_filing_json, bootstrap_id=business.identifier) + else: + business = create_business(identifier, legal_type='BC') + withdrawn_filing = create_filing(payment_id, withdrawn_filing_json, business_id=business.id) + withdrawn_filing._filing_type = filing_type + withdrawn_filing.payment_completion_date = datetime.datetime.utcnow() # for setting the filing status PAID + withdrawn_filing._meta_data = {} + withdrawn_filing.save() + + # Create NoW filing + now_filing_json = copy.deepcopy(FILING_HEADER) + now_filing_json['filing']['business']['identifier'] = business.identifier + now_filing_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + now_filing = create_filing(payment_id, now_filing_json) + now_filing._filing_type = 'noticeOfWithdrawal' + if not identifier.startswith('T'): + now_filing.business_id = business.id + now_filing.withdrawn_filing_id = withdrawn_filing.id + now_filing.save() + + assert withdrawn_filing.status == Filing.Status.PAID.value + + # Test + filing_msg = {'filing': {'id': now_filing.id}} + await process_filing(filing_msg, app) + business.save() + + # Check NoW filing process results + final_withdrawn_filing = Filing.find_by_id(withdrawn_filing.id) + 
final_now_filing = Filing.find_by_id(now_filing.id) + + assert now_filing_json['filing']['noticeOfWithdrawal']['courtOrder']['orderDetails'] == final_now_filing.order_details + assert final_withdrawn_filing.status == Filing.Status.WITHDRAWN.value + assert final_withdrawn_filing.withdrawal_pending == False + assert final_withdrawn_filing.meta_data.get('withdrawnDate') + + # Test the publish_event + mock_publish = AsyncMock() + qsm.service = mock_publish + with freeze_time(legal_datatime.utcnow()), \ + patch.object(uuid, 'uuid4', return_value=1): + + final_business = Business.find_by_internal_id(final_now_filing.business_id) + await publish_event(final_business, final_now_filing) + payload = { + 'specversion': '1.x-wip', + 'type': 'bc.registry.business.' + final_now_filing.filing_type, + 'source': ''.join( + [APP_CONFIG.LEGAL_API_URL, + '/business/', + business.identifier, + '/filing/', + str(final_now_filing.id)]), + 'id': str(uuid.uuid4()), + 'time': legal_datatime.utcnow().isoformat(), + 'datacontenttype': 'application/json', + 'identifier': business.identifier, + 'data': { + 'filing': { + 'header': {'filingId': final_now_filing.id, + 'effectiveDate': final_now_filing.effective_date.isoformat() + }, + 'business': {'identifier': business.identifier}, + 'legalFilings': get_filing_types(final_now_filing.filing_json) + } + } + } + + if identifier.startswith('T'): + payload['tempidentifier'] = business.identifier + + mock_publish.publish.assert_called_with('entity.events', payload) diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py index 5a2188fd3e..9823e9d802 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py @@ -479,7 +479,7 @@ async def test_publish_event(): ('Dont process the Filing', True, 'PAID'), ('Dont process the Filing', True, 'WITHDRAWN'), ]) -async def test_process_filing_completed(app, session, mocker, test_name, withdrawal_pending, filing_status): +async def test_skip_process_filing(app, session, mocker, test_name, withdrawal_pending, filing_status): """Assert that an filling can be processed.""" # vars filing_type = 'continuationIn' From 5f9e8d0f1b7c08253c82b30f894bc48ee374325a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Tue, 11 Feb 2025 13:12:39 -0800 Subject: [PATCH 065/133] Update version.py (#3220) - updated version to 2.141.0 --- legal-api/src/legal_api/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index d7d51345a7..f0ae7ce1af 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.140.0' # pylint: disable=invalid-name +__version__ = '2.141.0' # pylint: disable=invalid-name From 652d5ef9f827b7a6d8e3de15c2ebc28b72382f2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Tue, 11 Feb 2025 13:13:24 -0800 Subject: [PATCH 066/133] Update version.py (#3219) - updated version to 2.141.0 --- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 81c66b5c24..8b52684ce8 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ 
b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.140.0' # pylint: disable=invalid-name +__version__ = '2.141.0' # pylint: disable=invalid-name From ac586807145a0a65f7ba6a4df7e5ebc698b542b3 Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Fri, 21 Feb 2025 01:08:01 +0000 Subject: [PATCH 067/133] 22998 Merge DB versioning feature branch into main (#3240) * 22943 update models to use new db_versioning (#3084) * 22943 update models to use new db_versioning * update colin_sync * 22947 - Update entity-bn to use the new db versioning (#3095) * Update entity-bn to use the new db versioning * 22948 - update emailer to use new db versioning (#3092) * enable emailer new db versioning * add db-versioning feature flags * update legal-api installation from the feature branch before merged to main * update unit tests configs * 22952 update furnishing job use versioning proxy (#3100) * 22952 update furnishings-job to use the versioning proxy * update ff value * 22954 - Verify future-effective-filings job to use new db versioning (#3104) * Remove future-effective-job flag * 22956 - Verify update legal filings job to use new db versioning (#3105) * update requirements for legal-api and schemas * remove update-legal-filings-job FFs for db-versioning * 22955 - Verify update-colin-filings job to use new db versioning (#3112) * Remove update-colin-filings-job flag * Fix: handle resolutions in versioned business objects * 22953 - verify email reminder job new db versioning (#3111) * update requirements * update to use new db versioning * update FFs value in other services * bump up attrs version to make it compatible * 22950 update dissolution jobs use new db versioning (#3110) * 22950 update dissolution_jobs use new db versioning * fix FF db versioning values * 22945 Filer - Updated to use new db versioning (#3113) * 22949 update digital credentials to use new versioning (#3117) * 22949 update digital credentials to use new versioning * update unit test, fix lint issue * 22943 update models to use new db_versioning (#3084) * 22943 update models to use new db_versioning * update colin_sync * 22947 - Update entity-bn to use the new db versioning (#3095) * Update entity-bn to use the new db versioning * 22948 - update emailer to use new db versioning (#3092) * enable emailer new db versioning * add db-versioning feature flags * update legal-api installation from the feature branch before merged to main * update unit tests configs * 22952 update furnishing job use versioning proxy (#3100) * 22952 update furnishings-job to use the versioning proxy * update ff value * 22954 - Verify future-effective-filings job to use new db versioning (#3104) * Remove future-effective-job flag * 22956 - Verify update legal filings job to use new db versioning (#3105) * update requirements for legal-api and schemas * remove update-legal-filings-job FFs for db-versioning * 22955 - Verify update-colin-filings job to use new db versioning (#3112) * Remove update-colin-filings-job flag * Fix: handle resolutions in versioned business objects * 22953 - verify email reminder job new db versioning (#3111) * update requirements * update to use new db versioning * update FFs value in other services * bump up attrs version to make it compatible * 22950 update dissolution jobs use new db versioning (#3110) * 22950 update dissolution_jobs use new db versioning * fix FF db versioning values * 22945 Filer - Updated to 
use new db versioning (#3113) * 22949 update digital credentials to use new versioning (#3117) * 22949 update digital credentials to use new versioning * update unit test, fix lint issue * 24754 Db Versioning - Update one-to-one relationships and directly query list-type relationships (#3214) * Update sql_versioning * Fix linting * Add relationship builder to versioning * Remove workaround * Update to test dynamic relationship * Update sql_versioning imports to feature branch * 22943 update models to use new db_versioning (#3084) * 22943 update models to use new db_versioning * update colin_sync * 22947 - Update entity-bn to use the new db versioning (#3095) * Update entity-bn to use the new db versioning * 22948 - update emailer to use new db versioning (#3092) * enable emailer new db versioning * add db-versioning feature flags * update legal-api installation from the feature branch before merged to main * update unit tests configs * 22952 update furnishing job use versioning proxy (#3100) * 22952 update furnishings-job to use the versioning proxy * update ff value * 22954 - Verify future-effective-filings job to use new db versioning (#3104) * Remove future-effective-job flag * 22956 - Verify update legal filings job to use new db versioning (#3105) * update requirements for legal-api and schemas * remove update-legal-filings-job FFs for db-versioning * 22955 - Verify update-colin-filings job to use new db versioning (#3112) * Remove update-colin-filings-job flag * Fix: handle resolutions in versioned business objects * 22953 - verify email reminder job new db versioning (#3111) * update requirements * update to use new db versioning * update FFs value in other services * bump up attrs version to make it compatible * 22950 update dissolution jobs use new db versioning (#3110) * 22950 update dissolution_jobs use new db versioning * fix FF db versioning values * 22945 Filer - Updated to use new db versioning (#3113) * 22949 update digital credentials to use new versioning (#3117) * 22949 update digital credentials to use new versioning * update unit test, fix lint issue * 22943 update models to use new db_versioning (#3084) * 22943 update models to use new db_versioning * update colin_sync * 24754 Db Versioning - Update one-to-one relationships and directly query list-type relationships (#3214) * Update sql_versioning * Fix linting * Add relationship builder to versioning * Remove workaround * Update to test dynamic relationship * Update sql_versioning imports to feature branch * 23352 update now filer email trigger (#3217) * 23352 fix_get_business_type * 23352 update_now_filer_email_trigger * fix missing * add the test for temp buisness filing * clean up * Update sql_versioning * Fix linting * Add relationship builder to versioning * Remove workaround * Update to test dynamic relationship * Get versioned relationship updates working * Remove duplicate test * Fix imports * Update makefile * Get remove function working for db versioned relationships * Fix lint * Fix function param typo * 24628 - update a work around to get resolution versioning data (#3239) * update a work around to get resolution versioning data when sync colin * Point requirements back to main --------- Co-authored-by: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Co-authored-by: Aimee Co-authored-by: EasonPan --- jobs/email-reminder/config.py | 2 + jobs/email-reminder/email_reminder.py | 5 +- jobs/email-reminder/flags.json | 15 + jobs/email-reminder/requirements.txt | 4 +- jobs/furnishings/flags.json | 13 +- 
jobs/furnishings/src/furnishings/config.py | 2 + jobs/furnishings/src/furnishings/worker.py | 5 +- jobs/furnishings/tests/unit/__init__.py | 7 +- jobs/involuntary-dissolutions/config.py | 1 + jobs/involuntary-dissolutions/flags.json | 13 +- .../involuntary_dissolutions.py | 3 +- jobs/update-legal-filings/Makefile | 6 +- jobs/update-legal-filings/requirements.txt | 10 +- legal-api/flags.json | 19 +- .../legal_api/models/amalgamating_business.py | 8 +- .../src/legal_api/models/amalgamation.py | 7 +- legal-api/src/legal_api/models/business.py | 10 +- legal-api/src/legal_api/models/db.py | 10 +- legal-api/src/legal_api/models/office.py | 2 +- .../resources/v2/business/colin_sync.py | 40 +- .../services/business_details_version.py | 35 +- legal-api/tests/unit/models/__init__.py | 15 +- legal-api/tests/unit/models/test_business.py | 7 +- legal-api/tests/unit/models/test_filing.py | 36 +- legal-api/tests/unit/reports/test_report.py | 5 +- .../sql-versioning/sql_versioning/__init__.py | 4 +- .../sql_versioning/expression_reflector.py | 46 +++ .../sql_versioning/relationship_builder.py | 381 ++++++++++++++++++ .../sql-versioning/sql_versioning/utils.py | 59 +++ .../sql_versioning/versioning.py | 56 ++- .../common/sql-versioning/tests/__init__.py | 82 ++++ .../sql-versioning/tests/test_versioning.py | 135 +++++-- queue_services/entity-bn/devops/vaults.json | 3 +- queue_services/entity-bn/flags.json | 14 + .../bn_processors/change_of_registration.py | 8 +- .../entity-bn/src/entity_bn/config.py | 2 + .../entity-bn/src/entity_bn/worker.py | 9 +- .../entity-digital-credentials/flags.json | 14 + .../requirements.txt | 2 +- .../src/entity_digital_credentials/config.py | 2 + .../src/entity_digital_credentials/worker.py | 4 +- .../tests/unit/__init__.py | 7 +- queue_services/entity-emailer/flags.json | 14 +- .../entity-emailer/requirements.txt | 2 +- .../src/entity_emailer/config.py | 2 + .../src/entity_emailer/worker.py | 6 +- .../entity-emailer/tests/unit/__init__.py | 42 +- .../entity-emailer/tracker/config.py | 4 +- queue_services/entity-filer/flags.json | 12 +- .../entity-filer/src/entity_filer/config.py | 1 + .../entity-filer/src/entity_filer/worker.py | 12 +- .../entity-filer/tests/unit/__init__.py | 7 +- .../filing_components/test_shares.py | 9 + 53 files changed, 994 insertions(+), 225 deletions(-) create mode 100644 jobs/email-reminder/flags.json create mode 100644 python/common/sql-versioning/sql_versioning/expression_reflector.py create mode 100644 python/common/sql-versioning/sql_versioning/relationship_builder.py create mode 100644 python/common/sql-versioning/sql_versioning/utils.py create mode 100644 queue_services/entity-bn/flags.json create mode 100644 queue_services/entity-digital-credentials/flags.json diff --git a/jobs/email-reminder/config.py b/jobs/email-reminder/config.py index e835295b49..032d9cfefc 100644 --- a/jobs/email-reminder/config.py +++ b/jobs/email-reminder/config.py @@ -44,6 +44,8 @@ def get_named_config(config_name: str = 'production'): class _Config(object): # pylint: disable=too-few-public-methods """Base class configuration.""" + # used to identify versioning flag + SERVICE_NAME = 'emailer-reminder-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SEND_OUTSTANDING_BCOMPS = os.getenv('SEND_OUTSTANDING_BCOMPS', None) diff --git a/jobs/email-reminder/email_reminder.py b/jobs/email-reminder/email_reminder.py index 8436b1ba58..8589665582 100644 --- a/jobs/email-reminder/email_reminder.py +++ b/jobs/email-reminder/email_reminder.py @@ -20,6 +20,7 @@ import 
requests import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from flask import Flask +from legal_api import init_db from legal_api.models import Business, Filing, db # noqa: I001 from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags @@ -30,6 +31,8 @@ import config # pylint: disable=import-error from utils.logging import setup_logging # pylint: disable=import-error + + # noqa: I003 setup_logging( @@ -46,7 +49,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.CONFIGURATION[run_mode]) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): diff --git a/jobs/email-reminder/flags.json b/jobs/email-reminder/flags.json new file mode 100644 index 0000000000..147cc34398 --- /dev/null +++ b/jobs/email-reminder/flags.json @@ -0,0 +1,15 @@ +{ + "flagValues": { + "enable-bc-ccc-ulc": false, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": true, + "emailer-reminder-job": true + } + } +} \ No newline at end of file diff --git a/jobs/email-reminder/requirements.txt b/jobs/email-reminder/requirements.txt index 3dce2c6323..8585b8ce43 100644 --- a/jobs/email-reminder/requirements.txt +++ b/jobs/email-reminder/requirements.txt @@ -7,7 +7,7 @@ Werkzeug==1.0.1 aniso8601==9.0.1 asyncio-nats-client==0.11.4 asyncio-nats-streaming==0.4.0 -attrs==20.3.0 +attrs==23.1.0 blinker==1.4 certifi==2020.12.5 click==7.1.2 @@ -29,4 +29,4 @@ six==1.15.0 urllib3==1.26.11 git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning -git+https://github.com/bcgov/business-schemas.git@2.15.38#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas diff --git a/jobs/furnishings/flags.json b/jobs/furnishings/flags.json index 72c489d6cf..fbca79a275 100644 --- a/jobs/furnishings/flags.json +++ b/jobs/furnishings/flags.json @@ -2,6 +2,17 @@ "flagValues": { "enable-involuntary-dissolution": true, "disable-dissolution-sftp-bcmail": true, - "disable-dissolution-sftp-bclaws": false + "disable-dissolution-sftp-bclaws": false, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": true, + "emailer-reminder-job": true, + "future-effective-job": false + } } } diff --git a/jobs/furnishings/src/furnishings/config.py b/jobs/furnishings/src/furnishings/config.py index 2056481993..9100da6bc4 100644 --- a/jobs/furnishings/src/furnishings/config.py +++ b/jobs/furnishings/src/furnishings/config.py @@ -45,6 +45,8 @@ def get_named_config(config_name: str = 'production'): class _Config: # pylint: disable=too-few-public-methods """Base class configuration.""" + # used to identify versioning flag + SERVICE_NAME = 'furnishings-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/jobs/furnishings/src/furnishings/worker.py b/jobs/furnishings/src/furnishings/worker.py index e10d4e267b..244dbc0a04 100644 --- a/jobs/furnishings/src/furnishings/worker.py +++ b/jobs/furnishings/src/furnishings/worker.py @@ -20,7 +20,8 @@ import sentry_sdk # 
noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from croniter import croniter from flask import Flask -from legal_api.models import Configuration, db +from legal_api import init_db +from legal_api.models import Configuration from legal_api.services.flags import Flags from legal_api.services.queue import QueueService from sentry_sdk.integrations.logging import LoggingIntegration @@ -44,7 +45,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(get_named_config(run_mode)) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): diff --git a/jobs/furnishings/tests/unit/__init__.py b/jobs/furnishings/tests/unit/__init__.py index 3c06391fa8..3c8e4a0701 100644 --- a/jobs/furnishings/tests/unit/__init__.py +++ b/jobs/furnishings/tests/unit/__init__.py @@ -21,7 +21,7 @@ from legal_api.models import Address, Batch, BatchProcessing, Business, Filing, Furnishing, db from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=datetime.timezone.utc) @@ -115,9 +115,8 @@ def factory_completed_filing(business, filing._filing_sub_type = filing_sub_type filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date filing.payment_completion_date = filing_date diff --git a/jobs/involuntary-dissolutions/config.py b/jobs/involuntary-dissolutions/config.py index 6eda133991..3d31862e48 100644 --- a/jobs/involuntary-dissolutions/config.py +++ b/jobs/involuntary-dissolutions/config.py @@ -45,6 +45,7 @@ def get_named_config(config_name: str = 'production'): class _Config(object): # pylint: disable=too-few-public-methods """Base class configuration.""" + SERVICE_NAME = 'dissolutions-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/jobs/involuntary-dissolutions/flags.json b/jobs/involuntary-dissolutions/flags.json index 28dff8c224..75f4418e0b 100644 --- a/jobs/involuntary-dissolutions/flags.json +++ b/jobs/involuntary-dissolutions/flags.json @@ -2,6 +2,17 @@ "flagValues": { "enable-involuntary-dissolution": true, "disable-dissolution-sftp-bcmail": false, - "disable-dissolution-sftp-bclaws": false + "disable-dissolution-sftp-bclaws": false, + "db-versioning": { + "legal-api": true, + "emailer": false, + "filer": false, + "entity-bn": false, + "digital-credentials": false, + "dissolutions-job": true, + "furnishings-job": false, + "emailer-reminder-job": false, + "update-colin-filings-job": false + } } } diff --git a/jobs/involuntary-dissolutions/involuntary_dissolutions.py b/jobs/involuntary-dissolutions/involuntary_dissolutions.py index 54c5bd9fee..a4fda9ced3 100644 --- a/jobs/involuntary-dissolutions/involuntary_dissolutions.py +++ b/jobs/involuntary-dissolutions/involuntary_dissolutions.py @@ -21,6 +21,7 @@ import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from croniter import croniter from flask import Flask +from legal_api import init_db from legal_api.core.filing import Filing as 
CoreFiling from legal_api.models import Batch, BatchProcessing, Business, Configuration, Filing, Furnishing, db # noqa: I001 from legal_api.services.filings.validations.dissolution import DissolutionTypes @@ -52,7 +53,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.CONFIGURATION[run_mode]) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): diff --git a/jobs/update-legal-filings/Makefile b/jobs/update-legal-filings/Makefile index 10a2e9baac..5510d8b979 100644 --- a/jobs/update-legal-filings/Makefile +++ b/jobs/update-legal-filings/Makefile @@ -39,7 +39,7 @@ clean-test: ## clean test files build-req: clean ## Upgrade requirements test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ + pip install --upgrade pip ;\ pip install -Ur requirements/prod.txt ;\ pip freeze | sort > requirements.txt ;\ cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ @@ -48,7 +48,7 @@ build-req: clean ## Upgrade requirements install: clean ## Install python virtual environment test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ + pip install --upgrade pip ;\ pip install -Ur requirements.txt install-dev: ## Install local application @@ -135,7 +135,7 @@ tag: push ## tag image ################################################################################# run: ## Run the project in local - . venv/bin/activate && python notebookreport.py + . venv/bin/activate && python update_legal_filings.py ################################################################################# # Self Documenting Commands # diff --git a/jobs/update-legal-filings/requirements.txt b/jobs/update-legal-filings/requirements.txt index 882d9aeca9..a6922ab6e2 100644 --- a/jobs/update-legal-filings/requirements.txt +++ b/jobs/update-legal-filings/requirements.txt @@ -5,20 +5,20 @@ Jinja2==2.11.3 MarkupSafe==1.1.1 Werkzeug==1.0.1 aniso8601==9.0.1 -attrs==20.3.0 +attrs==23.1.0 blinker==1.4 certifi==2020.12.5 -click==7.1.2 +click==8.1.3 ecdsa==0.14.1 flask-jwt-oidc==0.3.0 gunicorn==20.1.0 itsdangerous==1.1.0 -jsonschema==3.2.0 +jsonschema==4.19.0 pyasn1==0.4.8 pyrsistent==0.17.3 python-dotenv==0.17.1 python-jose==3.2.0 -pytz==2021.1 +pytz==2024.1 rsa==4.7.2 sentry-sdk==1.20.0 six==1.15.0 @@ -30,4 +30,4 @@ protobuf==3.15.8 git+https://github.com/bcgov/lear.git#subdirectory=colin-api git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning -git+https://github.com/bcgov/business-schemas.git@2.5.12#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas diff --git a/legal-api/flags.json b/legal-api/flags.json index 6f0d1d71ca..3ca7983767 100644 --- a/legal-api/flags.json +++ b/legal-api/flags.json @@ -12,17 +12,14 @@ "exclude-accounts": [] }, "db-versioning": { - "legal-api": false, - "emailer": false, - "filer": false, - "entity-bn": false, - "digital-credentials": false, - "dissolutions-job": false, - "furnishings-job": false, - "emailer-reminder-job": false, - "future-effective-job": false, - "update-colin-filings-job": false, - "update-legal-filings-job": false + "legal-api": true, + "emailer": true, + "filer": true, + "entity-bn": true, + "digital-credentials": 
true, + "dissolutions-job": true, + "furnishings-job": true, + "emailer-reminder-job": true } } } diff --git a/legal-api/src/legal_api/models/amalgamating_business.py b/legal-api/src/legal_api/models/amalgamating_business.py index d719bf8360..97d1b08213 100644 --- a/legal-api/src/legal_api/models/amalgamating_business.py +++ b/legal-api/src/legal_api/models/amalgamating_business.py @@ -20,10 +20,9 @@ from sql_versioning import Versioned from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from ..utils.base import BaseEnum -from .db import db +from .db import db, VersioningProxy # noqa: I001 class AmalgamatingBusiness(db.Model, Versioned): # pylint: disable=too-many-instance-attributes @@ -62,7 +61,7 @@ def save(self): def get_revision(cls, transaction_id, amalgamation_id): """Get amalgamating businesses for the given transaction id.""" # pylint: disable=singleton-comparison; - amalgamating_businesses_version = version_class(AmalgamatingBusiness) + amalgamating_businesses_version = VersioningProxy.version_class(db.session(), AmalgamatingBusiness) amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ .filter(amalgamating_businesses_version.transaction_id <= transaction_id) \ .filter(amalgamating_businesses_version.operation_type == 0) \ @@ -92,9 +91,10 @@ def get_all_revision(cls, business_id, tombstone=False): .filter(AmalgamatingBusiness.business_id == business_id) \ .all() else: - amalgamating_businesses_version = version_class(AmalgamatingBusiness) + amalgamating_businesses_version = VersioningProxy.version_class(db.session(), AmalgamatingBusiness) amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ .filter(amalgamating_businesses_version.operation_type == 0) \ .filter(amalgamating_businesses_version.business_id == business_id) \ .order_by(amalgamating_businesses_version.transaction_id).all() + return amalgamating_businesses diff --git a/legal-api/src/legal_api/models/amalgamation.py b/legal-api/src/legal_api/models/amalgamation.py index d5eabc237b..7f024d7e8c 100644 --- a/legal-api/src/legal_api/models/amalgamation.py +++ b/legal-api/src/legal_api/models/amalgamation.py @@ -21,10 +21,9 @@ from sql_versioning import Versioned from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from ..utils.base import BaseEnum -from .db import db +from .db import db, VersioningProxy # noqa: I001 class Amalgamation(db.Model, Versioned): # pylint: disable=too-many-instance-attributes @@ -92,7 +91,7 @@ def get_revision_by_id(cls, amalgamation_id, transaction_id=None, tombstone=Fals .filter(Amalgamation.id == amalgamation_id) \ .one_or_none() else: - amalgamation_version = version_class(Amalgamation) + amalgamation_version = VersioningProxy.version_class(db.session(), Amalgamation) amalgamation = db.session.query(amalgamation_version) \ .filter(amalgamation_version.transaction_id <= transaction_id) \ .filter(amalgamation_version.operation_type == 0) \ @@ -106,7 +105,7 @@ def get_revision_by_id(cls, amalgamation_id, transaction_id=None, tombstone=Fals def get_revision(cls, transaction_id, business_id): """Get amalgamation for the given transaction id.""" # pylint: disable=singleton-comparison; - amalgamation_version = version_class(Amalgamation) + amalgamation_version = VersioningProxy.version_class(db.session(), Amalgamation) amalgamation = db.session.query(amalgamation_version) \ .filter(amalgamation_version.transaction_id <= transaction_id) \ .filter(amalgamation_version.operation_type == 0) \ diff --git 
a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 552266febb..e4957b36ef 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -27,7 +27,6 @@ from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import aliased, backref from sqlalchemy.sql import and_, exists, func, not_, text -from sqlalchemy_continuum import version_class from legal_api.exceptions import BusinessException from legal_api.utils.base import BaseEnum @@ -37,7 +36,7 @@ from .amalgamation import Amalgamation # noqa: F401, I001, I003 pylint: disable=unused-import from .batch import Batch # noqa: F401, I001, I003 pylint: disable=unused-import from .batch_processing import BatchProcessing # noqa: F401, I001, I003 pylint: disable=unused-import -from .db import db # noqa: I001 +from .db import db, VersioningProxy # noqa: I001 from .party import Party from .share_class import ShareClass # noqa: F401,I001,I003 pylint: disable=unused-import @@ -267,9 +266,10 @@ class AssociationTypes(Enum): # relationships filings = db.relationship('Filing', lazy='dynamic') - offices = db.relationship('Office', lazy='dynamic', cascade='all, delete, delete-orphan') + offices = db.relationship('Office', backref='business', lazy='dynamic', cascade='all, delete, delete-orphan') party_roles = db.relationship('PartyRole', lazy='dynamic') - share_classes = db.relationship('ShareClass', lazy='dynamic', cascade='all, delete, delete-orphan') + share_classes = db.relationship('ShareClass', backref='business', lazy='dynamic', + cascade='all, delete, delete-orphan') aliases = db.relationship('Alias', lazy='dynamic') resolutions = db.relationship('Resolution', lazy='dynamic') documents = db.relationship('Document', lazy='dynamic') @@ -707,7 +707,7 @@ def get_alternate_names(self) -> dict: alternate_names = [] # Fetch aliases and related filings in a single query - alias_version = version_class(Alias) + alias_version = VersioningProxy.version_class(db.session(), Alias) filing_alias = aliased(Filing) aliases_query = db.session.query( alias_version.alias, diff --git a/legal-api/src/legal_api/models/db.py b/legal-api/src/legal_api/models/db.py index 67b3916632..6103dd2c22 100644 --- a/legal-api/src/legal_api/models/db.py +++ b/legal-api/src/legal_api/models/db.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Create SQLAlchenmy and Schema managers. +"""Create SQLAlchemy and Schema managers. These will get initialized by the application using the models """ @@ -267,7 +267,7 @@ def version_class(cls, session, obj): @debug def setup_versioning(): - """Set up and initialize versioining switching. + """Set up and initialize versioning switching. 
:return: None """ @@ -287,7 +287,7 @@ def clear_transaction(session, transaction): # TODO: enable versioning switching -# it should be called before data model initialzed, otherwise, old versioning doesn't work properly -# setup_versioning() +# it should be called before data model initialized, otherwise, old versioning doesn't work properly +setup_versioning() -make_versioned(user_cls=None, manager=versioning_manager) +# make_versioned(user_cls=None, manager=versioning_manager) diff --git a/legal-api/src/legal_api/models/office.py b/legal-api/src/legal_api/models/office.py index f04d16e37d..974f2dad9e 100644 --- a/legal-api/src/legal_api/models/office.py +++ b/legal-api/src/legal_api/models/office.py @@ -34,7 +34,7 @@ class Office(db.Model, Versioned): # pylint: disable=too-few-public-methods id = db.Column(db.Integer, primary_key=True) office_type = db.Column('office_type', db.String(75), db.ForeignKey('office_types.identifier')) business_id = db.Column('business_id', db.Integer, db.ForeignKey('businesses.id'), index=True) - addresses = db.relationship('Address', lazy='dynamic', cascade='all, delete, delete-orphan') + addresses = db.relationship('Address', backref='office', lazy='dynamic', cascade='all, delete, delete-orphan') deactivated_date = db.Column('deactivated_date', db.DateTime(timezone=True), default=None) # relationships diff --git a/legal-api/src/legal_api/resources/v2/business/colin_sync.py b/legal-api/src/legal_api/resources/v2/business/colin_sync.py index f87358f6c8..196cd97faf 100644 --- a/legal-api/src/legal_api/resources/v2/business/colin_sync.py +++ b/legal-api/src/legal_api/resources/v2/business/colin_sync.py @@ -21,7 +21,6 @@ from flask import current_app, jsonify, request from flask_cors import cross_origin from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from legal_api.exceptions import BusinessException from legal_api.models import ( @@ -42,6 +41,7 @@ db, ) from legal_api.models.colin_event_id import ColinEventId +from legal_api.models.db import VersioningProxy from legal_api.services.business_details_version import VersionedBusinessDetailsService from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime @@ -109,7 +109,6 @@ def get_completed_filings_for_colin(): current_app.logger.error(f'dissolution: filingId={filing.id}, missing batch processing info') # to skip this filing and block subsequent filing from syncing in update-colin-filings filing_json['filing']['header']['name'] = None - filings.append(filing_json) return jsonify({'filings': filings}), HTTPStatus.OK @@ -137,7 +136,7 @@ def set_correction_flags(filing_json, filing: Filing): def has_alias_changed(filing) -> bool: """Has alias changed in the given filing.""" - alias_version = version_class(Alias) + alias_version = VersioningProxy.version_class(db.session(), Alias) aliases_query = (db.session.query(alias_version) .filter(or_(alias_version.transaction_id == filing.transaction_id, alias_version.end_transaction_id == filing.transaction_id)) @@ -150,7 +149,7 @@ def has_office_changed(filing) -> bool: """Has office changed in the given filing.""" offices = db.session.query(Office).filter(Office.business_id == filing.business_id).all() - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) addresses_query = (db.session.query(address_version) .filter(or_(address_version.transaction_id == filing.transaction_id, address_version.end_transaction_id == filing.transaction_id)) @@ 
-162,7 +161,7 @@ def has_office_changed(filing) -> bool: def has_party_changed(filing: Filing) -> bool: """Has party changed in the given filing.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles_query = (db.session.query(party_role_version) .filter(or_(party_role_version.transaction_id == filing.transaction_id, party_role_version.end_transaction_id == filing.transaction_id)) @@ -177,7 +176,7 @@ def has_party_changed(filing: Filing) -> bool: filing.business_id, role=PartyRole.RoleTypes.DIRECTOR.value) - party_version = version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) for party_role in party_roles: parties_query = (db.session.query(party_version) .filter(or_(party_version.transaction_id == filing.transaction_id, @@ -188,7 +187,7 @@ def has_party_changed(filing: Filing) -> bool: return True party = VersionedBusinessDetailsService.get_party_revision(filing.transaction_id, party_role['id']) - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) # Has party delivery/mailing address modified address_query = (db.session.query(address_version) .filter(or_(address_version.transaction_id == filing.transaction_id, @@ -203,7 +202,7 @@ def has_party_changed(filing: Filing) -> bool: def has_resolution_changed(filing: Filing) -> bool: """Has resolution changed in the given filing.""" - resolution_version = version_class(Resolution) + resolution_version = VersioningProxy.version_class(db.session(), Resolution) resolution_query = (db.session.query(resolution_version) .filter(or_(resolution_version.transaction_id == filing.transaction_id, resolution_version.end_transaction_id == filing.transaction_id)) @@ -214,7 +213,7 @@ def has_resolution_changed(filing: Filing) -> bool: def has_share_changed(filing: Filing) -> bool: """Has share changed in the given filing.""" - share_class_version = version_class(ShareClass) + share_class_version = VersioningProxy.version_class(db.session(), ShareClass) share_class_query = (db.session.query(share_class_version) .filter(or_(share_class_version.transaction_id == filing.transaction_id, share_class_version.end_transaction_id == filing.transaction_id)) @@ -224,7 +223,7 @@ def has_share_changed(filing: Filing) -> bool: return True share_classes = VersionedBusinessDetailsService.get_share_class_revision(filing.transaction_id, filing.business_id) - series_version = version_class(ShareSeries) + series_version = VersioningProxy.version_class(db.session(), ShareSeries) share_series_query = (db.session.query(series_version) .filter(or_(series_version.transaction_id == filing.transaction_id, series_version.end_transaction_id == filing.transaction_id)) @@ -285,11 +284,28 @@ def _set_offices(primary_or_holding_business, amalgamation_filing, transaction_i def _set_shares(primary_or_holding_business, amalgamation_filing, transaction_id): - # copy shares + """Set shares from holding/primary business.""" + # Copy shares share_classes = VersionedBusinessDetailsService.get_share_class_revision(transaction_id, primary_or_holding_business.id) amalgamation_filing['shareStructure'] = {'shareClasses': share_classes} - business_dates = [item.resolution_date.isoformat() for item in primary_or_holding_business.resolutions] + + # Get resolution dates using versioned query + resolution_version = VersioningProxy.version_class(db.session(), Resolution) + resolutions_query = ( + 
db.session.query(resolution_version.resolution_date) + .filter(resolution_version.transaction_id <= transaction_id) # Get records valid at or before the transaction + .filter(resolution_version.operation_type != 2) # Exclude deleted records + .filter(resolution_version.business_id == primary_or_holding_business.id) + .filter(or_( + resolution_version.end_transaction_id.is_(None), # Records not yet ended + resolution_version.end_transaction_id > transaction_id # Records ended after our transaction + )) + .order_by(resolution_version.transaction_id) + .all() + ) + + business_dates = [res.resolution_date.isoformat() for res in resolutions_query] if business_dates: amalgamation_filing['shareStructure']['resolutionDates'] = business_dates diff --git a/legal-api/src/legal_api/services/business_details_version.py b/legal-api/src/legal_api/services/business_details_version.py index bd0693af62..61c7c3358b 100644 --- a/legal-api/src/legal_api/services/business_details_version.py +++ b/legal-api/src/legal_api/services/business_details_version.py @@ -18,7 +18,6 @@ import pycountry from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from legal_api.models import ( Address, @@ -33,6 +32,7 @@ ShareSeries, db, ) +from legal_api.models.db import VersioningProxy from legal_api.utils.legislation_datetime import LegislationDatetime @@ -210,7 +210,7 @@ def get_company_details_revision(filing_id, business_id) -> dict: @staticmethod def get_business_revision(transaction_id, business) -> dict: """Consolidates the business info as of a particular transaction.""" - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id <= transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -223,7 +223,7 @@ def get_business_revision(transaction_id, business) -> dict: @staticmethod def get_business_revision_obj(transaction_id, business_id): """Return business version object associated with a given transaction id for a business.""" - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id <= transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -238,7 +238,7 @@ def find_last_value_from_business_revision(transaction_id, business_id, is_dissolution_date=False, is_restoration_expiry_date=False) -> dict: """Get business info with last value of dissolution_date or restoration_expiry_date.""" - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) query = db.session.query(business_version) \ .filter(business_version.transaction_id < transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -255,7 +255,7 @@ def get_business_revision_after_filing(filing_id, business_id) -> dict: """Consolidates the business info as of a particular transaction.""" business = Business.find_by_internal_id(business_id) filing = Filing.find_by_id(filing_id) - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id > filing.transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -267,8 +267,8 @@ def get_business_revision_after_filing(filing_id, business_id) 
-> dict: def get_office_revision(transaction_id, business_id) -> dict: """Consolidates all office changes upto the given transaction id.""" offices_json = {} - address_version = version_class(Address) - offices_version = version_class(Office) + address_version = VersioningProxy.version_class(db.session(), Address) + offices_version = VersioningProxy.version_class(db.session(), Office) offices = db.session.query(offices_version) \ .filter(offices_version.transaction_id <= transaction_id) \ @@ -296,7 +296,7 @@ def get_office_revision(transaction_id, business_id) -> dict: @staticmethod def get_party_role_revision(transaction_id, business_id, is_ia_or_after=False, role=None) -> dict: """Consolidates all party changes upto the given transaction id.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles = db.session.query(party_role_version)\ .filter(party_role_version.transaction_id <= transaction_id) \ .filter(party_role_version.operation_type != 2) \ @@ -322,7 +322,7 @@ def get_party_role_revision(transaction_id, business_id, is_ia_or_after=False, r @staticmethod def get_share_class_revision(transaction_id, business_id) -> dict: """Consolidates all share classes upto the given transaction id.""" - share_class_version = version_class(ShareClass) + share_class_version = VersioningProxy.version_class(db.session(), ShareClass) share_classes_list = db.session.query(share_class_version) \ .filter(share_class_version.transaction_id <= transaction_id) \ .filter(share_class_version.operation_type != 2) \ @@ -343,7 +343,7 @@ def get_share_class_revision(transaction_id, business_id) -> dict: @staticmethod def get_share_series_revision(transaction_id, share_class_id) -> dict: """Consolidates all share series under the share class upto the given transaction id.""" - share_series_version = version_class(ShareSeries) + share_series_version = VersioningProxy.version_class(db.session(), ShareSeries) share_series_list = db.session.query(share_series_version) \ .filter(share_series_version.transaction_id <= transaction_id) \ .filter(share_series_version.operation_type != 2) \ @@ -362,7 +362,7 @@ def get_share_series_revision(transaction_id, share_class_id) -> dict: @staticmethod def get_name_translations_revision(transaction_id, business_id) -> dict: """Consolidates all name translations upto the given transaction id.""" - name_translations_version = version_class(Alias) + name_translations_version = VersioningProxy.version_class(db.session(), Alias) name_translations_list = db.session.query(name_translations_version) \ .filter(name_translations_version.transaction_id <= transaction_id) \ .filter(name_translations_version.operation_type != 2) \ @@ -380,7 +380,7 @@ def get_name_translations_revision(transaction_id, business_id) -> dict: @staticmethod def get_name_translations_before_revision(transaction_id, business_id) -> dict: """Consolidates all name translations before deletion given a transaction id.""" - name_translations_version = version_class(Alias) + name_translations_version = VersioningProxy.version_class(db.session(), Alias) name_translations_list = db.session.query(name_translations_version) \ .filter(name_translations_version.transaction_id <= transaction_id) \ .filter(name_translations_version.operation_type != 2) \ @@ -396,7 +396,7 @@ def get_name_translations_before_revision(transaction_id, business_id) -> dict: @staticmethod def get_resolution_dates_revision(transaction_id, business_id) -> dict: 
"""Consolidates all resolutions upto the given transaction id.""" - resolution_version = version_class(Resolution) + resolution_version = VersioningProxy.version_class(db.session(), Resolution) resolution_list = db.session.query(resolution_version) \ .filter(resolution_version.transaction_id <= transaction_id) \ .filter(resolution_version.operation_type != 2) \ @@ -439,7 +439,7 @@ def party_role_revision_json(transaction_id, party_role_revision, is_ia_or_after @staticmethod def get_party_revision(transaction_id, party_id) -> dict: """Consolidates all party changes upto the given transaction id.""" - party_version = version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) party = db.session.query(party_version) \ .filter(party_version.transaction_id <= transaction_id) \ .filter(party_version.operation_type != 2) \ @@ -500,9 +500,8 @@ def party_revision_json(transaction_id, party_revision, is_ia_or_after) -> dict: if 'addressType' in member_mailing_address: del member_mailing_address['addressType'] member['mailingAddress'] = member_mailing_address - else: - if party_revision.delivery_address: - member['mailingAddress'] = member['deliveryAddress'] + elif party_revision.delivery_address_id: + member['mailingAddress'] = member['deliveryAddress'] if is_ia_or_after: member['officer']['id'] = str(party_revision.id) @@ -514,7 +513,7 @@ def party_revision_json(transaction_id, party_revision, is_ia_or_after) -> dict: @staticmethod def get_address_revision(transaction_id, address_id) -> dict: """Consolidates all party changes upto the given transaction id.""" - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) address = db.session.query(address_version) \ .filter(address_version.transaction_id <= transaction_id) \ .filter(address_version.operation_type != 2) \ diff --git a/legal-api/tests/unit/models/__init__.py b/legal-api/tests/unit/models/__init__.py index 3853258d2a..dc7742ed76 100644 --- a/legal-api/tests/unit/models/__init__.py +++ b/legal-api/tests/unit/models/__init__.py @@ -42,7 +42,7 @@ db, ) from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime, timezone from tests import EPOCH_DATETIME, FROZEN_DATETIME @@ -160,8 +160,7 @@ def factory_business(identifier, no_dissolution=no_dissolution) # Versioning business - uow = versioning_manager.unit_of_work(db.session) - uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business.save() return business @@ -250,9 +249,8 @@ def factory_completed_filing(business, filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date filing.payment_completion_date = filing_date @@ -292,9 +290,8 @@ def factory_epoch_filing(business, filing_date=FROZEN_DATETIME): """Create an error filing.""" filing = Filing() filing.business_id = business.id - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.filing_date = 
filing_date filing.filing_json = {'filing': {'header': {'name': 'lear_epoch'}}} filing.save() diff --git a/legal-api/tests/unit/models/test_business.py b/legal-api/tests/unit/models/test_business.py index e7e6289b3a..00f157b84b 100644 --- a/legal-api/tests/unit/models/test_business.py +++ b/legal-api/tests/unit/models/test_business.py @@ -37,7 +37,7 @@ PartyRole, db, ) -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.services import flags from legal_api.utils.legislation_datetime import LegislationDatetime from tests import EPOCH_DATETIME, TIMEZONE_OFFSET @@ -722,8 +722,7 @@ def test_amalgamated_into_business_json(session, test_name, existing_business_st filing.save() # Versioning business - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = Business( legal_name='Test - Legal Name', @@ -750,7 +749,7 @@ def test_amalgamated_into_business_json(session, test_name, existing_business_st db.session.add(existing_business) db.session.commit() - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.business_id = business.id filing.save() diff --git a/legal-api/tests/unit/models/test_filing.py b/legal-api/tests/unit/models/test_filing.py index d90f30a6d7..ffe40b294a 100644 --- a/legal-api/tests/unit/models/test_filing.py +++ b/legal-api/tests/unit/models/test_filing.py @@ -39,7 +39,7 @@ from legal_api.exceptions import BusinessException from legal_api.models import Business, Filing, User -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from tests import EPOCH_DATETIME from tests.conftest import not_raises from tests.unit.models import ( @@ -109,8 +109,7 @@ def test_filing_orm_delete_blocked_if_completed(session): """Assert that attempting to delete a filing will raise a BusinessException.""" from legal_api.exceptions import BusinessException - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) b = factory_business('CP1234567') @@ -120,7 +119,7 @@ def test_filing_orm_delete_blocked_if_completed(session): filing.filing_json = ANNUAL_REPORT filing.payment_token = 'a token' filing.payment_completion_date = datetime.datetime.utcnow() - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.save() with pytest.raises(BusinessException) as excinfo: @@ -332,15 +331,14 @@ def test_get_filing_by_payment_token(session): def test_get_filings_by_status(session): """Assert that a filing can be retrieved by status.""" - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = factory_business('CP1234567') payment_token = '1000' filing = Filing() filing.business_id = business.id filing.filing_json = ANNUAL_REPORT filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = datetime.datetime.utcnow() filing.save() @@ -359,7 +357,7 @@ def test_get_filings_by_status__default_order(session): # setup base_filing = copy.deepcopy(FILING_HEADER) base_filing['specialResolution'] = SPECIAL_RESOLUTION - uow = versioning_manager.unit_of_work(session) + transaction_id = VersioningProxy.get_transaction_id(session()) 
business = factory_business('CP1234567') completion_date = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) @@ -369,7 +367,6 @@ def test_get_filings_by_status__default_order(session): file_counter = -1 with freeze_time(completion_date): for i in range(0, 5): - transaction = uow.create_transaction(session) payment_token = str(i) effective_date = f'200{i}-04-15T00:00:00+00:00' @@ -380,7 +377,7 @@ def test_get_filings_by_status__default_order(session): filing.filing_json = base_filing filing.effective_date = datetime.datetime.fromisoformat(effective_date) filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = completion_date filing.save() @@ -401,8 +398,7 @@ def test_get_filings_by_status__default_order(session): def test_get_most_recent_filing_by_legal_type_in_json(session): """Assert that the most recent legal filing can be retrieved.""" business = factory_business('CP1234567') - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) for i in range(1, 5): effective_date = f'200{i}-07-01T00:00:00+00:00' @@ -419,7 +415,7 @@ def test_get_most_recent_filing_by_legal_type_in_json(session): filing.filing_json = base_filing filing.effective_date = datetime.datetime.fromisoformat(effective_date) filing.payment_token = 'token' - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = completion_date filing.save() @@ -435,8 +431,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): Create 3 filings, find the 2 one by the type only. """ business = factory_business('CP1234567') - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) # filing 1 effective_date = '2001-07-01T00:00:00+00:00' @@ -449,7 +444,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing1.filing_json = base_filing filing1.effective_date = datetime.datetime.fromisoformat(effective_date) filing1.payment_token = 'token' - filing1.transaction_id = transaction.id + filing1.transaction_id = transaction_id filing1.payment_completion_date = completion_date filing1.save() @@ -466,7 +461,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing2.filing_json = base_filing filing2.effective_date = datetime.datetime.fromisoformat(effective_date) filing2.payment_token = 'token' - filing2.transaction_id = transaction.id + filing2.transaction_id = transaction_id filing2.payment_completion_date = completion_date filing2.save() @@ -481,7 +476,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing3.filing_json = base_filing filing3.effective_date = datetime.datetime.fromisoformat(effective_date) filing3.payment_token = 'token' - filing3.transaction_id = transaction.id + filing3.transaction_id = transaction_id filing3.payment_completion_date = completion_date filing3.save() @@ -501,8 +496,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): def test_get_filings_by_status_before_go_live_date(session, test_type, days, expected, status): """Assert that a filing can be retrieved by status.""" import copy - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = 
factory_business('CP1234567') payment_token = '1000' ar = copy.deepcopy(ANNUAL_REPORT) @@ -515,7 +509,7 @@ def test_get_filings_by_status_before_go_live_date(session, test_type, days, exp filing.business_id = business.id filing.filing_json = ar filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = datetime.datetime.utcnow() filing.save() diff --git a/legal-api/tests/unit/reports/test_report.py b/legal-api/tests/unit/reports/test_report.py index 05652094df..a94edf3108 100644 --- a/legal-api/tests/unit/reports/test_report.py +++ b/legal-api/tests/unit/reports/test_report.py @@ -41,7 +41,7 @@ ) from legal_api.models import db # noqa:I001 -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.reports.report import Report # noqa:I001 from legal_api.services import VersionedBusinessDetailsService # noqa:I001 from legal_api.utils.legislation_datetime import LegislationDatetime @@ -249,8 +249,7 @@ def test_alteration_name_change(session): def update_business_legal_name(business, legal_name): """Update business legal name.""" - uow = versioning_manager.unit_of_work(db.session) - uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business.legal_name = legal_name business.save() diff --git a/python/common/sql-versioning/sql_versioning/__init__.py b/python/common/sql-versioning/sql_versioning/__init__.py index 9ece3f89e6..c90ceea973 100644 --- a/python/common/sql-versioning/sql_versioning/__init__.py +++ b/python/common/sql-versioning/sql_versioning/__init__.py @@ -14,8 +14,8 @@ """Versioning extension for SQLAlchemy.""" from .debugging import debug from .versioning import (Base, TransactionFactory, TransactionManager, - Versioned, disable_versioning, enable_versioning, - version_class) + Versioned, disable_versioning, enable_versioning) +from .utils import version_class __all__ = ( "Base", diff --git a/python/common/sql-versioning/sql_versioning/expression_reflector.py b/python/common/sql-versioning/sql_versioning/expression_reflector.py new file mode 100644 index 0000000000..8acc67e0d8 --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/expression_reflector.py @@ -0,0 +1,46 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import sqlalchemy as sa +from sqlalchemy.sql.expression import bindparam + +from .utils import version_table + + +class VersionExpressionReflector(sa.sql.visitors.ReplacingCloningVisitor): + def __init__(self, parent, relationship): + self.parent = parent + self.relationship = relationship + + def replace(self, column): + if not isinstance(column, sa.Column): + return + try: + table = version_table(column.table) + except KeyError: + reflected_column = column + else: + reflected_column = table.c[column.name] + if ( + column in self.relationship.local_columns and + table == self.parent.__table__ + ): + reflected_column = bindparam( + column.key, + getattr(self.parent, column.key) + ) + + return reflected_column + + def __call__(self, expr): + return self.traverse(expr) diff --git a/python/common/sql-versioning/sql_versioning/relationship_builder.py b/python/common/sql-versioning/sql_versioning/relationship_builder.py new file mode 100644 index 0000000000..c7642b136e --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/relationship_builder.py @@ -0,0 +1,381 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sqlalchemy as sa +from enum import Enum + +from .expression_reflector import VersionExpressionReflector +from .utils import adapt_columns, version_class + + +class Operation(Enum): + INSERT = 0 + UPDATE = 1 + DELETE = 2 + + +class RelationshipBuilder(object): + def __init__(self, model, property_): + self.property = property_ + self.model = model + + def one_to_many_subquery(self, obj): + tx_column = "transaction_id" + + remote_alias = sa.orm.aliased(self.remote_cls) + primary_keys = [ + getattr(remote_alias, column.name) for column + in sa.inspect(remote_alias).mapper.columns + if column.primary_key and column.name != tx_column + ] + + return sa.exists( + sa.select(1).where( + sa.and_( + getattr(remote_alias, tx_column) <= + getattr(obj, tx_column), + *[ + getattr(remote_alias, pk.name) == + getattr(self.remote_cls, pk.name) + for pk in primary_keys + ] + ) + ).group_by( + *primary_keys + ).having( + sa.func.max(getattr(remote_alias, tx_column)) == + getattr(self.remote_cls, tx_column) + ).correlate(self.local_cls, self.remote_cls) + ) + + def many_to_one_subquery(self, obj): + tx_column = "transaction_id" + reflector = VersionExpressionReflector(obj, self.property) + subquery = sa.select( + sa.func.max(getattr(self.remote_cls, tx_column)) + ).where( + sa.and_( + getattr(self.remote_cls, tx_column) <= + getattr(obj, tx_column), + reflector(self.property.primaryjoin) + ) + ) + subquery = subquery.scalar_subquery() + + return getattr(self.remote_cls, tx_column) == subquery + + def query(self, obj): + session = sa.orm.object_session(obj) + return ( + session.query(self.remote_cls) + .filter( + self.criteria(obj) + ) + ) + + def process_query(self, query): + """ + Process given SQLAlchemy Query object depending on the associated + RelationshipProperty object. 
+ + :param query: SQLAlchemy Query object + """ + if self.property.lazy == 'dynamic': + return query + if self.property.uselist is False: + return query.first() + return query.all() + + def criteria(self, obj): + direction = self.property.direction + + if self.versioned: + if direction.name == 'ONETOMANY': + return self.one_to_many_criteria(obj) + # TODO: Get many-to-many relationships working + # elif direction.name == 'MANYTOMANY': + # return self.many_to_many_criteria(obj) + elif direction.name == 'MANYTOONE': + return self.many_to_one_criteria(obj) + else: + reflector = VersionExpressionReflector(obj, self.property) + return reflector(self.property.primaryjoin) + + def many_to_many_criteria(self, obj): + """ + Returns the many-to-many query. + + Looks up remote items through associations and for each item returns + the last version with a transaction less than or equal to the + transaction of `obj`. This must hold true for both the association and + the remote relation items. + + Example + ------- + Select all tags of article with id 3 and transaction 5 + + .. code-block:: sql + + SELECT tags_version.* + FROM tags_version + WHERE EXISTS ( + SELECT 1 + FROM article_tag_version + WHERE article_id = 3 + AND tag_id = tags_version.id + AND operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM article_tag_version as article_tag_version2 + WHERE article_tag_version2.tag_id = article_tag_version.tag_id + AND article_tag_version2.tx_id <= 5 + GROUP BY article_tag_version2.tag_id + HAVING + MAX(article_tag_version2.tx_id) = + article_tag_version.tx_id + ) + ) + AND EXISTS ( + SELECT 1 + FROM tags_version as tags_version_2 + WHERE tags_version_2.id = tags_version.id + AND tags_version_2.tx_id <= 5 + GROUP BY tags_version_2.id + HAVING MAX(tags_version_2.tx_id) = tags_version.tx_id + ) + AND operation_type != 2 + """ + return sa.and_( + self.association_subquery(obj), + self.one_to_many_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + def many_to_one_criteria(self, obj): + """Returns the many-to-one query. + + Returns the item on the 'one' side with the highest transaction id + as long as it is less than or equal to the transaction id of the `obj`. + + Example + ------- + Look up the Article of a Tag with article_id = 4 and + transaction_id = 5 + + .. code-block:: sql + + SELECT * + FROM articles_version + WHERE id = 4 + AND transaction_id = ( + SELECT max(transaction_id) + FROM articles_version + WHERE transaction_id <= 5 + AND id = 4 + ) + AND operation_type != 2 + + """ + reflector = VersionExpressionReflector(obj, self.property) + return sa.and_( + reflector(self.property.primaryjoin), + self.many_to_one_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + def one_to_many_criteria(self, obj): + """ + Returns the one-to-many query. + + For each item on the 'many' side, returns its latest version as long as + the transaction of that version is less than or equal to the transaction + of `obj`. + + Example + ------- + Using the Article-Tags relationship, where we look for tags of + article_version with id = 3 and transaction = 5, the sql produced is + + ..
code-block:: sql + + SELECT tags_version.* + FROM tags_version + WHERE tags_version.article_id = 3 + AND tags_version.operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM tags_version as tags_version_last + WHERE tags_version_last.transaction_id <= 5 + AND tags_version_last.id = tags_version.id + GROUP BY tags_version_last.id + HAVING + MAX(tags_version_last.transaction_id) = + tags_version.transaction_id + ) + + """ + reflector = VersionExpressionReflector(obj, self.property) + return sa.and_( + reflector(self.property.primaryjoin), + self.one_to_many_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + @property + def reflected_relationship(self): + """ + Builds a reflected one-to-many, one-to-one and many-to-one + relationship between two version classes. + """ + @property + def relationship(obj): + query = self.query(obj) + return self.process_query(query) + return relationship + + def association_subquery(self, obj): + """ + Returns an EXISTS clause that checks if an association exists for given + SQLAlchemy declarative object. This query is used by + many_to_many_criteria method. + + Example query: + + .. code-block:: sql + + EXISTS ( + SELECT 1 + FROM article_tag_version + WHERE article_id = 3 + AND tag_id = tags_version.id + AND operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM article_tag_version as article_tag_version2 + WHERE article_tag_version2.tag_id = article_tag_version.tag_id + AND article_tag_version2.tx_id <=5 + AND article_tag_version2.article_id = 3 + GROUP BY article_tag_version2.tag_id + HAVING + MAX(article_tag_version2.tx_id) = + article_tag_version.tx_id + ) + ) + + :param obj: SQLAlchemy declarative object + """ + + tx_column = "transaction_id" + join_column = self.property.primaryjoin.right.name + object_join_column = self.property.primaryjoin.left.name + reflector = VersionExpressionReflector(obj, self.property) + + association_table_alias = self.association_version_table.alias() + association_cols = [ + association_table_alias.c[association_col.name] + for _, association_col + in self.remote_to_association_column_pairs + ] + + association_exists = sa.exists( + sa.select(1).where( + sa.and_( + association_table_alias.c[tx_column] <= + getattr(obj, tx_column), + association_table_alias.c[join_column] == getattr(obj, object_join_column), + *[association_col == + self.association_version_table.c[association_col.name] + for association_col + in association_cols] + ) + ).group_by( + *association_cols + ).having( + sa.func.max(association_table_alias.c[tx_column]) == + self.association_version_table.c[tx_column] + ).correlate(self.association_version_table) + ) + return sa.exists( + sa.select(1).where( + sa.and_( + reflector(self.property.primaryjoin), + association_exists, + self.association_version_table.c.operation_type != + Operation.DELETE.value, + adapt_columns(self.property.secondaryjoin), + ) + ).correlate(self.local_cls, self.remote_cls) + ) + + # TODO: Get many-to-many relationships working. + # def build_association_version_tables(self): + # """ + # Builds many-to-many association version table for given property. + # Association version tables are used for tracking change history of + # many-to-many associations. + # """ + # column = list(self.property.remote_side)[0] + + # self.manager.association_tables.add(column.table) + # builder = TableBuilder( + # self.manager, + # column.table + # ) + # metadata = column.table.metadata + # if builder.parent_table.schema: + # table_name = builder.parent_table.schema + '.' 
+ builder.table_name + # elif metadata.schema: + # table_name = metadata.schema + '.' + builder.table_name + # else: + # table_name = builder.table_name + + # if table_name not in metadata.tables: + # self.association_version_table = table = builder() + # self.manager.association_version_tables.add(table) + # else: + # # may have already been created if we visiting the 'other' side of + # # a self-referential many-to-many relationship + # self.association_version_table = metadata.tables[table_name] + + def __call__(self): + """ + Builds reflected relationship between version classes based on given + parent object's RelationshipProperty. + """ + self.local_cls = version_class(self.model) + self.versioned = False + + if version_class(self.property.mapper.class_): + self.remote_cls = version_class(self.property.mapper.class_) + self.versioned = True + else: + self.remote_cls = self.property.mapper.class_ + + # TODO: Get many-to-many relationships working. + # if (self.property.secondary is not None and + # not self.property.viewonly and + # not self.manager.is_excluded_property( + # self.model, self.property.key)): + # self.build_association_version_tables() + + # # store remote cls to association table column pairs + # self.remote_to_association_column_pairs = [] + # for column_pair in self.property.local_remote_pairs: + # if column_pair[0] in self.property.target.c.values(): + # self.remote_to_association_column_pairs.append(column_pair) + + setattr( + self.local_cls, + self.property.key, + self.reflected_relationship + ) diff --git a/python/common/sql-versioning/sql_versioning/utils.py b/python/common/sql-versioning/sql_versioning/utils.py new file mode 100644 index 0000000000..4318fe16bc --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/utils.py @@ -0,0 +1,59 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sqlalchemy as sa +from contextlib import suppress + + +def version_class(obj): + """Return the version class associated with a model. + + :param obj: The object to get the version class for. + :return: The version class or None if not found. + """ + with suppress(Exception): + versioned_class = obj.__versioned_cls__ + print(f'\033[32mVersioned Class={versioned_class}\033[0m') + return versioned_class + return None + + +def version_table(table): + """ + Return associated version table for given SQLAlchemy Table object. + + :param table: SQLAlchemy Table object + """ + if table.schema: + return table.metadata.tables[ + table.schema + '.' + table.name + '_version' + ] + elif table.metadata.schema: + return table.metadata.tables[ + table.metadata.schema + '.' 
+ table.name + '_version' + ] + else: + return table.metadata.tables[ + table.name + '_version' + ] + + +class VersioningClauseAdapter(sa.sql.visitors.ReplacingCloningVisitor): + def replace(self, col): + if isinstance(col, sa.Column): + table = version_table(col.table) + return table.c.get(col.key) + + +def adapt_columns(expr): + return VersioningClauseAdapter().traverse(expr) diff --git a/python/common/sql-versioning/sql_versioning/versioning.py b/python/common/sql-versioning/sql_versioning/versioning.py index 3739b3e797..a849ed1b4a 100644 --- a/python/common/sql-versioning/sql_versioning/versioning.py +++ b/python/common/sql-versioning/sql_versioning/versioning.py @@ -13,15 +13,15 @@ # limitations under the License. """Versioned mixin class, listeners and other utilities.""" import datetime -from contextlib import suppress from sqlalchemy import (BigInteger, Column, DateTime, Integer, SmallInteger, String, and_, event, func, insert, inspect, select, update) from sqlalchemy.ext.declarative import declarative_base, declared_attr -from sqlalchemy.orm import Session, mapper +from sqlalchemy.orm import Session, mapper, relationships from .debugging import debug +from .relationship_builder import RelationshipBuilder Base = declarative_base() @@ -47,10 +47,31 @@ def _is_obj_modified(obj): return False +def _should_relationship_delete_orphan(session, obj): + """ + Checks if: + 1. This relationship is a many-to-one relationship + 2. If the opposite direction one-to-many relationship parent object has changes + 3. If the opposite direction one-to-many relationship has cascade=delete-orphan + + :param session: The database session instance. + :param obj: The object to inspect for changes. + :return: True if the above checks pass, otherwise False. + """ + should_delete = False + for r in inspect(obj.__class__).relationships: + if r.direction.name == 'MANYTOONE' and r._reverse_property: + reverse_rel, *_ = r._reverse_property + parent_obj = inspect(obj).committed_state.get(reverse_rel.backref, None) + if parent_obj in session.dirty: + should_delete = should_delete or "delete-orphan" in inspect(reverse_rel)._cascade + return should_delete + + def _is_session_modified(session): """Check if the session contains modified versioned objects. - :param session: The database sesseion instance. + :param session: The database session instance. :return: True if the session contains modified versioned objects, otherwise False. """ for obj in versioned_objects(session): @@ -61,19 +82,20 @@ def _is_session_modified(session): return False -def _get_operation_type(session, obj): +def _get_operation_type(session, obj, delete_orphan=False): """Return the operation type for the given object within the session. :param session: The database session instance. :param obj: The object to determine the operation type. :return: The operation type ('I' for insert, 'U' for update, 'D' for delete), or None if unchanged. 
""" + is_orphaned = inspect(obj)._orphaned_outside_of_session if obj in session.new: return 'I' + elif obj in session.deleted or (is_orphaned and delete_orphan): + return 'D' elif obj in session.dirty: return 'U' if _is_obj_modified(obj) else None - elif obj in session.deleted: - return 'D' return None @@ -270,7 +292,8 @@ def _after_flush(session, flush_context): """Trigger after a flush operation to create version records for changed objects.""" try: for obj in versioned_objects(session): - operation_type = _get_operation_type(session, obj) + should_delete_orphan = _should_relationship_delete_orphan(session, obj) + operation_type = _get_operation_type(session, obj, should_delete_orphan) if operation_type: _create_version(session, obj, operation_type) except Exception as e: @@ -359,26 +382,21 @@ def _after_configured(cls): for pending_cls in cls._pending_version_classes: version_cls = pending_cls._version_cls mapper = inspect(pending_cls) + # Now add columns from the original table for c in mapper.columns: # Make sure table's column name and class's property name can be different property_name = mapper.get_property_by_column(c).key if not hasattr(version_cls, property_name): setattr(version_cls, property_name, Column(c.name, c.type)) - delattr(cls, '_pending_version_classes') + # Build relationships + for prop in inspect(cls).iterate_properties: + if type(prop) == relationships.RelationshipProperty: + builder = RelationshipBuilder(cls, prop) + builder() -def version_class(obj): - """Return the version class associated with a model. - - :param obj: The object to get the version class for. - :return: The version class or None if not found. - """ - with suppress(Exception): - versioned_class = obj.__versioned_cls__ - print(f'\033[32mVersioned Class={versioned_class}\033[0m') - return versioned_class - return None + delattr(cls, '_pending_version_classes') def versioned_objects(session): diff --git a/python/common/sql-versioning/tests/__init__.py b/python/common/sql-versioning/tests/__init__.py index e69de29bb2..747b0cbfeb 100644 --- a/python/common/sql-versioning/tests/__init__.py +++ b/python/common/sql-versioning/tests/__init__.py @@ -0,0 +1,82 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for versioning extension. + +Initialization file that holds testing classes. 
+""" +from sqlalchemy import Column, ForeignKey, Integer, String, orm + +from sql_versioning import (Base, TransactionFactory, Versioned, + enable_versioning) + + +enable_versioning() + +Transaction = TransactionFactory.create_transaction_model() + +class Model(Base): + __tablename__ = 'models' + id = Column(Integer, primary_key=True) + name = Column(String) + +class User(Base, Versioned): + __tablename__ = 'users' + + id = Column(Integer, primary_key=True) + name = Column(String) + + # One-to-one versioned relationship + address = orm.relationship('Address', backref='user', uselist=False) + # One-to-one non-versioned relationship + location = orm.relationship('Location', backref='user', uselist=False) + # One-to-many versioned relationship + emails = orm.relationship('Email', backref='user', lazy='dynamic', cascade='all, delete, delete-orphan') + # One-to-many non versioned relationship + items = orm.relationship('Item', backref='user', lazy='dynamic', cascade='all, delete, delete-orphan') + +class Address(Base, Versioned): + __tablename__ = 'addresses' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + +class Location(Base): + __tablename__ = 'locations' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + +class Email(Base, Versioned): + __tablename__ = 'emails' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + + +class Item(Base): + __tablename__ = 'items' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + + +orm.configure_mappers() \ No newline at end of file diff --git a/python/common/sql-versioning/tests/test_versioning.py b/python/common/sql-versioning/tests/test_versioning.py index a7b74cb816..bc5d4e8656 100644 --- a/python/common/sql-versioning/tests/test_versioning.py +++ b/python/common/sql-versioning/tests/test_versioning.py @@ -16,37 +16,9 @@ Test-Suite to ensure that the versioning extension is working as expected. 
""" import pytest -from sqlalchemy import Column, ForeignKey, Integer, String, orm -from sql_versioning import (Base, TransactionFactory, Versioned, - enable_versioning, version_class) - -enable_versioning() - -Transaction = TransactionFactory.create_transaction_model() - -class Model(Base): - __tablename__ = 'models' - id = Column(Integer, primary_key=True) - name = Column(String) - -class User(Base, Versioned): - __tablename__ = 'users' - - id = Column(Integer, primary_key=True) - name = Column(String) - - address = orm.relationship('Address', backref='user', uselist=False) - -class Address(Base, Versioned): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - name = Column(String) - - user_id = Column(Integer, ForeignKey('users.id')) - -orm.configure_mappers() +from sql_versioning import (version_class) +from tests import (Model, User, Address, Location, Email, Item, Transaction) @pytest.mark.parametrize('test_name', ['CLASS','INSTANCE']) @@ -135,6 +107,109 @@ def test_versioning_insert(db, session): assert result_versioned_address.end_transaction_id is None +def test_versioning_relationships(db, session): + user = User(name='user') + address = Address(name='Some address') + location = Location(name='Some location') + emails = [Email(name='primary'), Email(name='secondary')] + items = [Item(name='An item'), Item(name='Another item')] + user.address = address + user.location = location + user.items = items + user.emails = emails + session.add(user) + session.commit() + + user_version = version_class(User) + result_revision = session.query(user_version)\ + .filter(user_version.name=='user')\ + .one_or_none() + + # Test one-to-one relationship + # Versioned + assert result_revision.address.id == address.id + assert result_revision.address.name == "Some address" + assert result_revision.address.user.name == user.name + # Non versioned + assert result_revision.location.id == location.id + assert result_revision.location.name == "Some location" + assert result_revision.location.user.name == user.name + + # Test one-to-many relationship + # Versioned + result_emails = result_revision.emails.all() + assert len(result_emails) == len(emails) + assert result_emails[0].id == emails[0].id + assert result_emails[0].name == "primary" + assert result_emails[1].id == emails[1].id + assert result_emails[1].name == "secondary" + # Non versioned + result_items = result_revision.items.all() + assert len(result_items) == len(items) + assert result_items[0].id == items[0].id + assert result_items[0].name == "An item" + assert result_items[1].id == items[1].id + assert result_items[1].name == "Another item" + + # Test many-to-one relationship + # Note: this is a quirk of the RelationshipBuilder. We don't explicitly establish bi-directionality + # by including the "reverse" side of the relationship (i.e. 
Item.user), but it works anyway + # Versioned + assert result_revision.emails[0].user.name == user.name + assert result_revision.emails[1].user.name == user.name + # Non versioned + assert result_revision.items[0].user == user + assert result_revision.items[1].user == user + + # Test update relationship + user.address = Address(name='Some new address') + session.commit() + + user_version = version_class(User) + result_revisions = session.query(user_version)\ + .filter(user_version.name=='user')\ + .order_by(user_version.transaction_id)\ + .all() + + assert user.address.name == 'Some new address' + assert len(result_revisions) == 2 + assert result_revisions[0].address.name == "Some address" + assert result_revisions[1].address.name == "Some new address" + + +def test_versioning_relationships_remove(db, session): + """Test remove from relationship.""" + user = User(name='test') + for i in range(5): + email = Email(name=f'email {i}') + user.emails.append(email) + session.add(user) + session.commit() + + if existing_emails := user.emails.all(): + for email in existing_emails: + user.emails.remove(email) + session.add(user) + session.commit() + + user = session.query(User).one_or_none() + emails = user.emails.all() + assert not emails + + emails = session.query(Email).all() + assert not emails + + email_versions = session.query(version_class(Email))\ + .order_by(version_class(Email).transaction_id)\ + .all() + assert len(email_versions) == 10 + for i in range(10): + if i < 5: + assert email_versions[i].operation_type == 0 + else: + assert email_versions[i].operation_type == 2 + + def test_versioning_delete(db, session): """Test deletion.""" user = User(name='test') diff --git a/queue_services/entity-bn/devops/vaults.json b/queue_services/entity-bn/devops/vaults.json index 451dc80fa7..2b6c3dc51b 100644 --- a/queue_services/entity-bn/devops/vaults.json +++ b/queue_services/entity-bn/devops/vaults.json @@ -29,7 +29,8 @@ "vault": "entity", "application": [ "entity-service-account", - "sentry" + "sentry", + "launchdarkly" ] } ] diff --git a/queue_services/entity-bn/flags.json b/queue_services/entity-bn/flags.json new file mode 100644 index 0000000000..f773eec046 --- /dev/null +++ b/queue_services/entity-bn/flags.json @@ -0,0 +1,14 @@ +{ + "flagValues": { + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": true + } + } +} diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py index 96e898eaed..cc93535bad 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py @@ -19,10 +19,10 @@ import dpath from flask import current_app from legal_api.models import Address, Business, Filing, Party, PartyRole, RequestTracker, db +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime from legal_api.utils.legislation_datetime import LegislationDatetime from sqlalchemy import and_, func -from sqlalchemy_continuum import version_class from entity_bn.bn_processors import ( bn_note, @@ -215,7 +215,7 @@ def change_address(business: Business, filing: Filing, # pylint: disable=too-ma def has_previous_address(transaction_id: int, office_id: int, address_type: str) -> bool: """Has previous 
address for the given transaction and office id.""" - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) address = db.session.query(address_version) \ .filter(address_version.operation_type != 2) \ .filter(address_version.office_id == office_id) \ @@ -227,7 +227,7 @@ def has_previous_address(transaction_id: int, office_id: int, address_type: str) def has_party_name_changed(business: Business, filing: Filing) -> bool: """Has party name changed in the given filing.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles = db.session.query(party_role_version)\ .filter(party_role_version.transaction_id == filing.transaction_id) \ .filter(party_role_version.operation_type != 2) \ @@ -266,7 +266,7 @@ def _get_name(party) -> str: def _get_modified_parties(transaction_id, business_id): """Get all party values before the given transaction id.""" - party_version = version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) parties = db.session.query(party_version) \ .join(PartyRole, and_(PartyRole.party_id == party_version.id, PartyRole.business_id == business_id)) \ .filter(PartyRole.role.in_([PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value])) \ diff --git a/queue_services/entity-bn/src/entity_bn/config.py b/queue_services/entity-bn/src/entity_bn/config.py index 754de4878d..65bd323f2c 100644 --- a/queue_services/entity-bn/src/entity_bn/config.py +++ b/queue_services/entity-bn/src/entity_bn/config.py @@ -58,10 +58,12 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. """ + SERVICE_NAME = 'entity-bn' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' SENTRY_DSN = '' if SENTRY_DSN.lower() == 'null' else SENTRY_DSN + LD_SDK_KEY = os.getenv('LD_SDK_KEY', None) COLIN_API = f"{os.getenv('COLIN_API_URL', '')}{os.getenv('COLIN_API_VERSION', '')}" SEARCH_API = \ diff --git a/queue_services/entity-bn/src/entity_bn/worker.py b/queue_services/entity-bn/src/entity_bn/worker.py index 7cd63c5ebc..3e4c358983 100644 --- a/queue_services/entity-bn/src/entity_bn/worker.py +++ b/queue_services/entity-bn/src/entity_bn/worker.py @@ -32,9 +32,10 @@ import nats from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import db +from legal_api import init_db from legal_api.core import Filing as FilingCore from legal_api.models import Business +from legal_api.services.flags import Flags from sentry_sdk import capture_message from sqlalchemy.exc import OperationalError @@ -49,10 +50,14 @@ from entity_bn.exceptions import BNException, BNRetryExceededException +flags = Flags() # pylint: disable=invalid-name APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) # pragma: no cover FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) + +if FLASK_APP.config.get('LD_SDK_KEY', None): + flags.init_app(FLASK_APP) async def process_event(msg: Dict, flask_app: Flask): # pylint: disable=too-many-branches,too-many-statements diff --git a/queue_services/entity-digital-credentials/flags.json b/queue_services/entity-digital-credentials/flags.json new file mode 100644 index 0000000000..257cf6ec93 --- /dev/null +++ b/queue_services/entity-digital-credentials/flags.json @@ -0,0 +1,14 @@ +{ + 
"flagValues": { + "db-versioning": { + "legal-api": true, + "emailer": false, + "filer": false, + "entity-bn": false, + "digital-credentials": true, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": false + } + } +} diff --git a/queue_services/entity-digital-credentials/requirements.txt b/queue_services/entity-digital-credentials/requirements.txt index a67f794736..f35b9f0271 100644 --- a/queue_services/entity-digital-credentials/requirements.txt +++ b/queue_services/entity-digital-credentials/requirements.txt @@ -22,5 +22,5 @@ urllib3==1.26.11 Werkzeug==1.0.1 git+https://github.com/bcgov/business-schemas.git@2.18.15#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common -git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning +git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py index 863c8f3ebf..06fa1a4f65 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py @@ -58,6 +58,8 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. """ + # used to identify versioning flag + SERVICE_NAME = 'digital-credentials' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py index 03decbfc55..380d4d8a2b 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py @@ -33,7 +33,7 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import db +from legal_api import init_db from legal_api.core import Filing as FilingCore from legal_api.models import Business from legal_api.services import digital_credentials, flags @@ -54,7 +54,7 @@ APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) with FLASK_APP.app_context(): # db require app context digital_credentials.init_app(FLASK_APP) diff --git a/queue_services/entity-digital-credentials/tests/unit/__init__.py b/queue_services/entity-digital-credentials/tests/unit/__init__.py index 0ed27a9e90..88b20c032d 100644 --- a/queue_services/entity-digital-credentials/tests/unit/__init__.py +++ b/queue_services/entity-digital-credentials/tests/unit/__init__.py @@ -14,7 +14,7 @@ """The Unit Tests and the helper routines.""" from legal_api.models import Business, DCConnection, DCDefinition, DCIssuedCredential, Filing -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy def create_business(identifier): @@ -37,9 +37,8 @@ def create_filing(session, business_id=None, filing._status = filing_status if filing_status == 
Filing.Status.COMPLETED.value: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id if filing_json: filing.filing_json = filing_json if business_id: diff --git a/queue_services/entity-emailer/flags.json b/queue_services/entity-emailer/flags.json index 877972a91c..e1f4a3778b 100644 --- a/queue_services/entity-emailer/flags.json +++ b/queue_services/entity-emailer/flags.json @@ -1,5 +1,15 @@ { "flagValues": { - "disable-specific-service-provider": true + "disable-specific-service-provider": true, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": true + } } -} \ No newline at end of file +} diff --git a/queue_services/entity-emailer/requirements.txt b/queue_services/entity-emailer/requirements.txt index bcbae5a182..a16b43522a 100644 --- a/queue_services/entity-emailer/requirements.txt +++ b/queue_services/entity-emailer/requirements.txt @@ -67,7 +67,7 @@ rsa==4.7.2 semver==2.13.0 sentry-sdk==1.20.0 six==1.15.0 -SQLAlchemy==1.3.24 +SQLAlchemy==1.4.44 SQLAlchemy-Continuum==1.3.13 SQLAlchemy-Utils==0.37.1 strict-rfc3339==0.7 diff --git a/queue_services/entity-emailer/src/entity_emailer/config.py b/queue_services/entity-emailer/src/entity_emailer/config.py index 057b00c19e..7ae1ff8718 100644 --- a/queue_services/entity-emailer/src/entity_emailer/config.py +++ b/queue_services/entity-emailer/src/entity_emailer/config.py @@ -58,6 +58,8 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. 
""" + # used to identify versioning flag + SERVICE_NAME = 'emailer' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) MSG_RETRY_NUM = int(os.getenv('MSG_RETRY_NUM', '5')) diff --git a/queue_services/entity-emailer/src/entity_emailer/worker.py b/queue_services/entity-emailer/src/entity_emailer/worker.py index b440e0137a..d1e8518edd 100644 --- a/queue_services/entity-emailer/src/entity_emailer/worker.py +++ b/queue_services/entity-emailer/src/entity_emailer/worker.py @@ -34,7 +34,7 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import EmailException, QueueException, logger from flask import Flask -from legal_api import db +from legal_api import db, init_db # noqa:F401,I001;pylint:disable=unused-import; from legal_api.models import Filing, Furnishing from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags @@ -66,14 +66,14 @@ ) from .message_tracker import tracker as tracker_util - +# noqa:I003 qsm = QueueServiceManager() # pylint: disable=invalid-name flags = Flags() # pylint: disable=invalid-name APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) if FLASK_APP.config.get('LD_SDK_KEY', None): flags.init_app(FLASK_APP) diff --git a/queue_services/entity-emailer/tests/unit/__init__.py b/queue_services/entity-emailer/tests/unit/__init__.py index b233e2cb4b..fb983aa024 100644 --- a/queue_services/entity-emailer/tests/unit/__init__.py +++ b/queue_services/entity-emailer/tests/unit/__init__.py @@ -19,7 +19,7 @@ from unittest.mock import Mock from legal_api.models import Batch, Business, Filing, Furnishing, Party, PartyRole, RegistrationBootstrap, User -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from registry_schemas.example_data import ( AGM_EXTENSION, AGM_LOCATION_CHANGE, @@ -128,9 +128,8 @@ def prep_incorp_filing(session, identifier, payment_id, option, legal_type=None) filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED', 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -194,9 +193,8 @@ def prep_registration_filing(session, identifier, payment_id, option, legal_type filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -472,9 +470,8 @@ def prep_maintenance_filing(session, identifier, payment_id, status, filing_type filing.save() if status == 'COMPLETED': - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -494,9 +491,8 @@ def prep_incorporation_correction_filing(session, business, original_filing_id, filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED']: - uow = 
versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -601,9 +597,8 @@ def prep_cp_special_resolution_correction_filing(session, business, original_fil filing._meta_data = {'correction': {'uploadNewRules': True, 'toLegalName': True}} filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -629,9 +624,8 @@ def prep_cp_special_resolution_correction_upload_memorandum_filing(session, busi filing._meta_data = {'correction': {'uploadNewMemorandum': True}} filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -664,9 +658,8 @@ def prep_amalgamation_filing(session, identifier, payment_id, option, legal_name filing.payment_completion_date = filing.filing_date filing.save() if option in [Filing.Status.COMPLETED.value, 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -692,9 +685,8 @@ def prep_continuation_in_filing(session, identifier, payment_id, option): filing.payment_completion_date = filing.filing_date filing.save() if option in [Filing.Status.COMPLETED.value, 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing diff --git a/queue_services/entity-emailer/tracker/config.py b/queue_services/entity-emailer/tracker/config.py index 744a9b44d4..35cb0b88ab 100644 --- a/queue_services/entity-emailer/tracker/config.py +++ b/queue_services/entity-emailer/tracker/config.py @@ -58,7 +58,9 @@ class _Config(): # pylint: disable=too-few-public-methods """ # PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - + + # used to identify versioning flag + SERVICE_NAME = 'emailer' SQLALCHEMY_TRACK_MODIFICATIONS = False # POSTGRESQL diff --git a/queue_services/entity-filer/flags.json b/queue_services/entity-filer/flags.json index 75a249ff46..7929957766 100644 --- a/queue_services/entity-filer/flags.json +++ b/queue_services/entity-filer/flags.json @@ -1,6 +1,16 @@ { "flagValues": { "enable-involuntary-dissolution": true, - "namex-nro-decommissioned": true + "namex-nro-decommissioned": true, + "db-versioning": { + "legal-api": true, + "emailer": false, + "filer": true, + "entity-bn": false, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": false + } } } diff --git a/queue_services/entity-filer/src/entity_filer/config.py b/queue_services/entity-filer/src/entity_filer/config.py index bf3d4dd7a7..ab04e09429 100644 --- a/queue_services/entity-filer/src/entity_filer/config.py +++ 
b/queue_services/entity-filer/src/entity_filer/config.py @@ -58,6 +58,7 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. """ + SERVICE_NAME = 'filer' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) PAYMENT_SVC_URL = os.getenv('PAYMENT_SVC_URL', '') diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 4d724378d5..b9b8150263 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -36,10 +36,10 @@ from entity_queue_common.service_utils import FilingException, QueueException, logger from flask import Flask from gcp_queue import GcpQueue, SimpleCloudEvent, to_queue_message -from legal_api import db +from legal_api import init_db from legal_api.core import Filing as FilingCore -from legal_api.models import Business, Filing -from legal_api.models.db import init_db, versioning_manager +from legal_api.models import Business, Filing, db +from legal_api.models.db import VersioningProxy from legal_api.services import Flags from legal_api.utils.datetime import datetime, timezone from sentry_sdk import capture_message @@ -245,8 +245,7 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, is_correction = filing_core_submission.filing_type == FilingCore.FilingTypes.CORRECTION if legal_filings := filing_core_submission.legal_filings(): - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business = Business.find_by_internal_id(filing_submission.business_id) @@ -361,7 +360,8 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, if filing.get('specialResolution'): special_resolution.process(business, filing, filing_submission) - filing_submission.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing_submission.transaction_id = transaction_id business_type = business.legal_type if business \ else filing_submission.filing_json.get('filing', {}).get('business', {}).get('legalType') diff --git a/queue_services/entity-filer/tests/unit/__init__.py b/queue_services/entity-filer/tests/unit/__init__.py index 11c7608453..e01e4739e0 100644 --- a/queue_services/entity-filer/tests/unit/__init__.py +++ b/queue_services/entity-filer/tests/unit/__init__.py @@ -22,7 +22,7 @@ from legal_api.models import Batch, BatchProcessing, Filing, Resolution, ShareClass, ShareSeries, db from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime, timezone from tests import EPOCH_DATETIME, FROZEN_DATETIME @@ -589,9 +589,8 @@ def factory_completed_filing(business, data_dict, filing_date=FROZEN_DATETIME, p filing.filing_json = data_dict filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date filing.payment_completion_date = filing_date diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py 
b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py index 584ee2f13a..01a58c4622 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py @@ -14,6 +14,7 @@ """The Unit Tests for the business filing component processors.""" import pytest from legal_api.models import Business +from sql_versioning import version_class from entity_filer.filing_processors.filing_components import shares from tests import strip_keys_from_dict @@ -119,5 +120,13 @@ def test_manage_share_structure__delete_shares(app, session): # check check_business = Business.find_by_internal_id(business_id) share_classes = check_business.share_classes.all() + assert not share_classes + share_classes = session.query(ShareClass).all() assert not share_classes + + share_class_version = version_class(ShareClass) + share_class_versions = session.query(share_class_version).all() + assert len(share_class_versions) == 5 + for scv in share_class_versions: + assert scv.operation_type == 2 From c7e4ce4139b4e4c1f18cafa8d07516a5924054c3 Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Mon, 24 Feb 2025 21:36:14 +0000 Subject: [PATCH 068/133] Bump versions to 2.142.0 (#3247) --- legal-api/src/legal_api/version.py | 2 +- queue_services/entity-bn/src/entity_bn/version.py | 2 +- .../src/entity_digital_credentials/version.py | 2 +- queue_services/entity-emailer/src/entity_emailer/version.py | 2 +- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index f0ae7ce1af..cba46c8494 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.141.0' # pylint: disable=invalid-name +__version__ = '2.142.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-bn/src/entity_bn/version.py b/queue_services/entity-bn/src/entity_bn/version.py index c8e4310dd4..9aeaafef3c 100644 --- a/queue_services/entity-bn/src/entity_bn/version.py +++ b/queue_services/entity-bn/src/entity_bn/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.131.0' # pylint: disable=invalid-name +__version__ = '2.142.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py index cb5ffe29d1..2153e5ff80 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.131.0' # pylint: disable=invalid-name +__version__ = '2.142.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-emailer/src/entity_emailer/version.py b/queue_services/entity-emailer/src/entity_emailer/version.py index 7b1a128404..984cf87ae3 100644 --- a/queue_services/entity-emailer/src/entity_emailer/version.py +++ b/queue_services/entity-emailer/src/entity_emailer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.134.0' # pylint: disable=invalid-name +__version__ = '2.142.0' # pylint: disable=invalid-name diff 
--git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 8b52684ce8..984cf87ae3 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.141.0' # pylint: disable=invalid-name +__version__ = '2.142.0' # pylint: disable=invalid-name From c5992f517661759a2701d1af4c261e2ba69808d0 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Tue, 25 Feb 2025 10:14:54 -0800 Subject: [PATCH 069/133] Merge feature-data-migration back to main branch (#3249) * 25654 Tombstone - colin event id (#3227) * 25678 Tombstone - Updates for involuntary dissolution (#3221) * 25678 Tombstone - bring data of corp in dissolution Signed-off-by: Hongjing Chen * update to avoid duplicate corp loading in the same db based on different corp_processing table(different ppl) Signed-off-by: Hongjing Chen * temporary fix for warning check of dissolution eligibility Signed-off-by: Hongjing Chen * 25672 - Tombstone - conversion involuntary dissolution Signed-off-by: Hongjing Chen * small fix for conversion IA Signed-off-by: Hongjing Chen * remove dissolution threshold timestamp config Signed-off-by: Hongjing Chen * remove debugging code Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen * 24699 & 24700 Tombstone - distinguish FED and non-FED, implement Notice of Withdrawal (#3231) * 24699 & 24700 distinguish FED and non-FED, implement NoW Signed-off-by: Hongjing Chen * small fix Signed-off-by: Hongjing Chen * throw exception for withdrawn IA/amalgamation/continuation to stop migration Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen * 25797 Tombstone pipeline include last_ar_reminder_year (#3237) * 26013 tweaks from prelim corps dry run (#3238) * 26013 tweaks from prelim corps dry run * revert back to old code for tombstone_utils.py * 24691 - updates for expired restoration & put back on/off (#3248) Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen Co-authored-by: Vysakh Menon Co-authored-by: Argus Chiu --- data-tool/.corps.env.sample | 2 +- data-tool/flows/batch_delete_flow.py | 4 + data-tool/flows/corps_tombstone_flow.py | 92 +++++++- .../flows/tombstone/tombstone_base_data.py | 56 ++++- .../flows/tombstone/tombstone_mappings.py | 41 ++-- .../flows/tombstone/tombstone_queries.py | 33 ++- data-tool/flows/tombstone/tombstone_utils.py | 213 ++++++++++++++---- .../scripts/README_COLIN_Corps_Extract.md | 4 +- .../scripts/colin_corps_extract_postgres_ddl | 119 +++++++++- data-tool/scripts/transfer_cprd_corps.sql | 9 +- .../legal_api/reports/business_document.py | 1 + .../involuntary_dissolution.py | 5 + 12 files changed, 505 insertions(+), 74 deletions(-) diff --git a/data-tool/.corps.env.sample b/data-tool/.corps.env.sample index e680ad60b6..fa83a184cd 100644 --- a/data-tool/.corps.env.sample +++ b/data-tool/.corps.env.sample @@ -71,4 +71,4 @@ DELETE_BATCH_SIZE=300 ## delete corps record in auth db, corp_processing of colin extract DELETE_AUTH_RECORDS=False -DELETE_CORP_PROCESSING_RECORDS=True \ No newline at end of file +DELETE_CORP_PROCESSING_RECORDS=True diff --git a/data-tool/flows/batch_delete_flow.py b/data-tool/flows/batch_delete_flow.py index 78f6b1d7ef..bb134a5a92 100644 --- a/data-tool/flows/batch_delete_flow.py +++ b/data-tool/flows/batch_delete_flow.py @@ -231,6 +231,10 @@ def lear_delete_versioned(conn: 
Connection, business_ids: list): 'source': 'comments', 'params': {'filing_id': filing_ids}, }, + { + 'source': 'furnishings', + 'params': {'business_id': business_ids}, + }, # there're some Comment records saved by legal-api directly instead of filer # some of them are linked via business_id { diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 633421ac23..4bbe65c860 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -1,3 +1,5 @@ +from pathlib import Path + import math from datetime import datetime, timedelta @@ -7,6 +9,7 @@ from prefect import flow, task, serve from prefect.futures import wait from prefect.context import get_run_context +from prefect.task_runners import ConcurrentTaskRunner from sqlalchemy import Connection, text from sqlalchemy.engine import Engine @@ -102,6 +105,7 @@ def get_snapshot_filings_data(config, colin_engine: Engine, corp_num: str) -> di return raw_data + @task(name='2.2-Corp-Snapshot-Placeholder-Filings-Cleanup-Task') def clean_snapshot_filings_data(data: dict) -> dict: """Clean corp snapshot and placeholder filings data.""" @@ -122,7 +126,7 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dic # Note: The business info is partially loaded for businesses table now. And it will be fully # updated by the following placeholder historical filings migration. But it depends on the # implementation of next step. - business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier') + business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier', conflict_error=True) for office in tombstone_data['offices']: office['offices']['business_id'] = business_id @@ -176,6 +180,20 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dic comment['staff_id'] = staff_id load_data(conn, 'comments', comment) + if in_dissolution := tombstone_data['in_dissolution']: + batch = in_dissolution['batches'] + batch_id = load_data(conn, 'batches', batch) + batch_processing = in_dissolution['batch_processing'] + + batch_processing['batch_id'] = batch_id + batch_processing['business_id'] = business_id + load_data(conn, 'batch_processing', batch_processing) + + furnishing = in_dissolution['furnishings'] + furnishing['batch_id'] = batch_id + furnishing['business_id'] = business_id + load_data(conn, 'furnishings', furnishing) + return business_id @@ -186,6 +204,7 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id update_info = tombstone_data['updates'] state_filing_index = update_info['state_filing_index'] update_business_data = update_info['businesses'] + filing_ids_mapper = {} # load placeholder filings for i, data in enumerate(filings_data): f = data['filings'] @@ -195,11 +214,18 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id f['submitter_id'] = user_id f['transaction_id'] = transaction_id f['business_id'] = business_id + if (withdrawn_idx := f['withdrawn_filing_id']) is not None: + f['withdrawn_filing_id'] = filing_ids_mapper[withdrawn_idx] + filing_id = load_data(conn, 'filings', f) + filing_ids_mapper[i] = filing_id + + data['colin_event_ids']['filing_id'] = filing_id + load_data(conn, 'colin_event_ids', data['colin_event_ids'], expecting_id=False) if i == state_filing_index: update_info['businesses']['state_filing_id'] = filing_id - + if jurisdiction := data['jurisdiction']: jurisdiction['business_id'] = business_id 
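# the continuation-in jurisdiction snapshot is linked to both the migrated business and its placeholder filing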
jurisdiction['filing_id'] = filing_id @@ -236,7 +262,7 @@ def load_amalgamation_snapshot(conn: Connection, amalgamation_data: dict, busine amalgamation_id = load_data(conn, 'amalgamations', amalgamation) for ting in amalgamation_data['amalgamating_businesses']: - if ting_identifier:= ting.get('ting_identifier'): + if ting_identifier := ting.get('ting_identifier'): # TODO: avoid update info for withdrawn amalg filing (will handle in NoW work) # TING must exists in db before updating state filing info, del ting['ting_identifier'] @@ -306,7 +332,7 @@ def get_tombstone_data(config, colin_engine: Engine, corp_num: str) -> tuple[str return corp_num, clean_data except Exception as e: print(f'❌ Error collecting corp snapshot and filings data for {corp_num}: {repr(e)}') - return corp_num, None + return corp_num, e @task(name='3-Corp-Tombstone-Migrate-Task-Async') @@ -333,7 +359,9 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di @flow( name='Corps-Tombstone-Migrate-Flow', log_prints=True, - persist_result=False + persist_result=False, + # use ConcurrentTaskRunner when using work pool based deployments + # task_runner=ConcurrentTaskRunner(max_workers=35) ) def tombstone_flow(): """Entry of tombstone pipeline""" @@ -391,7 +419,7 @@ def tombstone_flow(): skipped = 0 for f in data_futures: corp_num, clean_data = f.result() - if clean_data: + if clean_data and not isinstance(clean_data, Exception): corp_futures.append( migrate_tombstone.submit(config, lear_engine, corp_num, clean_data, users_mapper) ) @@ -401,7 +429,7 @@ def tombstone_flow(): flow_run_id, corp_num, ProcessingStatuses.FAILED, - error="Migration failed - Skip due to data collection error" + error=f"Migration failed - Skip due to data collection error: {repr(clean_data)}" ) print(f'❗ Skip migrating {corp_num} due to data collection error.') @@ -438,7 +466,7 @@ def tombstone_flow(): if __name__ == "__main__": - tombstone_flow() + tombstone_flow() # # Create deployment - only intended to test locally for parallel flows # deployment = tombstone_flow.to_deployment( @@ -449,3 +477,51 @@ def tombstone_flow(): # # # Start serving the deployment # serve(deployment) + + + # Work pool based deployments + # + # Only one of deployments 1-3 should be running at any given time. + # + # Note: the following deployment is used strictly for maximizing local resource usage for production + # dry runs and the actual final tombstone migration to the production environment. If there is no need + # to run multiple parallel flows, the following set-ups are not req'd. + + # flow_source = Path(__file__).parent + + # # 1. TINGs deployment setup + # # subquery = subqueries[1] + # # ensure "and cs.state_type_cd = 'ACT'" is commented out as TINGS are historical + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-tings-deployment", + # tags=["tombstone-tings-migration"], + # work_pool_name="tombstone-tings-pool", + # interval=timedelta(seconds=60) # Run every x seconds + # ) + + # # 2. TEDs deployment setup + # # subquery = subqueries[2] + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-teds-deployment", + # tags=["tombstone-teds-migration"], + # work_pool_name="tombstone-teds-pool", + # interval=timedelta(seconds=60) # Run every x seconds + # ) + + # # 3. 
OTHERs deployment setup + # # subquery = subqueries[3] + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-deployment", + # tags=["tombstone-migration"], + # work_pool_name="tombstone-pool", + # interval=timedelta(seconds=70) # Run every x seconds + # ) diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index 757b0925dd..8931a8741f 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -190,17 +190,19 @@ 'colin_only': False, 'deletion_locked': False, 'hide_in_ledger': False, # TODO: double check when doing cleanup - dissolution (invol, admin) - # TODO: new columns for NoW + 'withdrawal_pending': False, # FK 'business_id': None, 'transaction_id': None, 'submitter_id': None, + 'withdrawn_filing_id': None, # others 'submitter_roles': None, }, 'jurisdiction': None, # optional 'amalgamations': None, # optional - 'comments': None # optional + 'comments': None, # optional + 'colin_event_ids': None } FILING_COMBINED = { @@ -234,6 +236,54 @@ 'amalgamation_id': None, } + +# ======== in_dissoluion ======== +BATCH = { + 'batch_type': 'INVOLUNTARY_DISSOLUTION', + 'status': 'PROCESSING', + 'size': 1, + 'max_size': 1, + 'start_date': None, # timestamptz, required + 'notes': 'Import from COLIN', +} + +BATCH_PROCESSING = { + 'business_identifier': None, + 'step': None, + 'meta_data': None, + 'created_date': None, # timestamptz, required + 'last_modified': None, # timestamptz, required + 'trigger_date': None, # timestamptz + 'status': 'PROCESSING', + 'notes': 'Import from COLIN', + # FK + 'batch_id': None, + 'business_id': None, +} + +FURNISHING = { + 'business_identifier': None, + 'furnishing_type': None, + 'furnishing_name': None, + 'meta_data': None, + 'created_date': None, # timestamptz, required + 'last_modified': None, # timestamptz, required + 'processed_date': None, # timestamptz + 'status': 'PROCESSED', + 'notes': 'Import from COLIN', + # FK + 'batch_id': None, + 'business_id': None, + +} + +IN_DISSOLUTION = { + 'batches': BATCH, + 'batch_processing': BATCH_PROCESSING, + 'furnishings': FURNISHING, +} + + # ======== tombstone example ======== TOMBSTONE = { 'businesses': BUSINESS, @@ -243,6 +293,8 @@ 'aliases': [ALIAS], 'resolutions': [RESOLUTION], 'filings': [FILING], + 'comments': [COMMENT], + 'in_dissolution': IN_DISSOLUTION, 'updates': { 'businesses': BUSINESS, 'state_filing_index': -1 diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 1aff238381..e33f688423 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -86,8 +86,6 @@ class EventFilings(str, Enum): FILE_CO_DI = 'FILE_CO_DI' FILE_CO_DO = 'FILE_CO_DO' FILE_CO_LI = 'FILE_CO_LI' - FILE_CO_PF = 'FILE_CO_PF' - FILE_CO_PO = 'FILE_CO_PO' FILE_CO_RM = 'FILE_CO_RM' FILE_CO_RR = 'FILE_CO_RR' FILE_CO_SS = 'FILE_CO_SS' @@ -120,8 +118,6 @@ class EventFilings(str, Enum): FILE_ICORC = 'FILE_ICORC' # TODO: Legacy Other - unsupported - FILE_AM_PF = 'FILE_AM_PF' - FILE_AM_PO = 'FILE_AM_PO' FILE_AM_TR = 'FILE_AM_TR' # TODO: Liquidation - unsupported @@ -130,6 +126,15 @@ class EventFilings(str, Enum): # TODO: Notice of Withdrawal - unsupported FILE_NWITH = 'FILE_NWITH' + # Put Back Off + SYSDL_NULL = 'SYSDL_NULL' + FILE_AM_PF = 'FILE_AM_PF' + FILE_CO_PF = 'FILE_CO_PF' + + # Put Back On + FILE_AM_PO = 'FILE_AM_PO' + 
FILE_CO_PO = 'FILE_CO_PO' + # Registrar's Notation FILE_REGSN = 'FILE_REGSN' @@ -216,8 +221,8 @@ def has_value(cls, value): EventFilings.CONVDSL_NULL: 'conversion', # TODO: liquidation EventFilings.CONVDSO_NULL: ['conversion', ('dissolution', 'unknown')], EventFilings.CONVICORP_NULL: 'conversion', - EventFilings.CONVID1_NULL: 'conversion', # TODO: related to invol dissolution - EventFilings.CONVID2_NULL: 'conversion', # TODO: related to invol dissolution + EventFilings.CONVID1_NULL: ['conversion', 'putBackOn'], # TODO: to confirm + EventFilings.CONVID2_NULL: ['conversion', 'putBackOn'], # TODO: to confirm EventFilings.CONVILIQ_NULL: 'conversion', # TODO: liquidation EventFilings.CONVLRSTR_NULL: ['conversion', ('restoration', 'limitedRestoration')], EventFilings.CONVNC_NULL: ['conversion', 'changeOfName'], @@ -228,8 +233,6 @@ def has_value(cls, value): EventFilings.FILE_CO_DI: 'correction', EventFilings.FILE_CO_DO: 'correction', EventFilings.FILE_CO_LI: 'correction', - EventFilings.FILE_CO_PF: 'correction', - EventFilings.FILE_CO_PO: 'correction', EventFilings.FILE_CO_RM: 'correction', EventFilings.FILE_CO_RR: 'correction', EventFilings.FILE_CO_SS: 'correction', @@ -256,14 +259,19 @@ def has_value(cls, value): EventFilings.FILE_ICORC: 'incorporationApplication', # TODO: Legacy Other - unsupported - EventFilings.FILE_AM_PF: 'legacyOther', - EventFilings.FILE_AM_PO: 'legacyOther', EventFilings.FILE_AM_TR: 'legacyOther', # TODO: Liquidation - unsupported EventFilings.FILE_NWITH: 'noticeOfWithdrawal', + EventFilings.SYSDL_NULL: 'putBackOff', + EventFilings.FILE_AM_PF: 'putBackOff', + EventFilings.FILE_CO_PF: 'putBackOff', + + EventFilings.FILE_AM_PO: 'putBackOn', + EventFilings.FILE_CO_PO: 'putBackOn', + EventFilings.FILE_REGSN: 'registrarsNotation', EventFilings.FILE_REGSO: 'registrarsOrder', @@ -333,8 +341,6 @@ def has_value(cls, value): EventFilings.FILE_CO_DI: 'Correction - Director', EventFilings.FILE_CO_DO: 'Correction - Dissolved Office', EventFilings.FILE_CO_LI: 'Correction - Ledger Information', - EventFilings.FILE_CO_PF: 'Correction - Put Back Off', - EventFilings.FILE_CO_PO: 'Correction - Put Back On', EventFilings.FILE_CO_RM: 'Correction - Receiver or Receiver Manager', EventFilings.FILE_CO_RR: 'Correction - Registered and Records Offices', EventFilings.FILE_CO_SS: 'Correction - Share Structure', @@ -363,8 +369,6 @@ def has_value(cls, value): EventFilings.FILE_ICORC: 'Incorporation Application for a Community Contribution Company', # TODO: Legacy Other - unsupported - EventFilings.FILE_AM_PF: 'Amendment - Put Back Off', - EventFilings.FILE_AM_PO: 'Amendment - Put Back On', EventFilings.FILE_AM_TR: 'Amendment - Transition', # TODO: Liquidation - unsupported (need to check if anything missing) @@ -391,6 +395,13 @@ def has_value(cls, value): EventFilings.FILE_NWITH: 'Notice of Withdrawal', + EventFilings.SYSDL_NULL: None, + EventFilings.FILE_AM_PF: 'Amendment - Put Back Off', + EventFilings.FILE_CO_PF: 'Correction - Put Back Off', + + EventFilings.FILE_AM_PO: 'Amendment - Put Back On', + EventFilings.FILE_CO_PO: 'Correction - Put Back On', + EventFilings.FILE_REGSN: "Registrar''s Notation", EventFilings.FILE_REGSO: "Registrar''s Order", @@ -418,6 +429,7 @@ def has_value(cls, value): # TODO: 'continuation_out_date' - continuation out 'continuationIn': ['last_coa_date', 'last_cod_date'], 'dissolution': ['dissolution_date'], + 'putBackOff': ['restoration_expiry_date', 'dissolution_date'], 'putBackOn': ['dissolution_date'], 'restoration': ['dissolution_date', 'restoration_expiry_date'], } 
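# note: keyed by target filing type; values are the business date columns the migration updates (see get_business_update_value in tombstone_utils.py)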
@@ -426,6 +438,7 @@ def has_value(cls, value): LEAR_STATE_FILINGS = [ 'dissolution', 'restoration', + 'putBackOff', 'putBackOn', 'continuationOut', # TODO: other state filings that lear doesn't support for now e.g. liquidation diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 56cb190355..a7a7bf89e6 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -274,6 +274,7 @@ def get_business_query(corp_num, suffix): -- TODO: submitter_userid -- c.send_ar_ind, + c.last_ar_reminder_year, to_char(c.last_ar_filed_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as last_ar_date, -- admin_freeze case @@ -581,6 +582,15 @@ def get_filings_query(corp_num): f.filing_type_cd as f_filing_type_cd, to_char(f.effective_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, f.withdrawn_event_id as f_withdrawn_event_id, + case + when f.withdrawn_event_id is null then null + else ( + select + to_char(we.event_timerstamp::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') + from event we + where we.event_id = f.withdrawn_event_id + ) + end as f_withdrawn_event_ts_str, -- paper only now -> f_ods_type f.nr_num as f_nr_num, to_char(f.period_end_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_period_end_dt_str, @@ -720,6 +730,26 @@ def get_filing_comments_query(corp_num): return query +def get_in_dissolution_query(corp_num): + query = f""" + select + cs.corp_num as cs_corp_num, + cs.state_type_cd as cs_state_type_cd, + e.event_id as e_event_id, + e.event_type_cd as e_event_type_cd, + to_char( + e.trigger_dts::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + ) as e_trigger_dts_str + from corp_state cs + join event e on e.event_id = cs.start_event_id + where 1 = 1 + and cs.corp_num = '{corp_num}' + and cs.end_event_id is null + and cs.state_type_cd in ('D1F', 'D2F', 'D1T', 'D2T') + """ + return query + + def get_corp_snapshot_filings_queries(config, corp_num): queries = { 'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX), @@ -732,7 +762,8 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'filings': get_filings_query(corp_num), 'amalgamations': get_amalgamation_query(corp_num), 'business_comments': get_business_comments_query(corp_num), - 'filing_comments': get_filing_comments_query(corp_num) + 'filing_comments': get_filing_comments_query(corp_num), + 'in_dissolution': get_in_dissolution_query(corp_num), } return queries diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 5faf20a1ec..baaf6f8d2e 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -2,13 +2,15 @@ import json from datetime import datetime, timezone from decimal import Decimal +from typing import Optional import pandas as pd import pytz from sqlalchemy import Connection, text from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, - FILING_JSON, JURISDICTION, OFFICE, - PARTY, PARTY_ROLE, RESOLUTION, + FILING_JSON, IN_DISSOLUTION, + JURISDICTION, OFFICE, PARTY, + PARTY_ROLE, RESOLUTION, SHARE_CLASSES, USER) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, EVENT_FILING_LEAR_TARGET_MAPPING, @@ -32,13 +34,20 @@ def format_business_data(data: dict) -> dict: last_ar_date = None last_ar_year = None + last_ar_reminder_year = 
business_data['last_ar_reminder_year'] + + # last_ar_reminder_year can be None if send_ar_ind is false or the business is in the 1st financial year + if business_data['send_ar_ind'] and last_ar_reminder_year is None: + last_ar_reminder_year = last_ar_year + formatted_business = { **business_data, 'last_ar_date': last_ar_date, 'last_ar_year': last_ar_year, + 'last_ar_reminder_year': last_ar_reminder_year, 'fiscal_year_end_date': business_data['founding_date'], 'last_ledger_timestamp': business_data['founding_date'], - 'last_modified': datetime.utcnow().replace(tzinfo=timezone.utc).isoformat() + 'last_modified': datetime.now(tz=timezone.utc).isoformat() } return formatted_business @@ -47,7 +56,7 @@ def format_business_data(data: dict) -> dict: def format_address_data(address_data: dict, prefix: str) -> dict: # Note: all corps have a format type of null or FOR address_type = 'mailing' if prefix == 'ma_' else 'delivery' - + street = address_data[f'{prefix}addr_line_1'] street_additional_elements = [] if (line_2 := address_data[f'{prefix}addr_line_2']) and (line_2 := line_2.strip()): @@ -57,7 +66,7 @@ def format_address_data(address_data: dict, prefix: str) -> dict: street_additional = ' '.join(street_additional_elements) if not (delivery_instructions := address_data[f'{prefix}delivery_instructions']) \ - or not (delivery_instructions := delivery_instructions.strip()): + or not (delivery_instructions := delivery_instructions.strip()): delivery_instructions = '' formatted_address = { @@ -90,7 +99,7 @@ def format_offices_data(data: dict) -> list[dict]: office['addresses'].append(delivery_address) formatted_offices.append(office) - + return formatted_offices @@ -117,7 +126,7 @@ def format_parties_data(data: dict) -> list[dict]: mailing_addr_data = group.loc[ma_index].to_dict() else: mailing_addr_data = None - + if (da_index := group['cp_delivery_addr_id'].first_valid_index()) is not None: delivery_addr_data = group.loc[da_index].to_dict() else: @@ -140,9 +149,9 @@ def format_parties_data(data: dict) -> list[dict]: party_role['appointment_date'] = r['cp_appointment_dt_str'] party_role['cessation_date'] = r['cp_cessation_dt_str'] formatted_party_roles.append(party_role) - + formatted_parties.append(party) - + return formatted_parties @@ -176,7 +185,7 @@ def format_share_classes_data(data: dict) -> list[dict]: priority = int(share_class_info['ssc_share_class_id']) if share_class_info['ssc_share_class_id'] else None max_shares = int(share_class_info['ssc_share_quantity']) if share_class_info['ssc_share_quantity'] else None par_value = float(share_class_info['ssc_par_value_amt']) if share_class_info['ssc_par_value_amt'] else None - + # TODO: map NULL or custom input value of ssc_other_currency if (currency := share_class_info['ssc_currency_typ_cd']) == 'OTH': currency = share_class_info['ssc_other_currency'] @@ -191,7 +200,7 @@ def format_share_classes_data(data: dict) -> list[dict]: share_class['share_classes']['special_rights_flag'] = share_class_info['ssc_spec_rights_ind'] # Note: srs_share_class_id should be either None or equal to share_class_id - matching_series = group[group['srs_share_class_id']==share_class_id] + matching_series = group[group['srs_share_class_id'] == share_class_id] formatted_series = share_class['share_series'] for _, r in matching_series.iterrows(): formatted_series.append(format_share_series_data(r.to_dict())) @@ -238,7 +247,7 @@ def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict: if not matched_jurisdictions: return None - + formatted_jurisdiction = 
copy.deepcopy(JURISDICTION) jurisdiction_info = matched_jurisdictions[0] @@ -276,6 +285,7 @@ def format_filings_data(data: dict) -> list[dict]: formatted_filings = [] state_filing_idx = -1 idx = 0 + withdrawn_filing_idx = -1 for x in filings_data: event_file_type = x['event_file_type'] # TODO: build a new complete filing event mapper (WIP) @@ -285,7 +295,7 @@ def format_filings_data(data: dict) -> list[dict]: print(f'❗ Skip event filing type: {event_file_type}') unsupported_event_file_types.add(event_file_type) continue - + # get converted filing_type and filing_subtype if raw_filing_type == 'conversion': if isinstance(raw_filing_subtype, tuple): @@ -299,6 +309,7 @@ def format_filings_data(data: dict) -> list[dict]: filing_subtype = raw_filing_subtype effective_date = x['ce_effective_dt_str'] or x['f_effective_dt_str'] or x['e_event_dt_str'] + filing_date = x['ce_effective_dt_str'] or x['e_event_dt_str'] trigger_date = x['e_trigger_dt_str'] filing_json, meta_data = build_filing_json_meta_data(raw_filing_type, filing_type, filing_subtype, @@ -312,21 +323,40 @@ def format_filings_data(data: dict) -> list[dict]: if not (user_id := x['u_user_id']): user_id = x['u_full_name'] if x['u_full_name'] else None - if raw_filing_type == 'conversion' or raw_filing_subtype == 'involuntary': + if ( + raw_filing_type == 'conversion' + or raw_filing_subtype == 'involuntary' + or (raw_filing_type == 'putBackOff' and event_file_type == 'SYSDL_NULL') + ): hide_in_ledger = True else: hide_in_ledger = False + if x['f_withdrawn_event_id']: + if filing_type in [ + 'amalgamationApplication', + 'incorporationApplication', + 'continuationIn' + ]: + raise Exception('Stop migrating withdrawn corp') + status = 'WITHDRAWN' + completion_date = None + withdrawn_filing_idx = idx + else: + status = 'COMPLETED' + completion_date = effective_date + filing_body = { **filing_body, - 'filing_date': effective_date, + 'filing_date': filing_date, 'filing_type': raw_filing_type, 'filing_sub_type': raw_filing_subtype, - 'completion_date': effective_date, + 'completion_date': completion_date, 'effective_date': effective_date, 'filing_json': filing_json, 'meta_data': meta_data, 'hide_in_ledger': hide_in_ledger, + 'status': status, 'submitter_id': user_id, # will be updated to real user_id when loading data into db } @@ -334,17 +364,21 @@ def format_filings_data(data: dict) -> list[dict]: # based on converted filing type if filing_type == 'continuationIn': jurisdiction = format_jurisdictions_data(data, x['e_event_id']) - - if filing_type == 'amalgamationApplication': + elif filing_type == 'amalgamationApplication': amalgamation = format_amalgamations_data(data, x['e_event_id'], effective_date, filing_subtype) + elif filing_type == 'noticeOfWithdrawal': + filing_body['withdrawn_filing_id'] = withdrawn_filing_idx # will be updated to real filing_id when loading data + withdrawn_filing_idx = -1 comments = format_filing_comments_data(data, x['e_event_id']) + colin_event_ids = {'colin_event_id': x['e_event_id']} filing = { 'filings': filing_body, 'jurisdiction': jurisdiction, 'amalgamations': amalgamation, - 'comments': comments + 'comments': comments, + 'colin_event_ids': colin_event_ids } formatted_filings.append(filing) @@ -357,7 +391,7 @@ def format_filings_data(data: dict) -> list[dict]: # save state filing index if filing_type in LEAR_STATE_FILINGS and x['e_event_id'] == x['cs_state_event_id']: state_filing_idx = idx - + idx += 1 return { @@ -379,7 +413,7 @@ def format_amalgamations_data(data: dict, event_id: Decimal, amalgamation_date: 
formatted_amalgmation = copy.deepcopy(AMALGAMATION) amalgamation_info = matched_amalgamations[0] - + formatted_amalgmation['amalgamations']['amalgamation_date'] = amalgamation_date formatted_amalgmation['amalgamations']['court_approval'] = bool(amalgamation_info['f_court_approval']) @@ -396,8 +430,8 @@ def format_amalgamating_businesses(ting_data: dict) -> dict: role = 'holding' if ting_data['adopted_corp_ind'] else 'amalgamating' foreign_identifier = None - if not (ting_data['ting_corp_num'].startswith('BC') or\ - ting_data['ting_corp_num'].startswith('Q') or\ + if not (ting_data['ting_corp_num'].startswith('BC') or + ting_data['ting_corp_num'].startswith('Q') or ting_data['ting_corp_num'].startswith('C')): foreign_identifier = ting_data['ting_corp_num'] @@ -432,7 +466,7 @@ def format_filing_comments_data(data: dict, event_id: Decimal) -> list: if not matched_filing_comments: return None - + formatted_filing_comments = [] for x in matched_filing_comments: if c := x['lt_notation']: @@ -459,7 +493,7 @@ def format_filing_comments_data(data: dict, event_id: Decimal) -> list: def format_business_comments_data(data: dict) -> list: business_comments_data = data['business_comments'] formatted_business_comments = [] - + for x in business_comments_data: c = x['cc_comments'] if x['cc_comments'] else x['cc_accession_comments'] if not (staff_id := x['cc_user_id']): @@ -474,6 +508,68 @@ def format_business_comments_data(data: dict) -> list: return formatted_business_comments +def format_in_dissolution_data(data: dict) -> dict: + if not (in_dissolution_data := data['in_dissolution']): + return None + + in_dissolution_data = in_dissolution_data[0] + + formatted_in_dissolution = copy.deepcopy(IN_DISSOLUTION) + batch = formatted_in_dissolution['batches'] + batch_processiong = formatted_in_dissolution['batch_processing'] + furnishing = formatted_in_dissolution['furnishings'] + + utc_now_str = datetime.utcnow().replace(tzinfo=timezone.utc).isoformat() + batch['start_date'] = utc_now_str + + corp_state = in_dissolution_data['cs_state_type_cd'] + + batch_processiong['business_identifier'] = in_dissolution_data['cs_corp_num'] + batch_processiong['created_date'] = batch_processiong['last_modified'] = utc_now_str + batch_processiong['trigger_date'] = in_dissolution_data['e_trigger_dts_str'] + batch_processiong['meta_data'] = { + 'importFromColin': True, + 'colinDissolutionState': corp_state, + } + + furnishing['business_identifier'] = in_dissolution_data['cs_corp_num'] + furnishing['created_date'] = furnishing['last_modified'] = furnishing['processed_date'] = utc_now_str + furnishing['meta_data'] = { + 'importFromColin': True, + 'colinDissolutionState': corp_state, + } + + if corp_state in ('D1F', 'D1T'): + # stage 1 + batch_processiong['step'] = 'WARNING_LEVEL_1' + overdue_ar = True if corp_state == 'D1F' else False + batch_processiong['meta_data'] = { + **batch_processiong['meta_data'], + 'overdueARs': overdue_ar, + 'overdueTransition': not overdue_ar, + 'stage_1_date': utc_now_str, + } + + furnishing['furnishing_type'] = 'MAIL' # as placeholder + furnishing['furnishing_name'] = 'DISSOLUTION_COMMENCEMENT_NO_AR' if overdue_ar \ + else 'DISSOLUTION_COMMENCEMENT_NO_TR' + else: + # stage 2 + batch_processiong['step'] = 'WARNING_LEVEL_2' + overdue_ar = True if corp_state == 'D2F' else False + batch_processiong['meta_data'] = { + **batch_processiong['meta_data'], + 'overdueARs': overdue_ar, + 'overdueTransition': not overdue_ar, + 'stage_2_date': utc_now_str, + } + + furnishing['furnishing_type'] = 'GAZETTE' + 
furnishing['furnishing_name'] = 'INTENT_TO_DISSOLVE' + + return formatted_in_dissolution + + def format_users_data(users_data: list) -> list: formatted_users = [] @@ -481,10 +577,10 @@ def format_users_data(users_data: list) -> list: user = copy.deepcopy(USER) event_file_types = x['event_file_types'].split(',') # skip users if all event_file_type is unsupported or not users for staff comments - if not any(get_target_filing_type(ef)[0] for ef in event_file_types)\ - and not any (ef == 'STAFF_COMMENT' for ef in event_file_types): + if not any(get_target_filing_type(ef)[0] for ef in event_file_types) \ + and not any(ef == 'STAFF_COMMENT' for ef in event_file_types): continue - + if not (username := x['u_user_id']): username = x['u_full_name'] @@ -518,7 +614,6 @@ def formatted_data_cleanup(data: dict) -> dict: return data - def get_data_formatters() -> dict: ret = { 'businesses': format_business_data, @@ -529,6 +624,7 @@ def get_data_formatters() -> dict: 'resolutions': format_resolutions_data, 'filings': format_filings_data, 'comments': format_business_comments_data, # only for business level, filing level will be formatted ith filings + 'in_dissolution': format_in_dissolution_data, } return ret @@ -547,9 +643,14 @@ def get_target_filing_type(event_file_type: str) -> tuple[str, str]: def get_business_update_value(key: str, effective_date: str, trigger_date: str, filing_type: str, filing_subtype: str) -> str: if filing_type == 'putBackOn': value = None + elif filing_type == 'putBackOff': + if key == 'restoration_expiry_date': + value = None + else: + value = effective_date elif filing_type == 'restoration': - if key == 'restoration_expiry_date' and\ - filing_subtype in ['limitedRestoration', 'limitedRestorationExtension']: + if key == 'restoration_expiry_date' and \ + filing_subtype in ['limitedRestoration', 'limitedRestorationExtension']: value = trigger_date else: value = None @@ -562,7 +663,8 @@ def get_business_update_value(key: str, effective_date: str, trigger_date: str, def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_subtype: str, effective_date: str, data: dict) -> tuple[dict, dict]: filing_json = copy.deepcopy(FILING_JSON) filing_json['filing'][raw_filing_type] = {} - if raw_filing_type != filing_type: + # if conversion has conv filing type, set filing_json + if raw_filing_type != filing_type and filing_type: filing_json['filing'][filing_type] = {} meta_data = { @@ -648,6 +750,23 @@ def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_s 'fromLegalName': old_corp_name, 'toLegalName': new_corp_name, } + elif filing_type == 'putBackOff': + if (event_file_type := data['event_file_type']) == 'SYSDL_NULL': + filing_json['filing']['putBackOff'] = { + 'details': 'Put back off filing due to expired limited restoration.' 
+ } + meta_data['putBackOff'] = { + 'reason': 'Limited Restoration Expired', + 'expiryDate': effective_date[:10] + } + + if withdrawn_ts_str := data['f_withdrawn_event_ts_str']: + withdrawn_ts = datetime.strptime(withdrawn_ts_str, '%Y-%m-%d %H:%M:%S%z') + meta_data = { + **meta_data, + 'withdrawnDate': withdrawn_ts.isoformat() + } + # TODO: populate meta_data for correction to display correct filing name return filing_json, meta_data @@ -663,12 +782,12 @@ def get_colin_display_name(data: dict) -> str: ar_dt = datetime.strptime(ar_dt_str, '%Y-%m-%d %H:%M:%S%z') suffix = ar_dt.strftime('%b %d, %Y').upper() name = f'{name} - {suffix}' - + # Change of Directors elif event_file_type == EventFilings.FILE_NOCDR.value: if not data['f_change_at_str']: name = f'{name} - Address Change or Name Correction Only' - + # Conversion Ledger elif event_file_type == EventFilings.FILE_CONVL.value: name = data['cl_ledger_title_txt'] @@ -691,7 +810,12 @@ def build_epoch_filing(business_id: int) -> dict: return filing -def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None) -> int: +def load_data(conn: Connection, + table_name: str, + data: dict, + conflict_column: str = None, + conflict_error = False, + expecting_id: bool = True) -> Optional[int]: columns = ', '.join(data.keys()) values = ', '.join([format_value(v) for v in data.values()]) @@ -700,14 +824,22 @@ def load_data(conn: Connection, table_name: str, data: dict, conflict_column: st check_query = f"select id from {table_name} where {conflict_column} = {conflict_value}" check_result = conn.execute(text(check_query)).scalar() if check_result: - return check_result + if not conflict_error: + return check_result + else: + raise Exception('Trying to reload corp existing in db, run delete script first') - query = f"""insert into {table_name} ({columns}) values ({values}) returning id""" + query = f"""insert into {table_name} ({columns}) values ({values})""" + if expecting_id: + query = query + ' returning id' result = conn.execute(text(query)) - id = result.scalar() - return id + if expecting_id: + id = result.scalar() + return id + + return None def update_data(conn: Connection, table_name: str, data: dict, column: str, value: any) -> int: @@ -727,7 +859,8 @@ def format_value(value) -> str: elif isinstance(value, (int, float)): return str(value) elif isinstance(value, dict): - return f"'{json.dumps(value)}'" + value = json.dumps(value).replace("'", "''") + return f"'{value}'" else: # Note: handle single quote issue value = str(value).replace("'", "''") diff --git a/data-tool/scripts/README_COLIN_Corps_Extract.md b/data-tool/scripts/README_COLIN_Corps_Extract.md index 471284278a..18c17e4753 100644 --- a/data-tool/scripts/README_COLIN_Corps_Extract.md +++ b/data-tool/scripts/README_COLIN_Corps_Extract.md @@ -7,7 +7,7 @@ ``` # create empty db for the first time createdb -h localhost -p 5432 -U postgres -T template0 colin-mig-corps-data-test && \ -psql -h localhost -p 5432 -U postgres -d colin-mig-corps-test -f /data-tool/scripts/colin_corps_extract_postgres_ddl +psql -h localhost -p 5432 -U postgres -d colin-mig-corps-data-test -f /data-tool/scripts/colin_corps_extract_postgres_ddl # kill connection & recreate empty db psql -h localhost -p 5432 -U postgres -d colin-mig-corps-data-test -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = 'colin-mig-corps-data-test' AND pid <> pg_backend_pid();" && \ @@ -25,7 +25,7 @@ connection cprd -d Oracle -u -p -h -P

:/ "port=5432" -connection cprd_pg -d PostgreSql -u postgres -p -h localhost -P -D colin-mig-corps-test +connection cprd_pg -d PostgreSql -u postgres -p -h localhost -P -D colin-mig-corps-data-test ``` 7. Transfer data `dbschemacli /data-tool/scripts/transfer_cprd_corps.sql` 8. Successful output will look something like following: diff --git a/data-tool/scripts/colin_corps_extract_postgres_ddl b/data-tool/scripts/colin_corps_extract_postgres_ddl index f307daac9e..5a8d5ee8af 100644 --- a/data-tool/scripts/colin_corps_extract_postgres_ddl +++ b/data-tool/scripts/colin_corps_extract_postgres_ddl @@ -6,10 +6,6 @@ create sequence corp_processing_id_seq; alter sequence corp_processing_id_seq owner to postgres; -create sequence synonym_id_seq; - -alter sequence synonym_id_seq owner to postgres; - create table if not exists address ( addr_id numeric(10) @@ -55,7 +51,8 @@ create table if not exists corporation admin_email varchar(254), accession_num varchar(10), send_ar_ind boolean, - last_ar_filed_dt timestamp with time zone + last_ar_filed_dt timestamp with time zone, + last_ar_reminder_year numeric(4) ); alter table corporation @@ -775,3 +772,115 @@ comment on table corp_involved_cont_in is 'new table\n\n"Optionally, a ""Continu alter table corp_involved_cont_in owner to postgres; + +CREATE INDEX if not exists ix_conv_event_event_id ON conv_event (event_id); + +CREATE INDEX if not exists ix_conv_ledger_event_id ON conv_ledger (event_id); + +CREATE INDEX if not exists ix_corp_comments_corp_num ON corp_comments (corp_num); + +CREATE INDEX if not exists ix_corp_comments_first_nme ON corp_comments (first_nme); + +CREATE INDEX if not exists ix_corp_comments_last_nme ON corp_comments (last_nme); + +CREATE INDEX if not exists ix_corp_comments_middle_nme ON corp_comments (middle_nme); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_event_id ON corp_involved_amalgamating (event_id); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_ted_corp_num ON corp_involved_amalgamating (ted_corp_num); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_ting_corp_num ON corp_involved_amalgamating (ting_corp_num); + +CREATE INDEX if not exists ix_corp_name_corp_num ON corp_name (corp_num); + +CREATE INDEX if not exists ix_corp_name_start_event_id ON corp_name (start_event_id); + +CREATE INDEX if not exists ix_corp_name_end_event_id ON corp_name (end_event_id); + +CREATE INDEX if not exists ix_corp_name_corp_name_typ_cd ON corp_name (corp_name_typ_cd); + +CREATE INDEX if not exists ix_corp_party_mailing_addr_id ON corp_party (mailing_addr_id); + +CREATE INDEX if not exists ix_corp_party_delivery_addr_id ON corp_party (delivery_addr_id); + +CREATE INDEX if not exists ix_corp_party_corp_num ON corp_party (corp_num); + +CREATE INDEX if not exists ix_corp_party_start_event_id ON corp_party (start_event_id); + +CREATE INDEX if not exists ix_corp_party_end_event_id ON corp_party (end_event_id); + +CREATE INDEX if not exists ix_corp_party_appointment_dt ON corp_party (appointment_dt); + +CREATE INDEX if not exists ix_corp_processing_flow_run_id ON corp_processing (flow_run_id); + +CREATE INDEX if not exists ix_corp_processing_claimed_at ON corp_processing (claimed_at); + +CREATE INDEX if not exists ix_corp_state_corp_num ON corp_state (corp_num); + +CREATE INDEX if not exists ix_corp_state_start_event_id ON corp_state (start_event_id); + +CREATE INDEX if not exists ix_corp_state_end_event_id ON corp_state (end_event_id); + +CREATE INDEX if not exists ix_corp_state_state_type_cd ON corp_state 
(state_type_cd); + +CREATE INDEX if not exists ix_corporation_recognition_dts ON corporation (recognition_dts); + +CREATE INDEX if not exists ix_corporation_bn_9 ON corporation (bn_9); + +CREATE INDEX if not exists ix_corporation_bn_15 ON corporation (bn_15); + +CREATE INDEX if not exists ix_corporation_last_ar_filed_dt ON corporation (last_ar_filed_dt); + +CREATE INDEX if not exists ix_corporation_corp_frozen_type_cd ON corporation (corp_frozen_type_cd); + +CREATE INDEX if not exists ix_filing_withdrawn_event_id ON filing (withdrawn_event_id); + +CREATE INDEX if not exists ix_filing_user_event_id ON filing_user (event_id); + +CREATE INDEX if not exists ix_filing_user_last_name ON filing_user (last_name); + +CREATE INDEX if not exists ix_filing_user_middle_name ON filing_user (middle_name); + +CREATE INDEX if not exists ix_filing_user_first_name ON filing_user (first_name); + +CREATE INDEX if not exists ix_filing_user_user_id ON filing_user (user_id); + +CREATE INDEX if not exists ix_filing_user_role_typ_cd ON filing_user (role_typ_cd); + +CREATE INDEX if not exists ix_jurisdiction_corp_num ON jurisdiction (corp_num); + +CREATE INDEX if not exists ix_jurisdiction_start_event_id ON jurisdiction (start_event_id); + +CREATE INDEX if not exists ix_ledger_text_event_id ON ledger_text (event_id); + +CREATE INDEX if not exists ix_office_corp_num ON office (corp_num); + +CREATE INDEX if not exists ix_office_office_typ_cd ON office (office_typ_cd); + +CREATE INDEX if not exists ix_office_start_event_id ON office (start_event_id); + +CREATE INDEX if not exists ix_office_end_event_id ON office (end_event_id); + +CREATE INDEX if not exists ix_office_mailing_addr_id ON office (mailing_addr_id); + +CREATE INDEX if not exists ix_office_delivery_addr_id ON office (delivery_addr_id); + +CREATE INDEX if not exists ix_resolution_corp_num ON resolution (corp_num); + +CREATE INDEX if not exists ix_resolution_start_event_id ON resolution (start_event_id); + +CREATE INDEX if not exists ix_resolution_end_event_id ON resolution (end_event_id); + +CREATE INDEX if not exists ix_share_series_corp_num ON share_series (corp_num); + +CREATE INDEX if not exists ix_share_series ON share_series (share_class_id); + +CREATE INDEX if not exists ix_share_series_start_event_id ON share_series (start_event_id); + +CREATE INDEX if not exists ix_share_struct_end_event_id ON share_struct (end_event_id); + +CREATE INDEX if not exists ix_share_struct_cls_corp_num ON share_struct_cls (corp_num); + +CREATE INDEX if not exists ix_share_struct_cls_start_event_id ON share_struct_cls (start_event_id); + +CREATE INDEX if not exists ix_share_struct_cls_share_class_id ON share_struct_cls (share_class_id); diff --git a/data-tool/scripts/transfer_cprd_corps.sql b/data-tool/scripts/transfer_cprd_corps.sql index 0aeb1e271b..96477af66b 100644 --- a/data-tool/scripts/transfer_cprd_corps.sql +++ b/data-tool/scripts/transfer_cprd_corps.sql @@ -77,7 +77,14 @@ select case when 'N' then 0 when 'Y' then 1 else 1 - end SEND_AR_IND + end SEND_AR_IND, + (select + to_number(to_char(max(date_1), 'YYYY')) + from eml_log e, rep_data r + where + e.corp_num=c.corp_num + and e.param_id=r.param_id + and e.corp_num=r.t20_1) as LAST_AR_REMINDER_YEAR from corporation c where corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', 
'1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index c31801d6b8..3786ed6d88 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -330,6 +330,7 @@ def _set_business_state_changes(self, business: dict): # TODO: add conv liquidation etc. in the future work for filing in Filing.get_conversion_filings_by_conv_types(self._business.id, ['dissolution', 'continuationOut', + 'putBackOn', 'restoration']): state_filings.append(self._format_state_filing(filing)) diff --git a/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py b/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py index a573c303a2..219367246c 100644 --- a/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py +++ b/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py @@ -52,6 +52,11 @@ def check_business(business: Business) -> list: exclude_in_dissolution=False, exclude_future_effective_filing=True ) ) + + # dis_details is None when the account is not included in FF filter + if not dis_details: + return result + if dis_details.transition_overdue: result.append(transition_warning) elif dis_details.ar_overdue: From 14b74a5a608cefa7eff053b2ab3609ae09031989 Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Tue, 25 Feb 2025 11:30:08 -0800 Subject: [PATCH 070/133] 26151 - Rebase release branch and merge into main (#3251) * 25473 - Add Short Reason for Limited Restoration Expiry (#3222) * 25473-Add-reason-and-expiry-date-for-put-back-off * 25473-Remove-unused-import-from-test * 25473 - Changes from comments * 25473 - Fix missing initialization * 25897 fix document download when filing is in PAID status (#3223) * 23979 - Update Amalgamation Output Wording (#3226) * update court approval wording Signed-off-by: Qin * remove a not needed line Signed-off-by: Qin --------- Signed-off-by: Qin * 25863 - Enable Allowable actions after a successful NoW filing (#3232) * add WITHDRAWN status to incomplete check Signed-off-by: Qin * filter out WITHDRAWN status in get_incomplete_filings_by_type Signed-off-by: Qin --------- Signed-off-by: Qin * 25975 - Fix Submitter Name for Embedded NoW Filing (#3235) * 25975 - Fix display name and add unit tests * 25975 - Use filing class redact_submitter function instead * 25843 - Update NoW email template (#3236) * display name for temp businesss Signed-off-by: Qin * update email wording Signed-off-by: Qin * display filing id Signed-off-by: Qin * display filing id for IA, ContIn and Amal Signed-off-by: Qin * update display company name Signed-off-by: Arwen Qin * upate display filing ID Signed-off-by: Arwen Qin * remove worker change Signed-off-by: Arwen Qin * remove not needed change Signed-off-by: Arwen Qin * fix tests Signed-off-by: Arwen Qin * fix tests 2 Signed-off-by: Arwen Qin * fix tests 3 Signed-off-by: Arwen Qin * update company name default value Signed-off-by: Arwen Qin * fix lint Signed-off-by: Arwen Qin * fix lint 2 Signed-off-by: Arwen Qin --------- Signed-off-by: Qin Signed-off-by: Arwen Qin * 25472 - Update Outputs for Expired Limited Restoration (#3234) * 
25472-Add-put-back-off-reasoning * 25472-Update-company-status-text * 25472-Update-expiry-date-for-restoration-expiration * 25472- lint fixes * 25472 - Remove helper * 25472 - Remove commented out code * 25472 - Simplify template logic for businessDetails * 25472 - condition fix * 24254 - continuation in email fixes (#3244) * 24254 - continuation in email fixes * 24254 - update to use release branch * 24254 - update to print conditionally * show blank recognition date when not come to pass (#3245) Signed-off-by: Qin * 25473 - Add Short Reason for Limited Restoration Expiry (#3222) * 25473-Add-reason-and-expiry-date-for-put-back-off * 25473-Remove-unused-import-from-test * 25473 - Changes from comments * 25473 - Fix missing initialization * 25897 fix document download when filing is in PAID status (#3223) * 23979 - Update Amalgamation Output Wording (#3226) * update court approval wording Signed-off-by: Qin * remove a not needed line Signed-off-by: Qin --------- Signed-off-by: Qin * 25863 - Enable Allowable actions after a successful NoW filing (#3232) * add WITHDRAWN status to incomplete check Signed-off-by: Qin * filter out WITHDRAWN status in get_incomplete_filings_by_type Signed-off-by: Qin --------- Signed-off-by: Qin * 25975 - Fix Submitter Name for Embedded NoW Filing (#3235) * 25975 - Fix display name and add unit tests * 25975 - Use filing class redact_submitter function instead * 25843 - Update NoW email template (#3236) * display name for temp businesss Signed-off-by: Qin * update email wording Signed-off-by: Qin * display filing id Signed-off-by: Qin * display filing id for IA, ContIn and Amal Signed-off-by: Qin * update display company name Signed-off-by: Arwen Qin * upate display filing ID Signed-off-by: Arwen Qin * remove worker change Signed-off-by: Arwen Qin * remove not needed change Signed-off-by: Arwen Qin * fix tests Signed-off-by: Arwen Qin * fix tests 2 Signed-off-by: Arwen Qin * fix tests 3 Signed-off-by: Arwen Qin * update company name default value Signed-off-by: Arwen Qin * fix lint Signed-off-by: Arwen Qin * fix lint 2 Signed-off-by: Arwen Qin --------- Signed-off-by: Qin Signed-off-by: Arwen Qin * 25472 - Update Outputs for Expired Limited Restoration (#3234) * 25472-Add-put-back-off-reasoning * 25472-Update-company-status-text * 25472-Update-expiry-date-for-restoration-expiration * 25472- lint fixes * 25472 - Remove helper * 25472 - Remove commented out code * 25472 - Simplify template logic for businessDetails * 25472 - condition fix * 24254 - continuation in email fixes (#3244) * 24254 - continuation in email fixes * 24254 - update to use release branch * 24254 - update to print conditionally * show blank recognition date when not come to pass (#3245) Signed-off-by: Qin * 26151 - revert to pull main * 26151 - fix revert to pull main * 26151 - bump up version for modified components --------- Signed-off-by: Qin Signed-off-by: Arwen Qin Co-authored-by: meawong Co-authored-by: Vysakh Menon Co-authored-by: Arwen Qin <122495122+ArwenQin@users.noreply.github.com> --- .../amalgamation/amalgamationStmt.html | 11 ++++-- .../business-summary/stateTransition.html | 3 +- .../common/businessDetails.html | 38 +++++++++++-------- legal-api/src/legal_api/models/filing.py | 4 +- .../legal_api/reports/business_document.py | 33 ++++++++++++---- legal-api/src/legal_api/reports/report.py | 31 +++++++++------ .../business_filings/business_filings.py | 11 ++++++ .../legal_api/utils/legislation_datetime.py | 7 ++++ legal-api/src/legal_api/version.py | 2 +- 
.../v2/test_business_filings/test_filings.py | 15 +++++++- .../notice_of_withdrawal_notification.py | 29 +++++++++----- .../email_templates/CONT-IN-APPROVED.html | 2 - .../email_templates/CONT-IN-PAID.html | 4 -- .../email_templates/CONT-IN-REJECTED.html | 4 +- .../email_templates/NOW-COMPLETED.html | 21 +++++----- .../continuation-application-details.html | 2 +- .../email_templates/common/cra-notice.html | 4 +- .../src/entity_emailer/version.py | 2 +- .../test_notice_of_withdrawal_notification.py | 6 +-- .../filing_processors/put_back_off.py | 10 +++++ .../entity-filer/src/entity_filer/version.py | 2 +- .../filing_processors/test_put_back_off.py | 7 ++++ 22 files changed, 166 insertions(+), 82 deletions(-) diff --git a/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html b/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html index ae4a756f8e..21993fc5d2 100644 --- a/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html +++ b/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html @@ -4,8 +4,13 @@ Amalgamation Statement

-      This amalgamation has been effected without court approval. A copy of all of the required affidavits under section 277(1) have been obtained and the affidavit
+      This amalgamation has been effected
+      {% if amalgamationApplication.courtApproval %}
+      with
+      {% else %}
+      without
+      {% endif %}
+      court approval. A copy of all of the required affidavits under section 277(1) have been obtained and the affidavit
       obtained from each amalgamating company has been deposited in that company's records office.
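The hunk above switches the amalgamation statement between "with" and "without" based on the filing's `courtApproval` flag. A minimal rendering sketch of that conditional — simplified to a bare `courtApproval` variable instead of the template's `amalgamationApplication.courtApproval`, and assuming Jinja2 is available:

```python
from jinja2 import Template  # Jinja2 is assumed, as used by the report templates

# Simplified stand-in for the amalgamationStmt.html conditional above
snippet = Template(
    'This amalgamation has been effected '
    '{% if courtApproval %}with{% else %}without{% endif %} court approval.'
)

print(snippet.render(courtApproval=True))   # ... effected with court approval.
print(snippet.render(courtApproval=False))  # ... effected without court approval.
```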
-
-
\ No newline at end of file
+
diff --git a/legal-api/report-templates/template-parts/business-summary/stateTransition.html b/legal-api/report-templates/template-parts/business-summary/stateTransition.html
index b7a6c3d3cb..c748ca7015 100644
--- a/legal-api/report-templates/template-parts/business-summary/stateTransition.html
+++ b/legal-api/report-templates/template-parts/business-summary/stateTransition.html
@@ -35,7 +35,8 @@
Incorporation Number: {{ filing.identifier }}
- {% elif filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary' %} + {% elif (filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary') or + filing.filingType == 'putBackOff' %} Effective Date: {{ filing.effectiveDateTime }} {% else %} diff --git a/legal-api/report-templates/template-parts/common/businessDetails.html b/legal-api/report-templates/template-parts/common/businessDetails.html index 1fc3e13b63..7db81a2f82 100644 --- a/legal-api/report-templates/template-parts/common/businessDetails.html +++ b/legal-api/report-templates/template-parts/common/businessDetails.html @@ -42,16 +42,24 @@
{{report_date_time}}
- {{business.state}} - {% if business.state in ('HISTORICAL', 'LIQUIDATION') and stateFilings %} - - - {% if business.legalType in ['GP', 'SP'] and business.state == 'HISTORICAL' %} - Dissolved - {% else %} - {{stateFilings[0].filingName}} - {% endif %} - - Effective {{stateFilings[0].effectiveDateTime}} - {% endif %} + {{business.state}} + {% if business.state in ('HISTORICAL', 'LIQUIDATION') and stateFilings %} + - + {% if business.state == 'HISTORICAL' %} + {% if business.legalType in ['GP', 'SP'] %} + Dissolved + {% else %} + {% set filing = stateFilings[0] %} + {% if filing.filingType == 'putBackOff' %} + {{filing.reason}} on {{filing.expiryDate}} + {% else %} + {{filing.filingName}} - Effective {{filing.effectiveDateTime}} + {% endif %} + {% endif %} + {% endif %} + {% endif %} +
+ {% elif header.reportType == 'amalgamationApplication' %} @@ -70,8 +78,8 @@
{{business.identifier}}
{% endif %}
{{effective_date_time}}
- {% if header.isFutureEffective %} -
{{effective_date_time}}
+ {% if not business or business.identifier.startswith('T') %} +
 
{% elif header.status == 'COMPLETED' %}
{{recognition_date_time}}
{% endif %} @@ -132,8 +140,8 @@
{{business.identifier}}
{% endif %}
{{filing_date_time}}
- {% if header.isFutureEffective %} -
{{effective_date_time}}
+ {% if not business or business.identifier.startswith('T') %} +
 
{% elif header.status == 'COMPLETED' %}
{{recognition_date_time}}
{% endif %} @@ -342,7 +350,7 @@ {% if not business or business.identifier.startswith('T') %}
{{ filing_date_time }}
-
{{ effective_date_time }}
+
 
{% else %}
{{business.identifier}}
{{ filing_date_time }}
diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index fed9ffea6d..912bc0d717 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -980,7 +980,7 @@ def get_incomplete_filings_by_type(business_id: int, filing_type: str): filings = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ filter(Filing._filing_type == filing_type). \ - filter(Filing._status != Filing.Status.COMPLETED.value). \ + filter(not_(Filing._status.in_([Filing.Status.COMPLETED.value, Filing.Status.WITHDRAWN.value]))). \ order_by(desc(Filing.filing_date)). \ all() return filings @@ -1024,7 +1024,7 @@ def get_incomplete_filings_by_types(business_id: int, filing_types: list, exclud filings = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ filter(Filing._filing_type.in_(filing_types)). \ - filter(Filing._status != Filing.Status.COMPLETED.value). \ + filter(not_(Filing._status.in_([Filing.Status.COMPLETED.value, Filing.Status.WITHDRAWN.value]))). \ filter(not_(Filing._status.in_(excluded_statuses))). \ order_by(desc(Filing.effective_date)). \ all() diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 3786ed6d88..328b5d8bb7 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -323,7 +323,7 @@ def _set_business_state_changes(self, business: dict): 'dissolved', 'restoration', 'voluntaryDissolution', 'Involuntary Dissolution', - 'voluntaryLiquidation', 'putBackOn', + 'voluntaryLiquidation', 'putBackOn', 'putBackOff', 'continuationOut']): state_filings.append(self._format_state_filing(filing)) @@ -446,7 +446,8 @@ def _format_state_filing(self, filing: Filing) -> dict: filing_info['filingName'] = BusinessDocument.\ _get_summary_display_name(filing_type, filing_meta['dissolution']['dissolutionType'], - self._business.legal_type) + self._business.legal_type, + None) if self._business.legal_type in ['SP', 'GP'] and filing_meta['dissolution']['dissolutionType'] == \ 'voluntary': filing_info['dissolution_date_str'] = LegislationDatetime.as_legislation_timezone_from_date_str( @@ -455,14 +456,16 @@ def _format_state_filing(self, filing: Filing) -> dict: filing_info['filingName'] = BusinessDocument.\ _get_summary_display_name(filing_type, filing_sub_type, - self._business.legal_type) + self._business.legal_type, + None) if filing_sub_type in ['limitedRestoration', 'limitedRestorationExtension']: expiry_date = filing_meta['restoration']['expiry'] expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date) expiry_date = expiry_date.replace(minute=1) - filing_info['limitedRestorationExpiryDate'] = LegislationDatetime.format_as_report_string(expiry_date) + filing_info['limitedRestorationExpiryDate'] = LegislationDatetime.\ + format_as_report_expiry_string_1159(expiry_date) elif filing_type == 'continuationOut': - filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing_type, None, None) + filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing_type, None, None, None) country_code = filing_meta['continuationOut']['country'] region_code = filing_meta['continuationOut']['region'] @@ -476,9 +479,18 @@ def _format_state_filing(self, filing: Filing) -> dict: continuation_out_date = LegislationDatetime.as_legislation_timezone_from_date_str( filing_meta['continuationOut']['continuationOutDate']) 
filing_info['continuationOutDate'] = continuation_out_date.strftime(OUTPUT_DATE_FORMAT) + elif filing_type == 'putBackOff': + put_back_off = filing_meta.get('putBackOff') + reason = put_back_off.get('reason') + expiry_date_str = put_back_off.get('expiryDate') + filing_info['filingName'] = BusinessDocument.\ + _get_summary_display_name(filing_type, None, None, reason) + filing_info['reason'] = reason + expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date_str) + filing_info['expiryDate'] = expiry_date.strftime('%B %d, %Y') else: filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing_type, None, None) + _get_summary_display_name(filing_type, None, None, None) return filing_info def _set_amalgamation_details(self, business: dict): @@ -616,7 +628,9 @@ def _get_environment(): @staticmethod def _get_summary_display_name(filing_type: str, filing_sub_type: Optional[str], - legal_type: Optional[str]) -> str: + legal_type: Optional[str], + reason: Optional[str] + ) -> str: if filing_type == 'dissolution': if filing_sub_type == 'voluntary': if legal_type in ['SP', 'GP']: @@ -627,6 +641,8 @@ def _get_summary_display_name(filing_type: str, return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][filing_sub_type] elif filing_type == 'restoration': return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][filing_sub_type] + elif filing_type == 'putBackOff': + return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][reason] else: return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type] @@ -659,6 +675,9 @@ def _get_legal_type_description(legal_type: str) -> str: 'Involuntary Dissolution': 'Involuntary Dissolution', 'voluntaryLiquidation': 'Voluntary Liquidation', 'putBackOn': 'Correction - Put Back On', + 'putBackOff': { + 'Limited Restoration Expired': 'Dissolved due to expired Limited Restoration' + }, 'continuationOut': 'Continuation Out' } diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index 091ae2c763..8e86695367 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -594,8 +594,10 @@ def _format_restoration_data(self, filing): filing['nameRequest'] = filing['restoration'].get('nameRequest') filing['parties'] = filing['restoration'].get('parties') filing['offices'] = filing['restoration']['offices'] - meta_data = self._filing.meta_data or {} - filing['fromLegalName'] = meta_data.get('restoration', {}).get('fromLegalName') + if self._filing.meta_data: # available when filing is COMPLETED + filing['fromLegalName'] = self._filing.meta_data.get('restoration', {}).get('fromLegalName') + else: + filing['fromLegalName'] = self._business.legal_name if relationships := filing['restoration'].get('relationships'): filing['relationshipsDesc'] = ', '.join(relationships) @@ -608,14 +610,16 @@ def _format_restoration_data(self, filing): filing['applicationDate'] = filing['restoration'].get('applicationDate', 'Not Applicable') filing['noticeDate'] = filing['restoration'].get('noticeDate', 'Not Applicable') - business_dissolution = VersionedBusinessDetailsService.find_last_value_from_business_revision( - self._filing.transaction_id, self._business.id, is_dissolution_date=True) - filing['dissolutionLegalName'] = business_dissolution.legal_name + if self._filing.transaction_id: # available when filing is COMPLETED + business_dissolution = VersionedBusinessDetailsService.find_last_value_from_business_revision( + 
self._filing.transaction_id, self._business.id, is_dissolution_date=True) + filing['dissolutionLegalName'] = business_dissolution.legal_name + else: + filing['dissolutionLegalName'] = self._business.legal_name - if expiry_date := meta_data.get('restoration', {}).get('expiry'): + if expiry_date := filing['restoration'].get('expiry'): expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date) - expiry_date = expiry_date.replace(minute=1) - filing['restoration_expiry_date'] = LegislationDatetime.format_as_report_string(expiry_date) + filing['restoration_expiry_date'] = LegislationDatetime.format_as_report_expiry_string_1159(expiry_date) def _format_consent_continuation_out_data(self, filing): cco = ConsentContinuationOut.get_by_filing_id(self._filing.id) @@ -685,9 +689,14 @@ def _format_alteration_data(self, filing): # Get current list of translations in alteration. None if it is deletion if 'nameTranslations' in filing['alteration']: filing['listOfTranslations'] = filing['alteration'].get('nameTranslations', []) - # Get previous translations for deleted translations. No record created in aliases version for deletions - filing['previousNameTranslations'] = VersionedBusinessDetailsService.get_name_translations_before_revision( - self._filing.transaction_id, self._business.id) + if self._filing.transaction_id: + # Get previous translations for deleted translations. No record created in version for deletions + filing['previousNameTranslations'] = ( + VersionedBusinessDetailsService.get_name_translations_before_revision( + self._filing.transaction_id, + self._business.id)) + else: + filing['previousNameTranslations'] = [alias.json for alias in self._business.aliases.all()] if filing['alteration'].get('shareStructure', None): filing['shareClasses'] = filing['alteration']['shareStructure'].get('shareClasses', []) dates = filing['alteration']['shareStructure'].get('resolutionDates', []) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index c2213e978f..dca4e93130 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -37,6 +37,7 @@ import legal_api.reports from legal_api.constants import BOB_DATE from legal_api.core import Filing as CoreFiling +from legal_api.core.constants import REDACTED_STAFF_SUBMITTER from legal_api.exceptions import BusinessException from legal_api.models import ( Address, @@ -310,6 +311,16 @@ def get_single_filing(identifier: str, filing_id: int): if (rv.status == Filing.Status.WITHDRAWN.value or rv.storage.withdrawal_pending) and identifier.startswith('T'): now_filing = ListFilingResource.get_notice_of_withdrawal(filing_json['filing']['header']['filingId']) filing_json['filing']['noticeOfWithdrawal'] = now_filing.json + + submitter = now_filing.filing_submitter + if submitter and submitter.username and jwt: + if rv.redact_submitter(now_filing.submitter_roles, jwt): + submitter_displayname = REDACTED_STAFF_SUBMITTER + else: + submitter_displayname = submitter.display_name or submitter.username + + filing_json['filing']['noticeOfWithdrawal']['filing']['header']['submitter'] = submitter_displayname + elif (rv.status in [Filing.Status.CHANGE_REQUESTED.value, Filing.Status.APPROVED.value, Filing.Status.REJECTED.value] and diff --git a/legal-api/src/legal_api/utils/legislation_datetime.py 
b/legal-api/src/legal_api/utils/legislation_datetime.py index 715e54489e..e283fc883c 100644 --- a/legal-api/src/legal_api/utils/legislation_datetime.py +++ b/legal-api/src/legal_api/utils/legislation_datetime.py @@ -126,6 +126,13 @@ def format_as_report_expiry_string(date_time: datetime) -> str: date_time_str = LegislationDatetime.format_as_report_string_with_custom_time(date_time, 0, 1, 0, 0) return date_time_str + @staticmethod + def format_as_report_expiry_string_1159(date_time: datetime) -> str: + """Return a datetime string in this format (eg: `August 5, 2021 at 11:59 pm Pacific time`).""" + # ensure is set to correct timezone + date_time_str = LegislationDatetime.format_as_report_string_with_custom_time(date_time, 23, 59, 0, 0) + return date_time_str + @staticmethod def format_as_legislation_date(date_time: datetime) -> str: """Return the date in legislation timezone as a string.""" diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index cba46c8494..60f74b6dac 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.142.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 4be4566a3d..4cc19f2072 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -116,8 +116,16 @@ def test_get_temp_business_filing(session, client, jwt, legal_type, filing_type, assert rv.json['filing']['header']['name'] == filing_type assert rv.json['filing'][filing_type] == filing_json -def test_get_withdrawn_temp_business_filing(session, client, jwt): +@pytest.mark.parametrize( + 'jwt_role, expected', + [ + (UserRoles.staff, 'staff-person'), + (UserRoles.public_user, 'Registry Staff'), + ] +) +def test_get_withdrawn_temp_business_filing(session, client, jwt, jwt_role, expected): """Assert that a withdrawn FE temp business returns the filing with the NoW embedded once available.""" + user = factory_user('idir/staff-person') # set-up withdrawn boostrap FE filing today = datetime.utcnow().date() @@ -156,6 +164,8 @@ def test_get_withdrawn_temp_business_filing(session, client, jwt): del now_json_data['filing']['header']['filingId'] now_filing = factory_filing(None, now_json_data) now_filing.withdrawn_filing_id = withdrawn_filing_id + now_filing.submitter_id = user.id + now_filing.submitter_roles = UserRoles.staff now_filing.save() new_business_filing.withdrawal_pending = True new_business_filing.save() @@ -174,11 +184,12 @@ def test_get_withdrawn_temp_business_filing(session, client, jwt): # fetch filings after the bootstrap filing has been withdrawn rv = client.get(f'/api/v2/businesses/{identifier}/filings', - headers=create_header(jwt, [STAFF_ROLE], identifier)) + headers=create_header(jwt, [jwt_role], identifier)) # validate that the NoW is still embedded in the withdrawn filing assert 'noticeOfWithdrawal' in rv.json['filing'] assert rv.json['filing']['noticeOfWithdrawal'] is not None + assert rv.json['filing']['noticeOfWithdrawal']['filing']['header']['submitter'] == expected def test_get_filing_not_found(session, client, jwt): """Assert that the request fails if the filing ID doesn't match an existing filing.""" diff --git 
a/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py index c36f6dac76..7b7b64e59c 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py @@ -40,12 +40,15 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many- # get template variables from filing filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info['filingId']) - - # display company name only for existing businesses - if business.get('identifier').startswith('T'): - company_name = None - else: - company_name = business.get('legalName') + legal_type = business.get('legalType') + + # display company name for existing businesses and temp businesses + company_name = ( + business.get('legalName') + or Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') + # fall back default value + or 'Unknown Company' + ) # record to be withdrawn --> withdrawn filing display name withdrawn_filing = Filing.find_by_id(filing.withdrawn_filing_id) withdrawn_filing_display_name = FilingMeta.get_display_name( @@ -61,12 +64,20 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many- jnja_template = Template(filled_template, autoescape=True) filing_data = (filing.json)['filing'][f'{filing_type}'] filing_name = filing.filing_type[0].upper() + ' '.join(re.findall('[a-zA-Z][^A-Z]*', filing.filing_type[1:])) + + # default to None + filing_id = None + # show filing ID in email template when the withdrawn record is an IA, Amalg. 
or a ContIn + if business.get('identifier').startswith('T'): + filing_id = filing_data['filingId'] + html_out = jnja_template.render( business=business, filing=filing_data, header=(filing.json)['filing']['header'], company_name=company_name, filing_date_time=leg_tmz_filing_date, + filing_id=filing_id, effective_date_time=leg_tmz_effective_date, withdrawnFilingType=withdrawn_filing_display_name, entity_dashboard_url=current_app.config.get('DASHBOARD_URL') + @@ -86,11 +97,9 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many- # assign subject subject = 'Notice of Withdrawal filed Successfully' - - legal_name = business.get('legalName', None) + legal_name = company_name legal_name = 'Numbered Company' if legal_name.startswith(identifier) else legal_name - if not identifier.startswith('T'): - subject = f'{legal_name} - {subject}' if legal_name else subject + subject = f'{legal_name} - {subject}' return { 'recipients': recipients, diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html index dc6baa2a99..a185765ded 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html @@ -42,8 +42,6 @@ [[continuation-application-details.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html index a15e839c93..0b13f0bc0e 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html @@ -20,15 +20,11 @@

We have received your Continuation Application

- [[20px.html]] - [[divider.html]] [[20px.html]] [[business-information.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html index 87ab51a2fd..29daf600f0 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html @@ -30,7 +30,7 @@

Your Next Steps

    -
  1. Review the reasons for rejected as outlined below:
  2. +
  3. Review the reasons your authorization was rejected below:
  4. {{ latest_review_comment }}
  5. Visit My Business Registry to submit a new Continuation Application.
  6. @@ -44,8 +44,6 @@ [[continuation-application-details.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html index 2ad556acfb..422a5fb811 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html @@ -25,21 +25,20 @@ [[20px.html]] [[divider.html]] [[20px.html]] - {% if company_name %} -

    Company Name:

    -

    {{ company_name }}

    - [[16px.html]] - {% endif %} -

    Date and Time of Filing:

    -

    {{ filing_date_time}}

    +

    Company Name:

    +

    {{ company_name }}

    [[16px.html]] -

    Effective Date and Time:

    + + +

    Date and Time of Withdrawal:

    {{ effective_date_time}}

    [[16px.html]] -

    Record to be Withdrawn:

    -

    {{ withdrawnFilingType }}

    +

    Withdrawn Record:

    +

    + {{ withdrawnFilingType }} {% if filing_id %} (Filing #{{ filing_id }}){% endif %} +

    [[20px.html]] [[divider.html]] [[20px.html]] @@ -60,4 +59,4 @@ - \ No newline at end of file + diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html index 5cddc6c689..f63ce03b85 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html @@ -6,7 +6,7 @@ [[16px.html]]
    -
    Identifying Number in Foreign Jurisdiction:
    +
    Identifying Number in Previous Jurisdiction:
    {{ filing.foreignJurisdiction.identifier }}
    [[16px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html index 1a01771548..f8ec597f7b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html @@ -7,8 +7,8 @@ {% else %} As part of a provincial–federal partnership, BC Registries and Online Services and Canada Revenue Agency (CRA) have developed an agreement to assign a Business - Number to all companies operating in BC. As a result of this incorporation, a - Business number will be assigned to the company and will be emailed to the + Number to all companies operating in BC. As a result of this {% if filing_type == 'continuationIn' %}continuation, + {% else %}incorporation,{% endif %} a Business number will be assigned to the company and will be emailed to the company’s registered email address. {% endif %}

    diff --git a/queue_services/entity-emailer/src/entity_emailer/version.py b/queue_services/entity-emailer/src/entity_emailer/version.py index 984cf87ae3..ae7b37c91a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/version.py +++ b/queue_services/entity-emailer/src/entity_emailer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.142.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py index 1aefc71915..0a9727762e 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py @@ -67,11 +67,7 @@ def test_notice_of_withdrawal_notification( {'filingId': now_filing.id, 'type': 'noticeOfWithdrawal', 'option': status}, token ) - if is_temp: - assert email['content']['subject'] == 'Notice of Withdrawal filed Successfully' - else: - assert email['content']['subject'] == f'{legal_name} - Notice of Withdrawal filed Successfully' - + assert email['content']['subject'] == f'{legal_name} - Notice of Withdrawal filed Successfully' assert 'recipient@email.com' in email['recipients'] assert email['content']['body'] assert email['content']['attachments'] == [] diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py index 76db9f809c..e81528024a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py @@ -19,6 +19,7 @@ import dpath from entity_queue_common.service_utils import QueueException, logger from legal_api.models import Business, Filing +from legal_api.utils.legislation_datetime import LegislationDatetime from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import filings @@ -32,6 +33,8 @@ def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: F logger.debug('processing putBackOff: %s', filing) + filing_meta.put_back_off = {} + # update court order, if any is present with suppress(IndexError, KeyError, TypeError): court_order_json = dpath.util.get(put_back_off_filing, '/courtOrder') @@ -39,6 +42,13 @@ def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: F filing_rec.order_details = put_back_off_filing.get('details') + if business.restoration_expiry_date: + filing_meta.put_back_off = { + **filing_meta.put_back_off, + 'reason': 'Limited Restoration Expired', + 'expiryDate': LegislationDatetime.format_as_legislation_date(business.restoration_expiry_date) + } + # change business state to historical business.state = Business.State.HISTORICAL business.state_filing_id = filing_rec.id diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 984cf87ae3..ae7b37c91a 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.142.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: 
disable=invalid-name diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py index a6a7c6066d..72d98e3933 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py @@ -16,6 +16,8 @@ import random from legal_api.models import Business, Filing +from legal_api.utils.datetime import datetime +from legal_api.utils.legislation_datetime import LegislationDatetime from registry_schemas.example_data import FILING_HEADER, PUT_BACK_OFF from entity_filer.filing_meta import FilingMeta @@ -28,6 +30,8 @@ def test_worker_put_back_off(session): # Setup identifier = 'BC1234567' business = create_business(identifier, legal_type='BC') + expiry = datetime.utcnow() + business.restoration_expiry_date = expiry # Create filing filing_json = copy.deepcopy(FILING_HEADER) @@ -50,3 +54,6 @@ def test_worker_put_back_off(session): assert business.state_filing_id == filing.id assert business.restoration_expiry_date is None assert filing.order_details == final_filing.order_details + + assert filing_meta.put_back_off['reason'] == 'Limited Restoration Expired' + assert filing_meta.put_back_off['expiryDate'] == LegislationDatetime.format_as_legislation_date(expiry) From cfc3a3a9c4e173681312ef563a919bdf71b52d15 Mon Sep 17 00:00:00 2001 From: Rajandeep Kaur <144159721+Rajandeep98@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:16:38 -0800 Subject: [PATCH 071/133] 25188 - court order filing added (#3203) * court order filing added * recipt added court order * fixed linting * adding display name and fee code as no fee for sp gp cp cben * linting * fee code updated * unit test, payment integ, document fix --- legal-api/src/legal_api/core/filing.py | 12 ++++++------ legal-api/src/legal_api/core/meta/filing.py | 16 ++++++++++++++-- legal-api/src/legal_api/models/filing.py | 19 ++++++++++++++++++- .../business_filings/business_filings.py | 2 +- .../tests/unit/services/test_authorization.py | 4 ++-- 5 files changed, 41 insertions(+), 12 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index 1ad8757471..5af982f91b 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -482,7 +482,6 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m """Return a list of documents for a particular filing.""" no_output_filings = [ Filing.FilingTypes.CONVERSION.value, - Filing.FilingTypes.COURTORDER.value, Filing.FilingTypes.PUTBACKOFF.value, Filing.FilingTypes.PUTBACKON.value, Filing.FilingTypes.REGISTRARSNOTATION.value, @@ -518,20 +517,20 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m return documents if filing.storage and filing.storage.filing_type in no_output_filings: - if filing.filing_type == 'courtOrder' and \ - (filing.storage.documents.filter( - Document.type == DocumentType.COURT_ORDER.value).one_or_none()): - documents['documents']['uploadedCourtOrder'] = f'{base_url}{doc_url}/uploadedCourtOrder' - return documents # return a receipt for filings completed in our system if filing.storage and filing.storage.payment_completion_date: + if filing.filing_type == 'courtOrder' and \ + (filing.storage.documents.filter( + Document.type == DocumentType.COURT_ORDER.value).one_or_none()): + documents['documents']['uploadedCourtOrder'] = 
f'{base_url}{doc_url}/uploadedCourtOrder' documents['documents']['receipt'] = f'{base_url}{doc_url}/receipt' no_legal_filings_in_paid_withdrawn_status = [ Filing.FilingTypes.REGISTRATION.value, Filing.FilingTypes.CONSENTCONTINUATIONOUT.value, + Filing.FilingTypes.COURTORDER.value, Filing.FilingTypes.CONTINUATIONOUT.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, @@ -571,6 +570,7 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m no_legal_filings = [ Filing.FilingTypes.CONSENTCONTINUATIONOUT.value, Filing.FilingTypes.CONTINUATIONOUT.value, + Filing.FilingTypes.COURTORDER.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, Filing.FilingTypes.TRANSPARENCY_REGISTER.value, diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index f7f67b14c3..0ab7bf2f21 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -390,8 +390,20 @@ class FilingTitles(str, Enum): 'name': 'courtOrder', 'title': 'Court Order', 'displayName': 'Court Order', - 'code': 'NOFEE' - }, + 'codes': { + 'SP': 'COURT', + 'GP': 'COURT', + 'CP': 'COURT', + 'BC': 'COURT', + 'BEN': 'COURT', + 'CC': 'COURT', + 'ULC': 'COURT', + 'C': 'COURT', + 'CBEN': 'COURT', + 'CUL': 'COURT', + 'CCC': 'COURT', + } + }, 'dissolution': { 'name': 'dissolution', 'additional': [ diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 912bc0d717..a1f4b769b9 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -293,6 +293,24 @@ class Source(Enum): 'CCC': 'CRCTN', } }, + 'courtOrder': { + 'name': 'courtOrder', + 'title': 'Court Order', + 'displayName': 'Court Order', + 'codes': { + 'SP': 'COURT', + 'GP': 'COURT', + 'CP': 'COURT', + 'BC': 'COURT', + 'BEN': 'COURT', + 'CC': 'COURT', + 'ULC': 'COURT', + 'C': 'COURT', + 'CBEN': 'COURT', + 'CUL': 'COURT', + 'CCC': 'COURT', + } + }, 'dissolution': { 'name': 'dissolution', 'voluntary': { @@ -497,7 +515,6 @@ class Source(Enum): # changing the structure of fee code in courtOrder/registrarsNotation/registrarsOrder # for all the business the fee code remain same as NOFEE (Staff) 'adminFreeze': {'name': 'adminFreeze', 'title': 'Admin Freeze', 'code': 'NOFEE'}, - 'courtOrder': {'name': 'courtOrder', 'title': 'Court Order', 'code': 'NOFEE'}, 'putBackOff': {'name': 'putBackOff', 'title': 'Put Back Off', 'code': 'NOFEE'}, 'putBackOn': {'name': 'putBackOn', 'title': 'Put Back On', 'code': 'NOFEE'}, 'registrarsNotation': {'name': 'registrarsNotation', 'title': 'Registrars Notation', 'code': 'NOFEE'}, diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index dca4e93130..7a7507e2d9 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -735,7 +735,7 @@ def get_filing_types(business: Business, filing_json: dict): # pylint: disable= legal_type, priority_flag, waive_fees_flag)) - elif filing_type in ('adminFreeze', 'courtOrder', 'putBackOff', 'putBackOn', + elif filing_type in ('adminFreeze', 'putBackOff', 'putBackOn', 'registrarsNotation', 'registrarsOrder'): filing_type_code = Filing.FILINGS.get(filing_type, {}).get('code') filing_types.append({ diff --git 
a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 71b9c3e229..13b90d854d 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -177,7 +177,7 @@ class FilingKey(str, Enum): FilingKey.CORRCTN: {'displayName': 'Register Correction Application', 'feeCode': 'CRCTN', 'name': 'correction'}, FilingKey.CORRCTN_FIRMS: {'displayName': 'Register Correction Application', 'feeCode': 'FMCORR', 'name': 'correction'}, - FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'NOFEE', 'name': 'courtOrder'}, + FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'COURT', 'name': 'courtOrder'}, FilingKey.VOL_DISS: {'displayName': 'Voluntary Dissolution', 'feeCode': 'DIS_VOL', 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS: {'displayName': 'Administrative Dissolution', 'feeCode': 'DIS_ADM', @@ -256,7 +256,7 @@ class FilingKey(str, Enum): FilingKey.CORRCTN: {'displayName': 'Register Correction Application', 'feeCode': 'CRCTN', 'name': 'correction'}, FilingKey.CORRCTN_FIRMS: {'displayName': 'Register Correction Application', 'feeCode': 'FMCORR', 'name': 'correction'}, - FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'NOFEE', 'name': 'courtOrder'}, + FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'COURT', 'name': 'courtOrder'}, FilingKey.VOL_DISS: {'displayName': 'Voluntary Dissolution', 'feeCode': 'DIS_VOL', 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS: {'displayName': 'Administrative Dissolution', 'feeCode': 'DIS_ADM', From 44643aa6a0abb2462250ee2b57da2afd51a47708 Mon Sep 17 00:00:00 2001 From: Arwen Qin <122495122+ArwenQin@users.noreply.github.com> Date: Wed, 26 Feb 2025 13:48:13 -0800 Subject: [PATCH 072/133] 25940 - fix the dissolution key error (#3256) Signed-off-by: Qin --- legal-api/src/legal_api/core/meta/filing.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 0ab7bf2f21..7795d81e26 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -748,8 +748,9 @@ def display_name(business: Business, filing: FilingStorage) -> Optional[str]: # overriden with the latest correction, which cause loosing the previous correction link. 
                name = FilingMeta.get_corrected_filing_name(filing, business_revision, name)
 
-        elif filing.filing_type in ('dissolution') and filing.meta_data:
-            if filing.meta_data['dissolution'].get('dissolutionType') == 'administrative':
+        elif filing.filing_type in ('dissolution'):
+            dissolution_data = filing.meta_data.get('dissolution') if filing.meta_data else None
+            if dissolution_data and dissolution_data.get('dissolutionType') == 'administrative':
                 name = 'Administrative Dissolution'
 
         elif filing.filing_type in ('adminFreeze') and filing.meta_data:

From 82e0da452b8df986a4b39f8e8dec476e916d289b Mon Sep 17 00:00:00 2001
From: Vysakh Menon
Date: Wed, 26 Feb 2025 14:11:06 -0800
Subject: [PATCH 073/133] 25904 Tombstone pipeline - push contact email to auth (#3253)

---
 data-tool/flows/common/auth_service.py | 42 ++++++++++++++++++-
 data-tool/flows/corps_tombstone_flow.py | 23 +++++++++-
 .../flows/tombstone/tombstone_queries.py | 3 +-
 data-tool/flows/tombstone/tombstone_utils.py | 3 ++
 .../filing_components/business_profile.py | 2 +-
 .../test_business_profile.py | 2 +-
 6 files changed, 69 insertions(+), 6 deletions(-)

diff --git a/data-tool/flows/common/auth_service.py b/data-tool/flows/common/auth_service.py
index 5fb5c9d357..faacb9db68 100644
--- a/data-tool/flows/common/auth_service.py
+++ b/data-tool/flows/common/auth_service.py
@@ -128,7 +128,7 @@ def create_entity(cls,
             timeout=cls.get_time_out(config)
         )
 
-        if entity_record.status_code != HTTPStatus.OK:
+        if entity_record.status_code not in (HTTPStatus.ACCEPTED, HTTPStatus.CREATED):
             return HTTPStatus.BAD_REQUEST
         return HTTPStatus.OK
 
@@ -200,3 +200,43 @@ def delete_affiliation(cls, config, account: int, business_registration: str) ->
                 or entity_record.status_code not in (HTTPStatus.OK, HTTPStatus.NO_CONTENT):
             return HTTPStatus.BAD_REQUEST
         return HTTPStatus.OK
+
+    @classmethod
+    def update_contact_email(cls, config, identifier: str, email: str) -> Dict:
+        """Update contact email of the business."""
+        token = cls.get_bearer_token(config)
+        auth_url = config.AUTH_SVC_URL
+        account_svc_entity_url = f'{auth_url}/entities'
+
+        # Contact details to create or update on the entity record
+        data = {
+            'email': email,
+            'phone': '',
+            'phoneExtension': ''
+        }
+
+        rv = requests.post(
+            url=f'{account_svc_entity_url}/{identifier}/contacts',
+            headers={
+                **cls.CONTENT_TYPE_JSON,
+                'Authorization': cls.BEARER + token
+            },
+            data=json.dumps(data),
+            timeout=cls.get_time_out(config)
+        )
+
+        if (rv.status_code == HTTPStatus.BAD_REQUEST and 'DATA_ALREADY_EXISTS' in rv.text):
+            rv = requests.put(
+                url=f'{account_svc_entity_url}/{identifier}/contacts',
+                headers={
+                    **cls.CONTENT_TYPE_JSON,
+                    'Authorization': cls.BEARER + token
+                },
+                data=json.dumps(data),
+                timeout=cls.get_time_out(config)
+            )
+
+        if rv.status_code in (HTTPStatus.OK, HTTPStatus.CREATED):
+            return HTTPStatus.OK
+
+        return rv.status_code
diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py
index 4bbe65c860..3e96ca86bb 100644
--- a/data-tool/flows/corps_tombstone_flow.py
+++ b/data-tool/flows/corps_tombstone_flow.py
@@ -1,3 +1,5 @@
+import contextlib
+from http import HTTPStatus
 from pathlib import Path
 
 import math
@@ -287,21 +289,38 @@ def update_auth(conn: Connection, config, corp_num: str, tombstone_data: dict):
     if config.AFFILIATE_ENTITY:
         business_data = tombstone_data['businesses']
         account_id = config.AFFILIATE_ENTITY_ACCOUNT_ID
-        AuthService.create_affiliation(
+        affiliation_status = AuthService.create_affiliation(
            config=config,
            account=account_id,
business_registration=business_data['identifier'], business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) + if affiliation_status != HTTPStatus.OK: + with contextlib.suppress(Exception): + AuthService.delete_affiliation( + config=config, + account=account_id, + business_registration=business_data['identifier']) + raise Exception(f"""Failed to affiliate business {business_data['identifier']}""") if config.UPDATE_ENTITY: business_data = tombstone_data['businesses'] - AuthService.create_entity( + entity_status = AuthService.create_entity( config=config, business_registration=business_data['identifier'], business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) + if entity_status == HTTPStatus.OK: + update_email_status = AuthService.update_contact_email( + config=config, + identifier=business_data['identifier'], + email=tombstone_data['admin_email'] + ) + if update_email_status != HTTPStatus.OK: + raise Exception(f"""Failed to update admin email in auth {business_data['identifier']}""") + else: + raise Exception(f"""Failed to create entity in auth {business_data['identifier']}""") @task(name='1-Migrate-Corp-Users-Task') diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index a7a7bf89e6..4e729c1ccc 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -281,7 +281,8 @@ def get_business_query(corp_num, suffix): when c.corp_frozen_type_cd = 'C' then true else false - end admin_freeze + end admin_freeze, + c.admin_email from corporation c left outer join event e on e.corp_num = c.corp_num and e.event_type_cd IN ('CONVICORP', 'CONVAMAL') -- need to add other event like CONVCIN... 
where 1 = 1 diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index baaf6f8d2e..6d51bf5afb 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -610,6 +610,9 @@ def formatted_data_cleanup(data: dict) -> dict: 'state_filing_index': filings_business['state_filing_index'] } data['filings'] = filings_business['filings'] + + data['admin_email'] = data['businesses']['admin_email'] + del data['businesses']['admin_email'] return data diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py index c55c668f74..c4547c1a57 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py @@ -78,7 +78,7 @@ def _update_business_profile(business: Business, profile_info: Dict) -> Dict: if rv.status_code == HTTPStatus.BAD_REQUEST and \ 'DATA_ALREADY_EXISTS' in rv.text: put = requests.put( - url=''.join([account_svc_entity_url, '/', business.identifier]), + url=''.join([account_svc_entity_url, '/', business.identifier, '/contacts']), headers={**AccountService.CONTENT_TYPE_JSON, 'Authorization': AccountService.BEARER + token}, data=data, diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py index 9b613c4571..04080471f8 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py @@ -60,7 +60,7 @@ def test_update_business_profile(app, session, requests_mock, requests_mock.post(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}/contacts', json=response_json, status_code=response_status) - requests_mock.put(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}', + requests_mock.put(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}/contacts', status_code=put_status) # test From 6534268d3fbf63a0c325745e8f662161d5e38398 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Wed, 26 Feb 2025 15:46:36 -0800 Subject: [PATCH 074/133] 23001 - Reduce DB Versioning Debugging Logs to Minimal (#3255) * reduce db versioning debugging logs to minimal --- legal-api/src/legal_api/models/db.py | 38 +------------------ .../sql-versioning/sql_versioning/utils.py | 1 - .../sql_versioning/versioning.py | 8 ---- 3 files changed, 1 insertion(+), 46 deletions(-) diff --git a/legal-api/src/legal_api/models/db.py b/legal-api/src/legal_api/models/db.py index 6103dd2c22..37f14813a2 100644 --- a/legal-api/src/legal_api/models/db.py +++ b/legal-api/src/legal_api/models/db.py @@ -19,7 +19,7 @@ from flask import current_app from flask_sqlalchemy import SignallingSession, SQLAlchemy -from sql_versioning import TransactionManager, debug +from sql_versioning import TransactionManager from sql_versioning import disable_versioning as _new_disable_versioning from sql_versioning import enable_versioning as _new_enable_versioning from sql_versioning import version_class as _new_version_class @@ -161,7 +161,6 @@ def _switch_versioning(cls, previous, current): 
cls._versioning_control[current]['enable']() @classmethod - @debug def lock_versioning(cls, session, transaction): """Lock versioning for the session. @@ -171,37 +170,22 @@ def lock_versioning(cls, session, transaction): :param transaction: The transaction associated with the session. :return: None """ - print(f"\033[32mCurrent service={current_app.config['SERVICE_NAME']}, session={session}," - f' transaction={transaction}\033[0m') if '_versioning_locked' not in session.info: if not cls._is_initialized: cls._initialize_versioning() - print(f'\033[31mVersioning locked, current versioning type={cls._current_versioning}' - '(initialized)\033[0m') else: previous_versioning = cls._current_versioning cls._check_versioning() - # TODO: remove debug - lock_type - lock_type = 'unchanged' if cls._current_versioning != previous_versioning: cls._switch_versioning(previous_versioning, cls._current_versioning) - lock_type = 'switched' - - print(f'\033[31mVersioning locked, current versioning type={cls._current_versioning}' - f'({lock_type})\033[0m') session.info['_versioning_locked'] = cls._current_versioning session.info['_transactions_locked'] = [] - # TODO: remove debug - else statement - else: - print('\033[31mVersioning already set for this session, skip\033[0m') - session.info['_transactions_locked'].append(transaction) @classmethod - @debug def unlock_versioning(cls, session, transaction): """Unlock versioning for the session. @@ -211,27 +195,14 @@ def unlock_versioning(cls, session, transaction): :param transaction: The transaction associated with the session. :return: None """ - print(f'\033[32mSession={session}, transaction={transaction}\033[0m') - if '_versioning_locked' in session.info and '_transactions_locked' in session.info: session.info['_transactions_locked'].remove(transaction) - print('\033[31mTransaction unlocked\033[0m') if not session.info['_transactions_locked']: session.info.pop('_versioning_locked', None) session.info.pop('_transactions_locked', None) - print('\033[31mVersioning unlocked\033[0m') - - # TODO: remove debug - else statement - else: - print("\033[32mThis session has active transaction, can't be unlocked\033[0m") - - # TODO: remove debug - else statement - else: - print("\033[32mVersioning/Transaction lock doesn't exist, skip\033[0m") @classmethod - @debug def get_transaction_id(cls, session): """Get the transaction ID for the session. @@ -241,14 +212,11 @@ def get_transaction_id(cls, session): transaction_id = None current_versioning = session.info['_versioning_locked'] - print(f'\033[31mCurrent versioning type={current_versioning}\033[0m') transaction_id = cls._versioning_control[current_versioning]['get_transaction_id'](session) - print(f'\033[31mUsing transaction_id = {transaction_id}\033[0m') return transaction_id @classmethod - @debug def version_class(cls, session, obj): """Return version class for an object based in the session. @@ -260,12 +228,10 @@ def version_class(cls, session, obj): session.begin() current_versioning = session.info['_versioning_locked'] - print(f'\033[31mCurrent versioning type={current_versioning}\033[0m') return cls._versioning_control[current_versioning]['version_class'](obj) -@debug def setup_versioning(): """Set up and initialize versioning switching. 
@@ -273,12 +239,10 @@ def setup_versioning(): """ # use SignallingSession to skip events for continuum's internal session/txn operations @event.listens_for(SignallingSession, 'after_transaction_create') - @debug def after_transaction_create(session, transaction): VersioningProxy.lock_versioning(session, transaction) @event.listens_for(SignallingSession, 'after_transaction_end') - @debug def clear_transaction(session, transaction): VersioningProxy.unlock_versioning(session, transaction) diff --git a/python/common/sql-versioning/sql_versioning/utils.py b/python/common/sql-versioning/sql_versioning/utils.py index 4318fe16bc..f9a03c6496 100644 --- a/python/common/sql-versioning/sql_versioning/utils.py +++ b/python/common/sql-versioning/sql_versioning/utils.py @@ -23,7 +23,6 @@ def version_class(obj): """ with suppress(Exception): versioned_class = obj.__versioned_cls__ - print(f'\033[32mVersioned Class={versioned_class}\033[0m') return versioned_class return None diff --git a/python/common/sql-versioning/sql_versioning/versioning.py b/python/common/sql-versioning/sql_versioning/versioning.py index a849ed1b4a..dd6b16ca84 100644 --- a/python/common/sql-versioning/sql_versioning/versioning.py +++ b/python/common/sql-versioning/sql_versioning/versioning.py @@ -20,7 +20,6 @@ from sqlalchemy.ext.declarative import declarative_base, declared_attr from sqlalchemy.orm import Session, mapper, relationships -from .debugging import debug from .relationship_builder import RelationshipBuilder Base = declarative_base() @@ -220,7 +219,6 @@ def __init__(self, session): self.session = session self.transaction_model = TransactionFactory.create_transaction_model() - @debug def create_transaction(self): """Create a new transaction in the session. @@ -254,7 +252,6 @@ def get_current_transaction_id(self): else: return self.create_transaction() - @debug def clear_current_transaction(self): """Clear the current transaction_id stored in the session. @@ -268,7 +265,6 @@ def clear_current_transaction(self): # ---------- Event Listeners ---------- -@debug def _before_flush(session, flush_context, instances): """Trigger before a flush operation to ensure a transaction is created.""" try: @@ -287,7 +283,6 @@ def _before_flush(session, flush_context, instances): raise e -@debug def _after_flush(session, flush_context): """Trigger after a flush operation to create version records for changed objects.""" try: @@ -300,7 +295,6 @@ def _after_flush(session, flush_context): raise e -@debug def _clear_transaction(session): """Clears the current transaction from the session after commit or rollback.""" try: @@ -410,7 +404,6 @@ def versioned_objects(session): yield obj -@debug def enable_versioning(transaction_cls=None): """Enable versioning. It registers listeners. @@ -426,7 +419,6 @@ def enable_versioning(transaction_cls=None): raise e -@debug def disable_versioning(): """Disable versioning. It removes listeners. 
From 512dae2d43ca056c2d81e97b929f95238236e20f Mon Sep 17 00:00:00 2001 From: meawong Date: Wed, 26 Feb 2025 16:19:03 -0800 Subject: [PATCH 075/133] 26140 - Add alteration code for C to CUL (#3257) --- legal-api/src/legal_api/core/meta/filing.py | 3 ++- legal-api/src/legal_api/models/filing.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 7795d81e26..6297d63daf 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -137,7 +137,8 @@ class FilingTitles(str, Enum): 'CBEN': 'ALTER', 'CUL': 'ALTER', 'CCC': 'ALTER', - 'BC_TO_ULC': 'NOALU' + 'BC_TO_ULC': 'NOALU', + 'C_TO_CUL': 'NOALU' }, 'additional': [ { diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index a1f4b769b9..82050b9824 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -118,7 +118,8 @@ class Source(Enum): 'CBEN': 'ALTER', 'CUL': 'ALTER', 'CCC': 'ALTER', - 'BC_TO_ULC': 'NOALU' + 'BC_TO_ULC': 'NOALU', + 'C_TO_CUL': 'NOALU' } }, 'amalgamationApplication': { From b032dc390b608f53e32eef7892997dd22328a672 Mon Sep 17 00:00:00 2001 From: Kial Date: Thu, 27 Feb 2025 09:01:57 -0500 Subject: [PATCH 076/133] API/Filer - TR todo logic (#3246) * API/Filer - TR todo logic Signed-off-by: Kial Jinnah * cleanup Signed-off-by: Kial Jinnah * filer lint fix Signed-off-by: Kial Jinnah * filer test fix Signed-off-by: Kial Jinnah * filer test fix Signed-off-by: Kial Jinnah --------- Signed-off-by: Kial Jinnah --- .../migrations/versions/ad21c1ed551e_.py | 30 ++++ legal-api/src/legal_api/config.py | 6 + legal-api/src/legal_api/models/business.py | 80 ++++++++++ legal-api/src/legal_api/models/filing.py | 3 +- .../resources/v2/business/business_tasks.py | 151 +++++++++++++++++- .../unit/resources/v2/test_business_tasks.py | 89 ++++++++++- queue_services/entity-filer/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 2 +- .../transparency_register.py | 52 ++++++ .../entity-filer/src/entity_filer/worker.py | 4 + .../test_transparency_register.py | 77 +++++++++ 11 files changed, 490 insertions(+), 6 deletions(-) create mode 100644 legal-api/migrations/versions/ad21c1ed551e_.py create mode 100644 queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py create mode 100644 queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py diff --git a/legal-api/migrations/versions/ad21c1ed551e_.py b/legal-api/migrations/versions/ad21c1ed551e_.py new file mode 100644 index 0000000000..47c297e21d --- /dev/null +++ b/legal-api/migrations/versions/ad21c1ed551e_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: ad21c1ed551e +Revises: d9254d3cbbf4 +Create Date: 2025-02-21 14:05:14.971210 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'ad21c1ed551e' +down_revision = 'd0b10576924c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('businesses', sa.Column('last_tr_year', sa.Integer(), nullable=True)) + op.add_column('businesses_version', sa.Column('last_tr_year', sa.Integer(), autoincrement=False, nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('businesses_version', 'last_tr_year') + op.drop_column('businesses', 'last_tr_year') + # ### end Alembic commands ### diff --git a/legal-api/src/legal_api/config.py b/legal-api/src/legal_api/config.py index 0b35323e3a..38f2b746d5 100644 --- a/legal-api/src/legal_api/config.py +++ b/legal-api/src/legal_api/config.py @@ -182,6 +182,9 @@ class _Config(): # pylint: disable=too-few-public-methods STAGE_1_DELAY = int(os.getenv('STAGE_1_DELAY', '42')) STAGE_2_DELAY = int(os.getenv('STAGE_2_DELAY', '30')) + # Transparency Register + TR_START_DATE = os.getenv('TR_START_DATE', '').strip() # i.e. '2025-02-01' + TESTING = False DEBUG = False @@ -216,6 +219,9 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods # URLs AUTH_SVC_URL = os.getenv('AUTH_SVC_URL', 'http://test-auth-url') + # Transparency Register - test cases set this explicitly as needed + TR_START_DATE = '' + # JWT OIDC settings # JWT_OIDC_TEST_MODE will set jwt_manager to use JWT_OIDC_TEST_MODE = True diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index e4957b36ef..c5b66ec521 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -204,6 +204,7 @@ class AssociationTypes(Enum): 'last_ledger_timestamp', 'last_modified', 'last_remote_ledger_id', + 'last_tr_year', 'legal_name', 'legal_type', 'restriction_ind', @@ -247,6 +248,7 @@ class AssociationTypes(Enum): restriction_ind = db.Column('restriction_ind', db.Boolean, unique=False, default=False) last_ar_year = db.Column('last_ar_year', db.Integer) last_ar_reminder_year = db.Column('last_ar_reminder_year', db.Integer) + last_tr_year = db.Column('last_tr_year', db.Integer) association_type = db.Column('association_type', db.String(50)) state = db.Column('state', db.Enum(State), default=State.ACTIVE.value) state_filing_id = db.Column('state_filing_id', db.Integer) @@ -333,6 +335,84 @@ def next_anniversary(self): return last_anniversary + datedelta.datedelta(years=1) + @property + def next_annual_tr_due_datetime(self) -> datetime: + """Retrieve the next annual TR filing due datetime for the business.""" + due_year_offset = 1 + # NOTE: Converting to pacific time to ensure we get the right date + # for comparisons and when replacing time at the end + founding_datetime = LegislationDatetime.as_legislation_timezone(self.founding_date) + + tr_start_datetime = None + if tr_start_date := current_app.config.get('TR_START_DATE', None): + tr_start_datetime = LegislationDatetime.as_legislation_timezone_from_date( + datetime.fromisoformat(tr_start_date)) + + last_restoration_datetime = None + if restoration_filing := Filing.get_most_recent_filing(self.id, 'restoration'): + if restoration_filing.effective_date: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.effective_date) + else: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.filing_date) + + if ( + last_restoration_datetime and + last_restoration_datetime.year > (self.last_tr_year or tr_start_datetime.year or 0) + ): + # Set offset based on the year of the restoration + # NOTE: Currently could end up being due before the initial filing - policy still getting worked out + due_year_offset = last_restoration_datetime.year - founding_datetime.year + if ( + last_restoration_datetime.month > founding_datetime.month or + ( + last_restoration_datetime.month == founding_datetime.month and + last_restoration_datetime.day >= 
founding_datetime.day + ) + ): + # Month/day of the founding date has already passed for this year so add 1 + due_year_offset += 1 + + elif self.last_tr_year: + # i.e. founding_date.year=2023, last_tr_year=2024, then due_year_offset=2 and next due date for 2025 + due_year_offset = (self.last_tr_year - founding_datetime.year) + 1 + + elif tr_start_datetime: + # Case examples: + # ---- Founded before TR start, month/day are earlier or the same + # -> tr_start_date=2025-02-01, founding_date=2023-01-01.., + # then due_year_offset=3 and next due date for 2026 + # -> tr_start_date=2025-02-01, founding_date=2024-01-01.., + # then due_year_offset=2 and next due date for 2026 + # ---- Founded before TR start, month/day are after + # -> tr_start_date=2025-02-01, founding_date=2023-02-02.., + # then due_year_offset=2 and next due date for 2025 + # -> tr_start_date=2025-02-01, founding_date=2024-02-02.., + # then due_year_offset=1 and next due date for 2025 + # ---- Founded after TR start, nothing needed + # -> tr_start_date=2025-02-01, founding_date=2025-02-02.., + # then due_year_offset=1 and next due date for 2026 (regular) + # -> tr_start_date=2025-02-01, founding_date=2026-02-02.., + # then due_year_offset=1 and next due date for 2027 (regular) + if tr_start_datetime > founding_datetime: + # Set offset based on the year of the tr start + due_year_offset = tr_start_datetime.year - founding_datetime.year + if ( + tr_start_datetime.month > founding_datetime.month or + ( + tr_start_datetime.month == founding_datetime.month and + tr_start_datetime.day >= founding_datetime.day + ) + ): + # Month/day of the founding date had already passed for that year so add 1 + due_year_offset += 1 + + due_datetime = founding_datetime + datedelta.datedelta(years=due_year_offset, months=2) + + # return as this date at 23:59:59 + return due_datetime.replace(hour=23, minute=59, second=59, microsecond=0) + def get_ar_dates(self, next_ar_year): """Get ar min and max date for the specific year.""" ar_min_date = datetime(next_ar_year, 1, 1).date() diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 82050b9824..00186528b0 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -827,7 +827,8 @@ def set_processed(self, business_type): def effective_date_can_be_before_payment_completion_date(self, business_type): """For AR or COD filings then the effective date can be before the payment date.""" return self.filing_type in (Filing.FILINGS['annualReport'].get('name'), - Filing.FILINGS['changeOfDirectors'].get('name')) + Filing.FILINGS['changeOfDirectors'].get('name'), + Filing.FILINGS['transparencyRegister'].get('name')) @staticmethod def _raise_default_lock_exception(): diff --git a/legal-api/src/legal_api/resources/v2/business/business_tasks.py b/legal-api/src/legal_api/resources/v2/business/business_tasks.py index d4a02c2369..9411322715 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_tasks.py +++ b/legal-api/src/legal_api/resources/v2/business/business_tasks.py @@ -19,6 +19,7 @@ from datetime import datetime from http import HTTPStatus +import datedelta import requests from requests import exceptions # noqa I001 from flask import current_app, jsonify @@ -28,6 +29,7 @@ from legal_api.services import check_warnings, namex from legal_api.services.warnings.business.business_checks import WarningType from legal_api.utils.auth import jwt +from legal_api.utils.legislation_datetime import LegislationDatetime from .bp 
import bp @@ -77,7 +79,7 @@ def get_tasks(identifier): return jsonify(tasks=rv) -def construct_task_list(business): # pylint: disable=too-many-locals; only 2 extra +def construct_task_list(business: Business): # pylint: disable=too-many-locals; only 2 extra """ Return all current pending tasks to do. @@ -113,7 +115,11 @@ def construct_task_list(business): # pylint: disable=too-many-locals; only 2 ex Filing.Status.PENDING_CORRECTION.value, Filing.Status.ERROR.value]) # Create a todo item for each pending filing + pending_tr_type: str = None for filing in pending_filings: + if filing.filing_type == 'transparencyRegister': + pending_tr_type = filing.filing_sub_type + filing_json = filing.json if filing.payment_status_code == 'CREATED' and filing.payment_token: # get current pay details from pay-api @@ -163,9 +169,131 @@ def construct_task_list(business): # pylint: disable=too-many-locals; only 2 ex next_ar_year += 1 ar_min_date, ar_max_date = business.get_ar_dates(next_ar_year) order += 1 + + tasks, order = add_tr_tasks(business, tasks, order, pending_tr_type) + return tasks +def add_tr_tasks(business: Business, tasks: list, order: int, pending_tr_type: str = None): + """Add Transparency Register tasks to the tasks list.""" + entity_types_no_tr = ['SP', 'GP', 'CP'] + tr_required = business.state != Business.State.HISTORICAL.value and business.legal_type not in entity_types_no_tr + if tr_required and (tr_start_date := current_app.config.get('TR_START_DATE', None)): + # Initial TR todo + if not pending_tr_type: + tr_start_datetime = LegislationDatetime.as_legislation_timezone_from_date( + datetime.fromisoformat(tr_start_date)) + initial_filing: Filing = Filing.get_most_recent_filing(business.id, 'transparencyRegister', 'initial') + last_restoration_datetime = None + if restoration_filing := Filing.get_most_recent_filing(business.id, 'restoration'): + if restoration_filing.effective_date: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.effective_date) + else: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.filing_date) + + if ( + last_restoration_datetime and + not (initial_filing and initial_filing.effective_date > last_restoration_datetime) + ): + pending_tr_type = 'initial' + tasks, order = _add_tr_task(tasks, + order, + True, + business, + 'initial', + last_restoration_datetime + datedelta.datedelta(months=6)) + + elif business.founding_date > tr_start_datetime and not initial_filing: + pending_tr_type = 'initial' + tasks, order = _add_tr_task(tasks, + order, + True, + business, + 'initial', + business.founding_date + datedelta.datedelta(months=6)) + + # Annual TR todos + if (LegislationDatetime.now() + datedelta.datedelta(months=2)) > business.next_annual_tr_due_datetime: + # the next annual tr due datetime is within 2 months so add task for annual TR + annual_year = (business.next_annual_tr_due_datetime - datedelta.datedelta(months=2)).year + if pending_tr_type != 'annual': + tasks, order = _add_tr_task(tasks, + order, + not pending_tr_type, + business, + 'annual', + business.next_annual_tr_due_datetime, + annual_year) + # add any other outstanding annual TRs to the list + now = LegislationDatetime.now() + years_offset = 0 + while annual_year < now.year: + years_offset += 1 + annual_year += 1 + # NOTE: can't just replace with annual_year due to 2 month offset (could be off by 1) + due_date = business.next_annual_tr_due_datetime + datedelta.datedelta(years=years_offset) + if (now + 
datedelta.datedelta(months=2)) > due_date:
+                    tasks, order = _add_tr_task(tasks, order, False, business, 'annual', due_date, annual_year)
+
+    return tasks, order
+
+
+def _find_task_order_for_tr(tasks: list, order: int, tr_sub_type: str, year: int) -> int:
+    """Find the appropriate task order value for the TR filing in the task list."""
+    ar_todo_tasks = [task for task in tasks if task['task'].get('todo', {}).get('header', {}).get('ARFilingYear')]
+    if not ar_todo_tasks:
+        # default order will be after any pending tasks
+        return order
+
+    def _by_order(e: dict):
+        """Return the order value of the given task."""
+        return e['order']
+
+    ar_todo_tasks.sort(key=_by_order)
+    if tr_sub_type == 'initial':
+        # Should be directly after any AR in the same year as initial
+        # (not possible to have ARs outstanding in previous years)
+        if ar_todo_tasks[0]['task']['todo']['header']['ARFilingYear'] == year:
+            # Will be directly after this task
+            return ar_todo_tasks[0]['order'] + 1
+        # Will be ahead of this task
+        return ar_todo_tasks[0]['order']
+    else:
+        # tr annual task, should be directly after the AR task of the same year
+        for ar_task in ar_todo_tasks:
+            if ar_task['task']['todo']['header']['ARFilingYear'] == year:
+                # Will be directly after this task
+                return ar_task['order'] + 1
+            elif ar_task['task']['todo']['header']['ARFilingYear'] > year:
+                # Will be ahead of this task
+                return ar_task['order']
+
+    # is an annual task and should be after all existing AR tasks
+    return order
+
+
+def _bump_task_order(tasks: list, bump_start_point: int) -> list:
+    """Bump the order of the task list down from the start point."""
+    for task in tasks:
+        if task['order'] >= bump_start_point:
+            task['order'] += 1
+    return tasks
+
+
+def _add_tr_task(tasks: list, order: int, enabled: bool,  # pylint: disable=too-many-arguments
+                 business: Business, sub_type: str, due_date: datetime, year: int = None):
+    """Add a TR task to the list of tasks in the correct order."""
+    tr_order = _find_task_order_for_tr(tasks, order, sub_type, year)
+    # bump the order of all the tasks after the tr by 1
+    tasks = _bump_task_order(tasks, tr_order)
+    tasks.append(create_tr_todo(business, tr_order, enabled, sub_type, due_date, year))
+    order += 1
+    return tasks, order
+
+
 def create_todo(business, ar_year, ar_min_date, ar_max_date, order, enabled):  # pylint: disable=too-many-arguments
     """Return a to-do JSON object."""
     todo = {
@@ -221,3 +349,24 @@ def create_conversion_filing_todo(business, order, enabled):
         'enabled': enabled
     }
     return todo
+
+
+def create_tr_todo(business: Business, order: int, enabled: bool,  # pylint: disable=too-many-arguments
+                   sub_type: str, due_date: datetime, year: int = None):
+    """Return a to-do JSON object for a Transparency Register todo item."""
+    return {
+        'task': {
+            'todo': {
+                'business': business.json(),
+                'header': {
+                    'TRFilingYear': year,
+                    'dueDate': LegislationDatetime.as_legislation_timezone(due_date).isoformat(),
+                    'name': 'tranparencyRegister',
+                    'status': 'NEW',
+                    'subType': sub_type
+                }
+            }
+        },
+        'order': order,
+        'enabled': enabled
+    }
diff --git a/legal-api/tests/unit/resources/v2/test_business_tasks.py b/legal-api/tests/unit/resources/v2/test_business_tasks.py
index 48b0fb8bd2..a75e71baf0 100644
--- a/legal-api/tests/unit/resources/v2/test_business_tasks.py
+++ b/legal-api/tests/unit/resources/v2/test_business_tasks.py
@@ -28,8 +28,9 @@
 
 from legal_api.models import Business
 from legal_api.services.authz import STAFF_ROLE
+from legal_api.utils.legislation_datetime import LegislationDatetime
 from
tests import integration_payment -from tests.unit.models import factory_business, factory_business_mailing_address, factory_filing, factory_pending_filing +from tests.unit.models import factory_business, factory_business_mailing_address, factory_completed_filing, factory_filing, factory_pending_filing from tests.unit.services.utils import create_header from tests.unit.services.warnings import create_business @@ -283,7 +284,7 @@ def test_get_tasks_pending_correction_filings(session, client, jwt): ('SP no AR', 'FM1234567', '2019-05-15', None, Business.LegalTypes.SOLE_PROP.value, 0), ('GP no AR', 'FM1234567', '2019-05-15', None, Business.LegalTypes.PARTNERSHIP.value, 0) ]) -def test_construct_task_list(session, client, jwt, test_name, identifier, founding_date, previous_ar_date, legal_type, +def test_construct_task_list_ar(session, client, jwt, test_name, identifier, founding_date, previous_ar_date, legal_type, tasks_length): """Assert that construct_task_list returns the correct number of AR to be filed.""" from legal_api.resources.v2.business.business_tasks import construct_task_list @@ -299,6 +300,90 @@ def test_construct_task_list(session, client, jwt, test_name, identifier, foundi assert tasks[0]['task']['todo']['business']['nextAnnualReport'][-14:] != '00:00:00+00:00' +@pytest.mark.parametrize('test_name, identifier, founding_date, last_ar_date, legal_type, last_tr_date, tr_start_date, initial_date, restored_date, expected', [ + ('BEN_ITR', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}]), + ('BEN_ITR_DRAFT', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'status': 'DRAFT', 'enabled': True}]), + ('BEN_ITR_PENDING', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'status': 'PENDING', 'enabled': True}]), + ('BEN_ITR_FILED', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, []), + ('BEN_ITR_NONE', 'BC1234567', datetime(2025, 7, 1, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 2), None, None, []), + ('BEN_ATR', 'BC1234567', datetime(2023, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2024, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_MULTI', 'BC1234567', datetime(2021, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2023, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_PREV_FILED', 'BC1234567', datetime(2022, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2024, 1, 1), datetime(2023, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_PREV_FILED_MULTI', 'BC1234567', 
datetime(2021, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2023, 2, 1), datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR', 'BC1234567', datetime(2024, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_MULTI', 'BC1234567', datetime(2023, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_RESTORATION', 'BC1234567', datetime(2010, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, datetime(2023, 1, 1, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_RESTORATION_PREV_FILED', 'BC1234567', datetime(2010, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2024, 2, 1), datetime(2022, 1, 1), datetime(2023, 2, 1), datetime(2023, 1, 1, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_RESTORATION_PREV_FILED_COMPLEX', 'BC1234567', datetime(2020, 1, 2, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2021, 2, 1), datetime(2020, 1, 1), datetime(2020, 2, 1), datetime(2023, 1, 3, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_AR', 'BC1234567', datetime(2023, 2, 1, 8), datetime(2024, 2, 1), Business.LegalTypes.BCOMP.value, None, datetime(2023, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'annualReport', 'ARFilingYear': 2025, 'enabled': True}, {'order': 4, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_AR', 'BC1234567', datetime(2020, 2, 1, 8), datetime(2023, 2, 1), Business.LegalTypes.BCOMP.value, datetime(2023, 2, 1), datetime(2021, 1, 1), None, None, [{'order': 1, 'name': 'annualReport', 'ARFilingYear': 2024, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': True}, {'order': 3, 'name': 'annualReport', 'ARFilingYear': 2025, 'enabled': False}, {'order': 4, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), 
+]) +def test_construct_task_list_tr(app, session, client, jwt, test_name, identifier, founding_date, last_ar_date, + legal_type, last_tr_date, tr_start_date, initial_date, restored_date, expected): + """Assert that construct_task_list returns the correct items concerning TR and AR filings.""" + from legal_api.resources.v2.business.business_tasks import construct_task_list + + # tests expect current date to be in 2025. Adjust accordingly for the current year (freezetime only works for some things) + year_offset = (datetime.now()).year - 2025 + founding_date += datedelta.datedelta(years=year_offset) + tr_start_date += datedelta.datedelta(years=year_offset) + if last_ar_date: + last_ar_date += datedelta.datedelta(years=year_offset) + if last_tr_date: + last_tr_date += datedelta.datedelta(years=year_offset) + + app.config['TR_START_DATE'] = tr_start_date.isoformat() + with patch('legal_api.resources.v2.business.business_tasks.check_warnings', return_value=[]): + business = factory_business(identifier, founding_date, last_ar_date, legal_type) + business.last_tr_year = last_tr_date.year if last_tr_date else None + if initial_date: + filing = { + 'filing': { + 'header': {'name': 'transparencyRegister', 'certifiedBy': 'test', 'date': initial_date.isoformat()}, + 'transparencyRegister': {'type': 'initial', 'ledgerReferenceNumber': '1234'} + }} + if 'DRAFT' in test_name: + factory_filing(business, filing, initial_date, 'transparencyRegister', 'initial') + elif 'PENDING' in test_name: + factory_pending_filing(business, filing, initial_date) + else: + factory_completed_filing(business, filing, initial_date, None, None, 'transparencyRegister', 'initial') + + if restored_date: + filing = {'filing': {'header': {'name': 'restoration', 'date': restored_date.isoformat(), 'certifiedBy': 'test'}, 'restoration': {'type': 'fullRestoration'}}} + filing_obj = factory_completed_filing(business, filing, initial_date, None, None, 'restoration', 'fullRestoration') + filing_obj.effective_date = restored_date + filing_obj.save() + + business.save() + tasks = construct_task_list(business) + + # check number of tasks + # assert tasks == expected + assert len(tasks) == len(expected) + if tasks: + # check order and values + def get_order_val(e: dict): + """Return the order value of the task.""" + return e['order'] + + tasks.sort(key=get_order_val) + expected.sort(key=get_order_val) + + for task, expected_task in zip(tasks, expected): + assert task['order'] == expected_task['order'] + assert task['enabled'] == expected_task.get('enabled') + if task['task'].get('todo'): + assert task['task']['todo']['header']['name'] == expected_task['name'] + assert task['task']['todo']['header'].get('ARFilingYear') == expected_task.get('ARFilingYear') + assert task['task']['todo']['header'].get('TRFilingYear') == expected_task.get('TRFilingYear') + assert task['task']['todo']['header'].get('subType') == expected_task.get('subType') + else: + assert task['task']['filing']['header']['status'] == expected_task.get('status') + + # Reset this to empty string so it doesn't interfere with other tests + app.config['TR_START_DATE'] = '' + @pytest.mark.parametrize('test_name, legal_type, identifier, has_missing_business_info, conversion_task_expected', [ ('CONVERSION_TODO_EXISTS_MISSING_DATA', 'SP', 'FM0000001', True, True), ('CONVERSION_TODO_EXISTS_MISSING_DATA', 'GP', 'FM0000002', True, True), diff --git a/queue_services/entity-filer/requirements.txt b/queue_services/entity-filer/requirements.txt index 98298d1a47..d44df25ab9 100755 --- 
a/queue_services/entity-filer/requirements.txt +++ b/queue_services/entity-filer/requirements.txt @@ -24,7 +24,7 @@ minio==7.0.2 PyPDF2==1.26.0 reportlab==3.6.12 git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-filer/requirements/bcregistry-libraries.txt b/queue_services/entity-filer/requirements/bcregistry-libraries.txt index d9a337a3fd..a8e57e2c9c 100644 --- a/queue_services/entity-filer/requirements/bcregistry-libraries.txt +++ b/queue_services/entity-filer/requirements/bcregistry-libraries.txt @@ -1,5 +1,5 @@ git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.32#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py b/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py new file mode 100644 index 0000000000..a0592e53a8 --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py @@ -0,0 +1,52 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""File processing rules and actions for the transparency register filing.""" +from entity_queue_common.service_utils import QueueException +from legal_api.models import Business, Filing + + +def process(business: Business, filing_rec: Filing, filing: dict): + """Process the incoming transparency register filing.""" + if not (tr_filing := filing.get('transparencyRegister')): # pylint: disable=superfluous-parens; + raise QueueException(f'legal_filing:transparencyRegister data missing from {filing_rec.id}') + if not (sub_type := tr_filing.get('type')): # pylint: disable=superfluous-parens; + raise QueueException(f'legal_filing:transparencyRegister data missing from {filing_rec.id}') + if not business: + raise QueueException(f'Business does not exist: legal_filing:transparencyRegister {filing_rec.id}') + + if sub_type == 'annual': + # set the last_tr_year for the business + business.last_tr_year = filing_rec.effective_date.year + + return business, filing_rec diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index b9b8150263..1d63140411 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -75,6 +75,7 @@ restoration, special_resolution, transition, + transparency_register, ) from entity_filer.filing_processors.filing_components import business_profile, name_request @@ -357,6 +358,9 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, filing_submission, filing_meta) + elif filing.get('transparencyRegister'): + transparency_register.process(business, filing_submission, filing_core_submission.json) + if filing.get('specialResolution'): special_resolution.process(business, filing, filing_submission) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py b/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py new file mode 100644 index 0000000000..a3f66bd673 --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py @@ -0,0 +1,77 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. 
Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""The Unit Tests for the Transparency Register filing.""" +from datetime import datetime + +import pytest +from legal_api.models import Filing +from legal_api.utils.legislation_datetime import LegislationDatetime + +from entity_filer.filing_processors import transparency_register +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name, sub_type, expected', [ + ('INITIAL', 'initial', None), + ('CHANGE', 'change', None), + ('ANNUAL', 'annual', 2024) +]) +def test_transparency_register_filing_process_annual(app, session, test_name, sub_type, expected): + """Assert that the transparency register object is correctly populated to model objects.""" + # setup + effective_date = LegislationDatetime.as_legislation_timezone(datetime(2024, 3, 2)) + filing = { + 'filing': { + 'header': { + 'name': 'transparencyRegister', + 'date': LegislationDatetime.datenow().isoformat(), + 'effectiveDate': effective_date.isoformat(), + 'certifiedBy': 'test' + }, + 'business': {'identifier': 'BC1234567'}, + 'transparencyRegister': { + 'type': sub_type, + 'ledgerReferenceNumber': '12384cnfjnj43' + }}} + + business = create_business(filing['filing']['business']['identifier']) + create_filing('123', filing) + + filing_rec = Filing(effective_date=effective_date, filing_json=filing) + + # test + transparency_register.process(business, filing_rec, filing['filing']) + + # Assertions + assert business.last_tr_year == expected From c6dad190a4de6b14b0fcb2cb191d6a1258df2713 Mon Sep 17 00:00:00 2001 From: Kevin Zhang <54437031+kzdev420@users.noreply.github.com> Date: Thu, 27 Feb 2025 08:34:39 -0800 Subject: [PATCH 077/133] 25957 appoint_receiver_filing (#3252) * 25957 appoint_receiver_filing * update test * update legal_types, test * fix the test --- legal-api/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 2 +- legal-api/src/legal_api/core/meta/filing.py | 15 +++++++ legal-api/src/legal_api/models/filing.py | 14 ++++++ legal-api/src/legal_api/models/party_role.py | 1 + legal-api/src/legal_api/services/authz.py | 6 +++ .../filings/validations/appoint_receiver.py | 43 +++++++++++++++++++ .../filings/validations/validation.py | 4 ++ .../tests/unit/resources/v2/test_business.py | 4 ++ .../tests/unit/services/test_authorization.py | 29 ++++++++++++- 10 files changed, 116 insertions(+), 4 deletions(-) create mode 100644 legal-api/src/legal_api/services/filings/validations/appoint_receiver.py diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index 3760bb7eac..c65e9cb282 100755 --- 
a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 -git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.35#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/requirements/bcregistry-libraries.txt b/legal-api/requirements/bcregistry-libraries.txt index 80d739e0f5..ec5a8d3ed7 100644 --- a/legal-api/requirements/bcregistry-libraries.txt +++ b/legal-api/requirements/bcregistry-libraries.txt @@ -1,2 +1,2 @@ -git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.35#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 6297d63daf..bb521d87e9 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -205,6 +205,21 @@ class FilingTitles(str, Enum): 'CCC': 'BCANN' } }, + 'appointReceiver': { + 'name': 'appointReceiver', + 'title': 'Appoint Receiver Filing', + 'displayName': 'Appoint Receiver', + 'codes': { + 'BEN': 'NOARM', + 'BC': 'NOARM', + 'ULC': 'NOARM', + 'CC': 'NOARM', + 'CBEN': 'NOARM', + 'C': 'NOARM', + 'CUL': 'NOARM', + 'CCC': 'NOARM' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 00186528b0..e1bcf268b3 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -171,6 +171,20 @@ class Source(Enum): 'CCC': 'BCANN' } }, + 'appointReceiver': { + 'name': 'appointReceiver', + 'title': 'Appoint Receiver Filing', + 'codes': { + 'BEN': 'NOARM', + 'BC': 'NOARM', + 'ULC': 'NOARM', + 'CC': 'NOARM', + 'CBEN': 'NOARM', + 'C': 'NOARM', + 'CUL': 'NOARM', + 'CCC': 'NOARM' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', diff --git a/legal-api/src/legal_api/models/party_role.py b/legal-api/src/legal_api/models/party_role.py index 2d68828387..e2fcd1b949 100644 --- a/legal-api/src/legal_api/models/party_role.py +++ b/legal-api/src/legal_api/models/party_role.py @@ -38,6 +38,7 @@ class RoleTypes(Enum): LIQUIDATOR = 'liquidator' PROPRIETOR = 'proprietor' PARTNER = 'partner' + RECEIVER = 'receiver' __versioned__ = {} __tablename__ = 'party_roles' diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index 4d9589f6a2..ce6bcd6b41 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -207,6 +207,12 @@ def get_allowable_filings_dict(): 'business': [BusinessBlocker.DEFAULT] } }, + 'appointReceiver': { + 'legalTypes': ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT] + } + }, 'changeOfAddress': { 'legalTypes': ['CP', 'BEN', 'BC', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { diff --git a/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py b/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py new file mode 100644 index 0000000000..4fef0b356c --- /dev/null +++ 
b/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py
@@ -0,0 +1,43 @@
+# Copyright © 2025 Province of British Columbia
+#
+# Licensed under the BSD 3 Clause License, (the "License");
+# you may not use this file except in compliance with the License.
+# The template for the license can be found here
+# https://opensource.org/license/bsd-3-clause/
+#
+# Redistribution and use in source and binary forms,
+# with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS”
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+"""Validation for the Appoint Receiver filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Appoint Receiver filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/validation.py b/legal-api/src/legal_api/services/filings/validations/validation.py index 3681b5d93e..61045ed24f 100644 --- a/legal-api/src/legal_api/services/filings/validations/validation.py +++ b/legal-api/src/legal_api/services/filings/validations/validation.py @@ -27,6 +27,7 @@ from .alteration import validate as alteration_validate from .amalgamation_application import validate as amalgamation_application_validate from .annual_report import validate as annual_report_validate +from .appoint_receiver import validate as appoint_receiver_validate from .change_of_address import validate as coa_validate from .change_of_directors import validate as cod_validate from .change_of_name import validate as con_validate @@ -194,6 +195,9 @@ def validate(business: Business, # pylint: disable=too-many-branches,too-many-s elif k == Filing.FILINGS['transparencyRegister'].get('name'): err = transparency_register_validate(filing_json) # pylint: disable=assignment-from-none + elif k == Filing.FILINGS['appointReceiver'].get('name'): + err = appoint_receiver_validate(filing_json) # pylint: disable=assignment-from-none + if err: return err diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index f45481f801..2273a50eef 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -588,6 +588,10 @@ def test_get_could_file(session, client, jwt): "displayName": "Annual Report", "name": "annualReport" }, + { + "displayName": "Appoint Receiver", + "name": "appointReceiver" + }, { "displayName": "Address Change", "name": "changeOfAddress" diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 13b90d854d..775bf6809a 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -164,6 +164,7 @@ class FilingKey(str, Enum): TRANSPARENCY_REGISTER_ANNUAL = 'TRANSPARENCY_REGISTER_ANNUAL' TRANSPARENCY_REGISTER_CHANGE = 'TRANSPARENCY_REGISTER_CHANGE' TRANSPARENCY_REGISTER_INITIAL = 'TRANSPARENCY_REGISTER_INITIAL' + APPOINT_RECEIVER = 'APPOINT_RECEIVER' EXPECTED_DATA = { @@ -243,6 +244,7 @@ class FilingKey(str, Enum): FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'} } EXPECTED_DATA_CONT_IN = { @@ -327,6 +329,7 @@ class FilingKey(str, Enum): FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, 
FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'} } BLOCKER_FILING_STATUSES = factory_incomplete_statuses() @@ -556,14 +559,14 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 'registrarsNotation', 'registrarsOrder', 'specialResolution']), ('staff_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, @@ -720,6 +723,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_allowed', Business.State.ACTIVE, 'changeOfRegistration', None, ['SP', 'GP'], 'staff', [STAFF_ROLE], True), + ('staff_active_allowed', Business.State.ACTIVE, 'appointReceiver', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'appointReceiver', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('user_active_allowed', Business.State.ACTIVE, 'agmExtension', None, ['BC', 'BEN', 'ULC', 'CC'], 'general', [BASIC_USER], True), ('user_active', Business.State.ACTIVE, 'agmExtension', None, @@ -958,6 +966,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -979,6 +988,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1258,6 +1268,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, 
FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1279,6 +1290,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1587,6 +1599,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -1602,6 +1615,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2065,6 +2079,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2085,6 +2100,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2231,6 +2247,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2255,6 +2272,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2278,6 +2296,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2299,6 +2318,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2609,6 +2629,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2648,6 +2669,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2715,6 +2737,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_corps', True, Business.State.ACTIVE, ['BC', 
'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.ADMN_FRZE, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2826,6 +2849,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2872,6 +2896,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, From 7f9eb74db624caea42f1a21520d106e4114d2bbd Mon Sep 17 00:00:00 2001 From: EasonPan Date: Thu, 27 Feb 2025 09:41:44 -0800 Subject: [PATCH 078/133] 23001 - Further Reduce DB-Versioning Related Debug Logging (#3258) * Removed logging print() in the custom db-versioning library * Removed the debug decorator file, since we are not using it anywhere anymore --- .../sql_versioning/debugging.py | 27 ------------------- .../sql_versioning/versioning.py | 18 +------------ 2 files changed, 1 insertion(+), 44 deletions(-) delete mode 100644 python/common/sql-versioning/sql_versioning/debugging.py diff --git a/python/common/sql-versioning/sql_versioning/debugging.py b/python/common/sql-versioning/sql_versioning/debugging.py deleted file mode 100644 index 5909fd9aaa..0000000000 --- a/python/common/sql-versioning/sql_versioning/debugging.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright © 2024 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utilities used for debugging.""" -# TODO: remove this debugging utility file -import functools - - -def debug(func): - """A decorator to print a message before and after a function call.""" - @functools.wraps(func) - def wrapper(*args, **kwargs): - print(f'\033[34m--> Entering {func.__qualname__}()\033[0m') - ret = func(*args, **kwargs) - print(f'\033[34m<-- Exiting {func.__qualname__}()\033[0m') - return ret - return wrapper diff --git a/python/common/sql-versioning/sql_versioning/versioning.py b/python/common/sql-versioning/sql_versioning/versioning.py index dd6b16ca84..c715a29cd7 100644 --- a/python/common/sql-versioning/sql_versioning/versioning.py +++ b/python/common/sql-versioning/sql_versioning/versioning.py @@ -106,11 +106,7 @@ def _create_version(session, target, operation_type): :param operation_type: The type of operation ('I', 'U', 'D') being performed on the object. 
:return: None """ - - print(f'\033[32mCreating version for {target.__class__.__name__} (id={target.id}), operation_type: {operation_type}\033[0m') - if not session: - print(f'\033[32mSkipping version creation for {target.__class__.__name__} (id={target.id})\033[0m') return transaction_manager = TransactionManager(session) @@ -173,8 +169,6 @@ def _create_version(session, target, operation_type): values(end_transaction_id=transaction_id) ) - print(f'\033[32mVersion created/updated for {target.__class__.__name__} (id={target.id}), transaction_id: {transaction_id}\033[0m') - # ---------- Transaction Related Classes ---------- class TransactionFactory: @@ -226,7 +220,6 @@ def create_transaction(self): """ if 'current_transaction_id' in self.session.info: - print(f"\033[32mPoping out existing transaction: {self.session.info['current_transaction_id']}\033[0m") self.session.info.pop('current_transaction_id', None) # Use insert().returning() to get the ID and issued_at without committing @@ -236,10 +229,7 @@ def create_transaction(self): result = self.session.execute(stmt) transaction_id, issued_at = result.first() - print(f'\033[32mCreated new transaction: {transaction_id}\033[0m') - self.session.info['current_transaction_id'] = transaction_id - print(f'\033[32mSet current_transaction_id: {transaction_id}\033[0m') return transaction_id def get_current_transaction_id(self): @@ -258,9 +248,7 @@ def clear_current_transaction(self): :return: None """ if self.session.transaction.nested: - print(f"\033[32mSkip clearing nested transaction\033[0m") return - print(f"\033[32mClearing current transaction: {self.session.info.get('current_transaction_id')}\033[0m") self.session.info.pop('current_transaction_id', None) @@ -269,13 +257,9 @@ def _before_flush(session, flush_context, instances): """Trigger before a flush operation to ensure a transaction is created.""" try: if not _is_session_modified(session): - print('\033[31mThere is no modified versioned object in this session.\033[0m') return - if 'current_transaction_id' in session.info: - print(f"\033[31mtransaction_id={session.info['current_transaction_id']} exists before flush.\033[0m") - else: - print('\033[31mCreating transaction before flush.\033[0m') + if 'current_transaction_id' not in session.info: transaction_manager = TransactionManager(session) transaction_manager.create_transaction() From f4cffdead0f4101a2d4795a6db49845fa1c556ac Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Thu, 27 Feb 2025 11:27:18 -0800 Subject: [PATCH 079/133] Bump db versioning component versions to 2.143.0 (#3260) --- queue_services/entity-bn/src/entity_bn/version.py | 2 +- .../src/entity_digital_credentials/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/queue_services/entity-bn/src/entity_bn/version.py b/queue_services/entity-bn/src/entity_bn/version.py index 9aeaafef3c..c6bc4525e4 100644 --- a/queue_services/entity-bn/src/entity_bn/version.py +++ b/queue_services/entity-bn/src/entity_bn/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.142.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py index 2153e5ff80..a05b27332a 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py +++ 
b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.142.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name From 2a462375d8d11a2fe4e5cbed462ed671fbece9fd Mon Sep 17 00:00:00 2001 From: EasonPan Date: Thu, 27 Feb 2025 11:45:10 -0800 Subject: [PATCH 080/133] remove deleted debugging decorator import (#3261) --- python/common/sql-versioning/sql_versioning/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/common/sql-versioning/sql_versioning/__init__.py b/python/common/sql-versioning/sql_versioning/__init__.py index c90ceea973..d255c8e3a9 100644 --- a/python/common/sql-versioning/sql_versioning/__init__.py +++ b/python/common/sql-versioning/sql_versioning/__init__.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. """Versioning extension for SQLAlchemy.""" -from .debugging import debug from .versioning import (Base, TransactionFactory, TransactionManager, Versioned, disable_versioning, enable_versioning) from .utils import version_class From 11332187f98c1122d195ec3be3b3c741d7862e66 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Thu, 27 Feb 2025 14:24:40 -0800 Subject: [PATCH 081/133] 25870 Tombstone - Extract updates & filing submitter and staff user info updates (#3259) * 25870 - update extract to include payment info & merge Q* type into BC Signed-off-by: Hongjing Chen * 25870 - tombstone - bring over correct filing submitter & staff comment user Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- .../flows/tombstone/tombstone_queries.py | 65 ++++++++----------- data-tool/flows/tombstone/tombstone_utils.py | 28 ++++---- .../scripts/colin_corps_extract_postgres_ddl | 13 ++++ data-tool/scripts/transfer_cprd_corps.sql | 21 +++++- 4 files changed, 77 insertions(+), 50 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 4e729c1ccc..8c8c615bcd 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -166,7 +166,6 @@ def get_corp_users_query(corp_nums: list): query = f""" select u_user_id, - u_full_name, string_agg(event_type_cd || '_' || coalesce(filing_type_cd, 'NULL'), ',') as event_file_types, u_first_name, u_middle_name, @@ -176,48 +175,44 @@ def get_corp_users_query(corp_nums: list): 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as earliest_event_dt_str, min(u_email_addr) as u_email_addr, - u_role_typ_cd + u_role_typ_cd, + p_cc_holder_name from ( select - upper(u.user_id) as u_user_id, - u.last_name as u_last_name, - u.first_name as u_first_name, - u.middle_name as u_middle_name, + upper(u.user_id) as u_user_id, + trim(u.last_name) as u_last_name, + trim(u.first_name) as u_first_name, + trim(u.middle_name) as u_middle_name, e.event_type_cd, f.filing_type_cd, e.event_timerstamp as u_timestamp, - case - when u.first_name is null and u.middle_name is null and u.last_name is null then null - else upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) - end as u_full_name, u.email_addr as u_email_addr, - u.role_typ_cd as u_role_typ_cd + u.role_typ_cd as u_role_typ_cd, + p.cc_holder_nme as p_cc_holder_name from event e left outer join filing f on e.event_id = f.event_id - left outer join filing_user u on u.event_id = 
e.event_id + left outer join filing_user u on e.event_id = u.event_id + left outer join payment p on e.event_id = p.event_id where 1 = 1 -- and e.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') and e.corp_num in ({corp_nums_str}) union -- staff comment at business level select - upper(cc.user_id) as u_user_id, - cc.last_nme as u_last_name, - cc.first_nme as u_first_name, - cc.middle_nme as u_middle_name, + upper(cc.user_id) as u_user_id, + trim(cc.last_nme) as u_last_name, + trim(cc.first_nme) as u_first_name, + trim(cc.middle_nme) as u_middle_name, 'STAFF' as event_type_cd, -- placeholder 'COMMENT' as filing_type_cd, -- placeholder comment_dts as u_timestamp, - case - when cc.first_nme is null and cc.middle_nme is null and cc.last_nme is null then null - else upper(concat_ws('_', nullif(trim(cc.first_nme),''), nullif(trim(cc.middle_nme),''), nullif(trim(cc.last_nme),''))) - end as u_full_name, null as u_email_addr, - null as u_role_typ_cd + null as u_role_typ_cd, + null as p_cc_holder_name from corp_comments cc where cc.corp_num in ({corp_nums_str}) ) sub - group by sub.u_user_id, sub.u_full_name, sub.u_first_name, sub.u_middle_name, sub.u_last_name, sub.u_role_typ_cd + group by sub.u_user_id, sub.u_first_name, sub.u_middle_name, sub.u_last_name, sub.u_role_typ_cd, sub.p_cc_holder_name order by sub.u_user_id; """ return query @@ -605,16 +600,13 @@ def get_filings_query(corp_num): and end_event_id is null ) as cs_state_event_id, --- filing user - upper(u.user_id) as u_user_id, - u.last_name as u_last_name, - u.first_name as u_first_name, - u.middle_name as u_middle_name, - case - when u.first_name is null and u.middle_name is null and u.last_name is null then null - else upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) - end as u_full_name, + upper(u.user_id) as u_user_id, + trim(u.last_name) as u_last_name, + trim(u.first_name) as u_first_name, + trim(u.middle_name) as u_middle_name, u.email_addr as u_email_addr, u.role_typ_cd as u_role_typ_cd, + p.cc_holder_nme as p_cc_holder_name, --- conversion ledger cl.ledger_title_txt as cl_ledger_title_txt, -- conv event @@ -625,6 +617,7 @@ def get_filings_query(corp_num): from event e left outer join filing f on e.event_id = f.event_id left outer join filing_user u on u.event_id = e.event_id + left outer join payment p on p.event_id = e.event_id left outer join conv_ledger cl on cl.event_id = e.event_id left outer join conv_event ce on e.event_id = ce.event_id left outer join corp_name cn_old on e.event_id = cn_old.end_event_id @@ -684,14 +677,10 @@ def get_business_comments_query(corp_num): ) as cc_comments_dts_str, cc.comments as cc_comments, cc.accession_comments as cc_accession_comments, - upper(cc.user_id) as cc_user_id, - cc.first_nme as cc_first_name, - cc.last_nme as cc_last_name, - cc.middle_nme as cc_middle_name, - case - when cc.first_nme is null and cc.middle_nme is null and cc.last_nme is null then null - else upper(concat_ws('_', nullif(trim(cc.first_nme),''), nullif(trim(cc.middle_nme),''), nullif(trim(cc.last_nme),''))) - end as cc_full_name + upper(cc.user_id) as u_user_id, + trim(cc.first_nme) as u_first_name, + trim(cc.last_nme) as u_last_name, + trim(cc.middle_nme) as u_middle_name from corp_comments cc where corp_num = '{corp_num}'; """ diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 6d51bf5afb..51cd081cb6 100644 --- 
a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -319,9 +319,7 @@ def format_filings_data(data: dict) -> list[dict]: jurisdiction = None amalgamation = None - # make it None if no valid value - if not (user_id := x['u_user_id']): - user_id = x['u_full_name'] if x['u_full_name'] else None + user_id = get_username(x) if ( raw_filing_type == 'conversion' @@ -496,8 +494,7 @@ def format_business_comments_data(data: dict) -> list: for x in business_comments_data: c = x['cc_comments'] if x['cc_comments'] else x['cc_accession_comments'] - if not (staff_id := x['cc_user_id']): - staff_id = x['cc_full_name'] if x['cc_full_name'] else None + staff_id = get_username(x) comment = { 'comment': c, 'timestamp': x['cc_comments_dts_str'], @@ -581,19 +578,14 @@ def format_users_data(users_data: list) -> list: and not any(ef == 'STAFF_COMMENT' for ef in event_file_types): continue - if not (username := x['u_user_id']): - username = x['u_full_name'] + username = get_username(x) - # skip if both u_user_id and u_full_name is empty if not username: continue user = { **user, 'username': username, - 'firstname': x['u_first_name'], - 'middlename': x['u_middle_name'], - 'lastname': x['u_last_name'], 'email': x['u_email_addr'], 'creation_date': x['earliest_event_dt_str'] } @@ -813,6 +805,20 @@ def build_epoch_filing(business_id: int) -> dict: return filing +def get_username(data: dict) -> str: + first_name = data.get('u_first_name') + middle_name = data.get('u_middle_name') + last_name = data.get('u_last_name') + + username = ' '.join([name for name in [first_name, middle_name, last_name] if name]) + if not username: + username = data.get('u_user_id') + if not username: + username = data.get('p_cc_holder_name') + + return username + + def load_data(conn: Connection, table_name: str, data: dict, diff --git a/data-tool/scripts/colin_corps_extract_postgres_ddl b/data-tool/scripts/colin_corps_extract_postgres_ddl index 5a8d5ee8af..f4e8b18be2 100644 --- a/data-tool/scripts/colin_corps_extract_postgres_ddl +++ b/data-tool/scripts/colin_corps_extract_postgres_ddl @@ -772,6 +772,17 @@ comment on table corp_involved_cont_in is 'new table\n\n"Optionally, a ""Continu alter table corp_involved_cont_in owner to postgres; +create table if not exists payment +( + event_id numeric(9) not null + constraint fk_payment + references event (event_id), + payment_typ_cd varchar(4) not null, + cc_holder_nme varchar(80) +); + +alter table payment + owner to postgres; CREATE INDEX if not exists ix_conv_event_event_id ON conv_event (event_id); @@ -865,6 +876,8 @@ CREATE INDEX if not exists ix_office_mailing_addr_id ON office (mailing_addr_id) CREATE INDEX if not exists ix_office_delivery_addr_id ON office (delivery_addr_id); +CREATE INDEX if not exists ix_payment_event_id ON payment (event_id); + CREATE INDEX if not exists ix_resolution_corp_num ON resolution (corp_num); CREATE INDEX if not exists ix_resolution_start_event_id ON resolution (start_event_id); diff --git a/data-tool/scripts/transfer_cprd_corps.sql b/data-tool/scripts/transfer_cprd_corps.sql index 96477af66b..c0a54fc932 100644 --- a/data-tool/scripts/transfer_cprd_corps.sql +++ b/data-tool/scripts/transfer_cprd_corps.sql @@ -66,7 +66,10 @@ select case else c.CORP_NUM end CORP_NUM, CORP_FROZEN_TYP_CD as corp_frozen_type_cd, - CORP_TYP_CD as CORP_TYPE_CD, + case + when c.CORP_TYP_CD in ('QA', 'QB', 'QC', 'QD', 'QE') then 'BC' + else c.CORP_TYP_CD + end CORP_TYPE_CD, RECOGNITION_DTS, BN_9, bn_15, @@ -889,6 +892,22 @@ where 
cp.CORP_PARTY_ID = pn.party_id order by c.corp_num; + +-- payment +transfer public.payment from cprd using +select p.event_id, + p.payment_typ_cd, + p.cc_holder_nme +from payment p + , event e + , corporation c +where p.event_id = e.event_id +and e.corp_num = c.corp_num +and c.corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') +order by e.event_id; + + + -- alter tables alter table corporation alter column send_ar_ind type boolean using send_ar_ind::boolean; alter table filing From ad0260c2e60ec2d7eb4fe5b0416d1ccbd21762c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Thu, 27 Feb 2025 15:01:04 -0800 Subject: [PATCH 082/133] - updated CI scripts to run on Ubuntu 24.04 (#3263) Co-authored-by: Severin Beauvais --- .github/workflows/business-pay-ocp-cd.yml | 4 ++-- .github/workflows/colin-api-cd.yml | 4 ++-- .github/workflows/colin-api-ci.yml | 8 ++++---- .github/workflows/data-reset-tool-cd.yml | 4 ++-- .github/workflows/data-reset-tool-ci.yml | 8 ++++---- .github/workflows/email-reminder-cd.yml | 4 ++-- .github/workflows/email-reminder-ci.yml | 8 ++++---- .github/workflows/entity-bn-cd.yml | 4 ++-- .github/workflows/entity-bn-ci.yml | 8 ++++---- .github/workflows/entity-digital-credentials-cd.yml | 4 ++-- .github/workflows/entity-digital-credentials-ci.yml | 8 ++++---- .github/workflows/entity-emailer-cd.yml | 4 ++-- .github/workflows/entity-emailer-ci.yml | 8 ++++---- .github/workflows/entity-filer-cd.yml | 4 ++-- .github/workflows/entity-filer-ci.yml | 8 ++++---- .github/workflows/entity-pay-cd.yml | 4 ++-- .github/workflows/entity-pay-ci.yml | 8 ++++---- .github/workflows/expired-limited-restoration-cd.yml | 4 ++-- .github/workflows/expired-limited-restoration-ci.yml | 8 ++++---- .github/workflows/filings-notebook-report-cd.yml | 4 ++-- .github/workflows/filings-notebook-report-ci.yml | 8 ++++---- .github/workflows/furnishings-cd.yml | 4 ++-- .github/workflows/furnishings-ci.yml | 8 ++++---- .github/workflows/future-effective-filings-cd.yml | 4 ++-- .github/workflows/future-effective-filings-ci.yml | 8 ++++---- .github/workflows/involuntary-dissolutions-cd.yml | 4 ++-- .github/workflows/involuntary-dissolutions-ci.yml | 8 ++++---- .github/workflows/legal-api-cd.yml | 4 ++-- .github/workflows/legal-api-ci.yml | 8 ++++---- .github/workflows/sftp-icbc-report-cd.yml | 4 ++-- .github/workflows/sftp-icbc-report-ci.yml | 8 ++++---- .github/workflows/sftp-nuans-report-cd.yml | 4 ++-- .github/workflows/sftp-nuans-report-ci.yml | 8 ++++---- .github/workflows/update-colin-filings-cd.yml | 4 ++-- .github/workflows/update-colin-filings-ci.yml | 8 ++++---- .github/workflows/update-legal-filings-cd.yml | 4 ++-- .github/workflows/update-legal-filings-ci.yml | 8 ++++---- 37 files changed, 110 insertions(+), 110 deletions(-) diff --git a/.github/workflows/business-pay-ocp-cd.yml b/.github/workflows/business-pay-ocp-cd.yml index 908121310f..1c2676acf7 100644 --- a/.github/workflows/business-pay-ocp-cd.yml +++ b/.github/workflows/business-pay-ocp-cd.yml @@ -24,7 +24,7 @@ env: jobs: entity-pay-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -67,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-pay-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/colin-api-cd.yml b/.github/workflows/colin-api-cd.yml index 
9b3e921239..59c8dfabd0 100644 --- a/.github/workflows/colin-api-cd.yml +++ b/.github/workflows/colin-api-cd.yml @@ -24,7 +24,7 @@ env: jobs: colin-api-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -67,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} colin-api-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/colin-api-ci.yml b/.github/workflows/colin-api-ci.yml index 2f0a954179..b55ce2d36c 100644 --- a/.github/workflows/colin-api-ci.yml +++ b/.github/workflows/colin-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -64,7 +64,7 @@ jobs: JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 GO_LIVE_DATE: 2019-08-12 - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -102,7 +102,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/data-reset-tool-cd.yml b/.github/workflows/data-reset-tool-cd.yml index 6d0497e245..d5bda34531 100644 --- a/.github/workflows/data-reset-tool-cd.yml +++ b/.github/workflows/data-reset-tool-cd.yml @@ -24,7 +24,7 @@ env: jobs: data-reset-tool-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -67,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} data-reset-tool-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/data-reset-tool-ci.yml b/.github/workflows/data-reset-tool-ci.yml index 2c3cb28994..9874bfe487 100644 --- a/.github/workflows/data-reset-tool-ci.yml +++ b/.github/workflows/data-reset-tool-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -50,7 +50,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 @@ -75,7 +75,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/email-reminder-cd.yml b/.github/workflows/email-reminder-cd.yml index 5c6ff84bfb..c31dd262a6 100644 --- a/.github/workflows/email-reminder-cd.yml +++ b/.github/workflows/email-reminder-cd.yml @@ -24,7 +24,7 @@ env: jobs: email-reminder-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} email-reminder-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/email-reminder-ci.yml b/.github/workflows/email-reminder-ci.yml index cf1a3fb317..5ee967fd17 100644 --- a/.github/workflows/email-reminder-ci.yml +++ b/.github/workflows/email-reminder-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: 
setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: # testing: # needs: setup-job - # runs-on: ubuntu-20.04 + # runs-on: ubuntu-24.04 # steps: # - uses: actions/checkout@v3 # - name: Set up Python ${{ matrix.python-version }} @@ -66,7 +66,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-bn-cd.yml b/.github/workflows/entity-bn-cd.yml index 57ea0d4c29..a9bc66d3a3 100644 --- a/.github/workflows/entity-bn-cd.yml +++ b/.github/workflows/entity-bn-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-bn-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -68,7 +68,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-bn-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/entity-bn-ci.yml b/.github/workflows/entity-bn-ci.yml index a4b71479a4..8b3890c536 100644 --- a/.github/workflows/entity-bn-ci.yml +++ b/.github/workflows/entity-bn-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -67,7 +67,7 @@ jobs: BN_HUB_CLIENT_ID: id BN_HUB_CLIENT_SECRET: secret - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -105,7 +105,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-digital-credentials-cd.yml b/.github/workflows/entity-digital-credentials-cd.yml index 56ceda2230..78a829e399 100644 --- a/.github/workflows/entity-digital-credentials-cd.yml +++ b/.github/workflows/entity-digital-credentials-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-digital-credentials-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -68,7 +68,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-digital-credentials-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/entity-digital-credentials-ci.yml b/.github/workflows/entity-digital-credentials-ci.yml index bca691e437..18a85be556 100644 --- a/.github/workflows/entity-digital-credentials-ci.yml +++ b/.github/workflows/entity-digital-credentials-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -63,7 +63,7 @@ jobs: TEST_NATS_DOCKER: True STAN_CLUSTER_NAME: test-cluster - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -101,7 +101,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-emailer-cd.yml b/.github/workflows/entity-emailer-cd.yml index 9643b0bd78..4a0d9b9f62 100644 --- 
a/.github/workflows/entity-emailer-cd.yml +++ b/.github/workflows/entity-emailer-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-emailer-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -68,7 +68,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-emailer-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/entity-emailer-ci.yml b/.github/workflows/entity-emailer-ci.yml index 3b51c68ac5..899e882226 100644 --- a/.github/workflows/entity-emailer-ci.yml +++ b/.github/workflows/entity-emailer-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -73,7 +73,7 @@ jobs: TEST_NATS_DOCKER: True STAN_CLUSTER_NAME: test-cluster - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -121,7 +121,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-filer-cd.yml b/.github/workflows/entity-filer-cd.yml index c02c180cff..bbaa7705fb 100644 --- a/.github/workflows/entity-filer-cd.yml +++ b/.github/workflows/entity-filer-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-filer-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -68,7 +68,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-filer-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/entity-filer-ci.yml b/.github/workflows/entity-filer-ci.yml index 8dfe6e208e..e59c10fe35 100644 --- a/.github/workflows/entity-filer-ci.yml +++ b/.github/workflows/entity-filer-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -75,7 +75,7 @@ jobs: BUSINESS_EVENTS_TOPIC: projects/project-id/topics/test - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -122,7 +122,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-pay-cd.yml b/.github/workflows/entity-pay-cd.yml index 77508a607b..232a4e1967 100644 --- a/.github/workflows/entity-pay-cd.yml +++ b/.github/workflows/entity-pay-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-pay-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -68,7 +68,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-pay-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/entity-pay-ci.yml b/.github/workflows/entity-pay-ci.yml index d6163cf3da..0f53b4a757 100644 --- a/.github/workflows/entity-pay-ci.yml +++ b/.github/workflows/entity-pay-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: 
ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -69,7 +69,7 @@ jobs: NATS_SUBJECT: entity.filings NATS_EMAILER_SUBJECT: entity.email - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -107,7 +107,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/expired-limited-restoration-cd.yml b/.github/workflows/expired-limited-restoration-cd.yml index 902b9cb75b..bc3008ac9f 100644 --- a/.github/workflows/expired-limited-restoration-cd.yml +++ b/.github/workflows/expired-limited-restoration-cd.yml @@ -24,7 +24,7 @@ env: jobs: expired-limited-restoration-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} expired-limited-restoration-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/expired-limited-restoration-ci.yml b/.github/workflows/expired-limited-restoration-ci.yml index f32a1b70d9..6ce8de2548 100644 --- a/.github/workflows/expired-limited-restoration-ci.yml +++ b/.github/workflows/expired-limited-restoration-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -73,7 +73,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/filings-notebook-report-cd.yml b/.github/workflows/filings-notebook-report-cd.yml index e1465ef216..b1003e5b62 100644 --- a/.github/workflows/filings-notebook-report-cd.yml +++ b/.github/workflows/filings-notebook-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: filings-notebook-report-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} filings-notebook-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/filings-notebook-report-ci.yml b/.github/workflows/filings-notebook-report-ci.yml index 7dbd4adac6..3cb98bce83 100644 --- a/.github/workflows/filings-notebook-report-ci.yml +++ b/.github/workflows/filings-notebook-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -48,7 +48,7 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -72,7 +72,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: 
actions/checkout@v3 diff --git a/.github/workflows/furnishings-cd.yml b/.github/workflows/furnishings-cd.yml index 0c43cacf23..8b0045fc2a 100644 --- a/.github/workflows/furnishings-cd.yml +++ b/.github/workflows/furnishings-cd.yml @@ -24,7 +24,7 @@ env: jobs: furnishings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} furnishings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/furnishings-ci.yml b/.github/workflows/furnishings-ci.yml index b57ee06f73..676f3c11c6 100644 --- a/.github/workflows/furnishings-ci.yml +++ b/.github/workflows/furnishings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 env: DATABASE_USERNAME: postgres DATABASE_PASSWORD: postgres @@ -98,7 +98,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/future-effective-filings-cd.yml b/.github/workflows/future-effective-filings-cd.yml index ccde9f3172..20f33580fc 100644 --- a/.github/workflows/future-effective-filings-cd.yml +++ b/.github/workflows/future-effective-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: future-effective-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} future-effective-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/future-effective-filings-ci.yml b/.github/workflows/future-effective-filings-ci.yml index 83e9babd84..a09473283a 100644 --- a/.github/workflows/future-effective-filings-ci.yml +++ b/.github/workflows/future-effective-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -73,7 +73,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/involuntary-dissolutions-cd.yml b/.github/workflows/involuntary-dissolutions-cd.yml index a06f83284f..61c0ccaf05 100644 --- a/.github/workflows/involuntary-dissolutions-cd.yml +++ b/.github/workflows/involuntary-dissolutions-cd.yml @@ -24,7 +24,7 @@ env: jobs: involuntary-dissolutions-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} involuntary-dissolutions-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: 
github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/involuntary-dissolutions-ci.yml b/.github/workflows/involuntary-dissolutions-ci.yml index 1b1c1c0a2a..e429452eae 100644 --- a/.github/workflows/involuntary-dissolutions-ci.yml +++ b/.github/workflows/involuntary-dissolutions-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 env: DATABASE_USERNAME: postgres DATABASE_PASSWORD: postgres @@ -99,7 +99,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/legal-api-cd.yml b/.github/workflows/legal-api-cd.yml index 9be7c2c570..f6f9173e0f 100644 --- a/.github/workflows/legal-api-cd.yml +++ b/.github/workflows/legal-api-cd.yml @@ -24,7 +24,7 @@ env: jobs: legal-api-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -67,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} legal-api-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index 9b58e4d715..c44b92fd30 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -69,7 +69,7 @@ jobs: BUSINESS_SCHEMA_NAME: digital_business_card BUSINESS_SCHEMA_VERSION: "1.0.0" - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: postgres: @@ -112,7 +112,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/sftp-icbc-report-cd.yml b/.github/workflows/sftp-icbc-report-cd.yml index b5c70f7a84..aa82ccee58 100644 --- a/.github/workflows/sftp-icbc-report-cd.yml +++ b/.github/workflows/sftp-icbc-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: sftp-icbc-report-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} sftp-icbc-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/sftp-icbc-report-ci.yml b/.github/workflows/sftp-icbc-report-ci.yml index 7643027782..5901715657 100644 --- a/.github/workflows/sftp-icbc-report-ci.yml +++ b/.github/workflows/sftp-icbc-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -48,7 +48,7 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: 
Set up Python ${{ matrix.python-version }} @@ -72,7 +72,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/sftp-nuans-report-cd.yml b/.github/workflows/sftp-nuans-report-cd.yml index b26330c4d8..c7f33fa562 100644 --- a/.github/workflows/sftp-nuans-report-cd.yml +++ b/.github/workflows/sftp-nuans-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: sftp-nuans-report-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} sftp-nuans-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/sftp-nuans-report-ci.yml b/.github/workflows/sftp-nuans-report-ci.yml index eddfd6dcb4..ea89a11652 100644 --- a/.github/workflows/sftp-nuans-report-ci.yml +++ b/.github/workflows/sftp-nuans-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -48,7 +48,7 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -72,7 +72,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/update-colin-filings-cd.yml b/.github/workflows/update-colin-filings-cd.yml index 72f32fc312..3678209c35 100644 --- a/.github/workflows/update-colin-filings-cd.yml +++ b/.github/workflows/update-colin-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: update-colin-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} update-colin-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/update-colin-filings-ci.yml b/.github/workflows/update-colin-filings-ci.yml index f40f45fe28..69d15a2ffa 100644 --- a/.github/workflows/update-colin-filings-ci.yml +++ b/.github/workflows/update-colin-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -73,7 +73,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/update-legal-filings-cd.yml b/.github/workflows/update-legal-filings-cd.yml index 18cdc2a743..d9f56367cd 100644 --- a/.github/workflows/update-legal-filings-cd.yml +++ b/.github/workflows/update-legal-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: update-legal-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && 
github.repository == 'bcgov/lear' environment: @@ -62,7 +62,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} update-legal-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: diff --git a/.github/workflows/update-legal-filings-ci.yml b/.github/workflows/update-legal-filings-ci.yml index 95ec9b62aa..b9158e7d04 100644 --- a/.github/workflows/update-legal-filings-ci.yml +++ b/.github/workflows/update-legal-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -73,7 +73,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 From 82d9c711e62608447e88f8ea2ba150e959ae3b54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 12:24:59 -0800 Subject: [PATCH 083/133] - added CLI tool step (#3265) Co-authored-by: Severin Beauvais --- .github/workflows/legal-api-cd.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/legal-api-cd.yml b/.github/workflows/legal-api-cd.yml index f6f9173e0f..4f80bd9a2e 100644 --- a/.github/workflows/legal-api-cd.yml +++ b/.github/workflows/legal-api-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | From 3df04fcd21dcbf0d3f62442c2aa76635e4ee2694 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 12:56:48 -0800 Subject: [PATCH 084/133] - added CLI tool steps (#3266) Co-authored-by: Severin Beauvais --- .github/workflows/business-pay-ocp-cd.yml | 11 +++++++++++ .github/workflows/colin-api-cd.yml | 11 +++++++++++ .github/workflows/data-reset-tool-cd.yml | 11 +++++++++++ .github/workflows/email-reminder-cd.yml | 11 +++++++++++ .github/workflows/entity-bn-cd.yml | 11 +++++++++++ .github/workflows/entity-digital-credentials-cd.yml | 11 +++++++++++ .github/workflows/entity-emailer-cd.yml | 11 +++++++++++ .github/workflows/entity-filer-cd.yml | 11 +++++++++++ .github/workflows/entity-pay-cd.yml | 11 +++++++++++ .github/workflows/expired-limited-restoration-cd.yml | 11 +++++++++++ .github/workflows/filings-notebook-report-cd.yml | 11 +++++++++++ .github/workflows/furnishings-cd.yml | 11 +++++++++++ .github/workflows/future-effective-filings-cd.yml | 11 +++++++++++ .github/workflows/involuntary-dissolutions-cd.yml | 11 +++++++++++ .github/workflows/sftp-icbc-report-cd.yml | 11 +++++++++++ .github/workflows/sftp-nuans-report-cd.yml | 11 +++++++++++ .github/workflows/update-colin-filings-cd.yml | 11 +++++++++++ .github/workflows/update-legal-filings-cd.yml | 11 +++++++++++ 18 files changed, 198 insertions(+) diff --git 
a/.github/workflows/business-pay-ocp-cd.yml b/.github/workflows/business-pay-ocp-cd.yml index 1c2676acf7..ff8dfbdbd1 100644 --- a/.github/workflows/business-pay-ocp-cd.yml +++ b/.github/workflows/business-pay-ocp-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/colin-api-cd.yml b/.github/workflows/colin-api-cd.yml index 59c8dfabd0..ed5a1443ce 100644 --- a/.github/workflows/colin-api-cd.yml +++ b/.github/workflows/colin-api-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/data-reset-tool-cd.yml b/.github/workflows/data-reset-tool-cd.yml index d5bda34531..4268199b14 100644 --- a/.github/workflows/data-reset-tool-cd.yml +++ b/.github/workflows/data-reset-tool-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/email-reminder-cd.yml b/.github/workflows/email-reminder-cd.yml index c31dd262a6..17691779f2 100644 --- a/.github/workflows/email-reminder-cd.yml +++ b/.github/workflows/email-reminder-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-bn-cd.yml b/.github/workflows/entity-bn-cd.yml index a9bc66d3a3..2807e67cd3 100644 --- a/.github/workflows/entity-bn-cd.yml +++ b/.github/workflows/entity-bn-cd.yml @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | 
echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-digital-credentials-cd.yml b/.github/workflows/entity-digital-credentials-cd.yml index 78a829e399..e9d9b7388f 100644 --- a/.github/workflows/entity-digital-credentials-cd.yml +++ b/.github/workflows/entity-digital-credentials-cd.yml @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-emailer-cd.yml b/.github/workflows/entity-emailer-cd.yml index 4a0d9b9f62..3c41b87722 100644 --- a/.github/workflows/entity-emailer-cd.yml +++ b/.github/workflows/entity-emailer-cd.yml @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-filer-cd.yml b/.github/workflows/entity-filer-cd.yml index bbaa7705fb..fb967a593e 100644 --- a/.github/workflows/entity-filer-cd.yml +++ b/.github/workflows/entity-filer-cd.yml @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-pay-cd.yml b/.github/workflows/entity-pay-cd.yml index 232a4e1967..0b894e07a9 100644 --- a/.github/workflows/entity-pay-cd.yml +++ b/.github/workflows/entity-pay-cd.yml @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/expired-limited-restoration-cd.yml b/.github/workflows/expired-limited-restoration-cd.yml index bc3008ac9f..04293b6f90 100644 --- a/.github/workflows/expired-limited-restoration-cd.yml +++ 
b/.github/workflows/expired-limited-restoration-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/filings-notebook-report-cd.yml b/.github/workflows/filings-notebook-report-cd.yml index b1003e5b62..c3f1ed9e5a 100644 --- a/.github/workflows/filings-notebook-report-cd.yml +++ b/.github/workflows/filings-notebook-report-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/furnishings-cd.yml b/.github/workflows/furnishings-cd.yml index 8b0045fc2a..5257ba742d 100644 --- a/.github/workflows/furnishings-cd.yml +++ b/.github/workflows/furnishings-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/future-effective-filings-cd.yml b/.github/workflows/future-effective-filings-cd.yml index 20f33580fc..f4f9fbca4d 100644 --- a/.github/workflows/future-effective-filings-cd.yml +++ b/.github/workflows/future-effective-filings-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/involuntary-dissolutions-cd.yml b/.github/workflows/involuntary-dissolutions-cd.yml index 61c0ccaf05..d9c0eaa064 100644 --- a/.github/workflows/involuntary-dissolutions-cd.yml +++ b/.github/workflows/involuntary-dissolutions-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ 
github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/sftp-icbc-report-cd.yml b/.github/workflows/sftp-icbc-report-cd.yml index aa82ccee58..2391fafd9d 100644 --- a/.github/workflows/sftp-icbc-report-cd.yml +++ b/.github/workflows/sftp-icbc-report-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/sftp-nuans-report-cd.yml b/.github/workflows/sftp-nuans-report-cd.yml index c7f33fa562..0829e9568b 100644 --- a/.github/workflows/sftp-nuans-report-cd.yml +++ b/.github/workflows/sftp-nuans-report-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/update-colin-filings-cd.yml b/.github/workflows/update-colin-filings-cd.yml index 3678209c35..057a11b77b 100644 --- a/.github/workflows/update-colin-filings-cd.yml +++ b/.github/workflows/update-colin-filings-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/update-legal-filings-cd.yml b/.github/workflows/update-legal-filings-cd.yml index d9f56367cd..1c67e81346 100644 --- a/.github/workflows/update-legal-filings-cd.yml +++ b/.github/workflows/update-legal-filings-cd.yml @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | From ac59d0f3e113b3eeffca5be6a87f089630777bad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 14:59:00 -0800 Subject: [PATCH 085/133] try setup-python v5 (#3267) --- 
.github/workflows/legal-api-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index c44b92fd30..244ac8a7e9 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -86,7 +86,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install docker-compose From 36318d3a4cc85af9bc2c718d66f8c02943e68c93 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Fri, 28 Feb 2025 15:25:46 -0800 Subject: [PATCH 086/133] 26144 Tombstone pipeline - duplicate row in filing query (#3268) --- data-tool/flows/corps_tombstone_flow.py | 4 ++-- data-tool/flows/tombstone/tombstone_queries.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 3e96ca86bb..f47b0d8bba 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -311,11 +311,11 @@ def update_auth(conn: Connection, config, corp_num: str, tombstone_data: dict): business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) - if entity_status == HTTPStatus.OK: + if entity_status == HTTPStatus.OK and (admin_email := tombstone_data.get('admin_email')): update_email_status = AuthService.update_contact_email( config=config, identifier=business_data['identifier'], - email=tombstone_data['admin_email'] + email=admin_email ) if update_email_status != HTTPStatus.OK: raise Exception(f"""Failed to update admin email in auth {business_data['identifier']}""") diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 8c8c615bcd..10761d3031 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -565,7 +565,7 @@ def get_jurisdictions_query(corp_num): def get_filings_query(corp_num): query = f""" - select + select -- event e.event_id as e_event_id, e.corp_num as e_corp_num, @@ -620,8 +620,8 @@ def get_filings_query(corp_num): left outer join payment p on p.event_id = e.event_id left outer join conv_ledger cl on cl.event_id = e.event_id left outer join conv_event ce on e.event_id = ce.event_id - left outer join corp_name cn_old on e.event_id = cn_old.end_event_id - left outer join corp_name cn_new on e.event_id = cn_new.start_event_id + left outer join corp_name cn_old on e.event_id = cn_old.end_event_id and cn_old.corp_name_typ_cd in ('CO', 'NB') + left outer join corp_name cn_new on e.event_id = cn_new.start_event_id and cn_new.corp_name_typ_cd in ('CO', 'NB') where 1 = 1 and e.corp_num = '{corp_num}' -- and e.corp_num = 'BC0068889' From 67a45a66401659da1eaf1f849d49349217c04ebc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 15:36:05 -0800 Subject: [PATCH 087/133] - changed to use Ubuntu 22.04 in order to retain build compatibility with requirements (#3269) Co-authored-by: Severin Beauvais --- .github/workflows/legal-api-ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index 244ac8a7e9..0dee39f8cf 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 strategy: matrix: @@ -69,7 +69,7 @@ jobs: BUSINESS_SCHEMA_NAME: digital_business_card BUSINESS_SCHEMA_VERSION: "1.0.0" - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 services: postgres: @@ -112,7 +112,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v3 From 72938cc9024b9042558904cb838cb6fe16e51068 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 16:03:56 -0800 Subject: [PATCH 088/133] - added missing Python version (#3271) Co-authored-by: Severin Beauvais --- .github/workflows/legal-api-ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index 0dee39f8cf..d49e75bad7 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -83,6 +83,10 @@ jobs: # needed because the postgres container does not provide a healthcheck options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + strategy: + matrix: + python-version: [3.8, 3.9] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} From 6c807871d161165aa8e406bd8a9a221c2a0e3515 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 16:20:51 -0800 Subject: [PATCH 089/133] - revert to Ubuntu 24.04 (#3272) - just run unit tests with Python 3.8 Co-authored-by: Severin Beauvais --- .github/workflows/legal-api-ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index d49e75bad7..f39a6a43c5 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -69,7 +69,7 @@ jobs: BUSINESS_SCHEMA_NAME: digital_business_card BUSINESS_SCHEMA_VERSION: "1.0.0" - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 services: postgres: @@ -85,7 +85,7 @@ jobs: strategy: matrix: - python-version: [3.8, 3.9] + python-version: [3.8] steps: - uses: actions/checkout@v3 @@ -116,7 +116,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 From 37fd12c6972aacb7b62965c3c0b6fc8e1c5302be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9verin=20Beauvais?= Date: Fri, 28 Feb 2025 16:52:20 -0800 Subject: [PATCH 090/133] - changed remaining setup-python to v5 (#3273) - added missing strategy.matrix.python-version Co-authored-by: Severin Beauvais --- .github/workflows/colin-api-ci.yml | 8 ++++++-- .github/workflows/data-reset-tool-ci.yml | 8 ++++++-- .github/workflows/email-reminder-ci.yml | 7 +++++-- .github/workflows/entity-bn-ci.yml | 8 ++++++-- .github/workflows/entity-digital-credentials-ci.yml | 8 ++++++-- .github/workflows/entity-emailer-ci.yml | 8 ++++++-- .github/workflows/entity-filer-ci.yml | 5 ++--- 
.github/workflows/entity-pay-ci.yml | 8 ++++++-- .github/workflows/expired-limited-restoration-ci.yml | 9 +++++++-- .github/workflows/filings-notebook-report-ci.yml | 9 +++++++-- .github/workflows/furnishings-ci.yml | 9 +++++++-- .github/workflows/future-effective-filings-ci.yml | 9 +++++++-- .github/workflows/involuntary-dissolutions-ci.yml | 9 +++++++-- .github/workflows/legal-api-ci.yml | 8 ++++---- .github/workflows/sftp-icbc-report-ci.yml | 9 +++++++-- .github/workflows/sftp-nuans-report-ci.yml | 9 +++++++-- .github/workflows/update-colin-filings-ci.yml | 9 +++++++-- .github/workflows/update-legal-filings-ci.yml | 9 +++++++-- 18 files changed, 110 insertions(+), 39 deletions(-) diff --git a/.github/workflows/colin-api-ci.yml b/.github/workflows/colin-api-ci.yml index b55ce2d36c..52763afb5f 100644 --- a/.github/workflows/colin-api-ci.yml +++ b/.github/workflows/colin-api-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -66,6 +66,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -81,7 +85,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/data-reset-tool-ci.yml b/.github/workflows/data-reset-tool-ci.yml index 9874bfe487..a8092af1d9 100644 --- a/.github/workflows/data-reset-tool-ci.yml +++ b/.github/workflows/data-reset-tool-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -52,10 +52,14 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/email-reminder-ci.yml b/.github/workflows/email-reminder-ci.yml index 5ee967fd17..83e5263868 100644 --- a/.github/workflows/email-reminder-ci.yml +++ b/.github/workflows/email-reminder-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,10 +50,13 @@ jobs: # testing: # needs: setup-job # runs-on: ubuntu-24.04 + # strategy: + # matrix: + # python-version: [3.8] # steps: # - uses: actions/checkout@v3 # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v1 + # uses: actions/setup-python@v5 # with: # python-version: ${{ matrix.python-version }} # - name: Install dependencies diff --git a/.github/workflows/entity-bn-ci.yml b/.github/workflows/entity-bn-ci.yml index 8b3890c536..c0674fbc4b 100644 --- a/.github/workflows/entity-bn-ci.yml +++ b/.github/workflows/entity-bn-ci.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: 
actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -69,6 +69,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -84,7 +88,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/entity-digital-credentials-ci.yml b/.github/workflows/entity-digital-credentials-ci.yml index 18a85be556..1afdc52c25 100644 --- a/.github/workflows/entity-digital-credentials-ci.yml +++ b/.github/workflows/entity-digital-credentials-ci.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -65,6 +65,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -80,7 +84,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/entity-emailer-ci.yml b/.github/workflows/entity-emailer-ci.yml index 899e882226..45a45cf4f0 100644 --- a/.github/workflows/entity-emailer-ci.yml +++ b/.github/workflows/entity-emailer-ci.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -75,6 +75,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -100,7 +104,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/entity-filer-ci.yml b/.github/workflows/entity-filer-ci.yml index e59c10fe35..90c50151fa 100644 --- a/.github/workflows/entity-filer-ci.yml +++ b/.github/workflows/entity-filer-ci.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -74,7 +74,6 @@ jobs: ACCOUNT_SVC_CLIENT_SECRET: account_svc_client_secret BUSINESS_EVENTS_TOPIC: projects/project-id/topics/test - runs-on: ubuntu-24.04 strategy: @@ -96,7 +95,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install docker-compose diff --git a/.github/workflows/entity-pay-ci.yml b/.github/workflows/entity-pay-ci.yml index 0f53b4a757..0a492a0445 100644 --- a/.github/workflows/entity-pay-ci.yml +++ b/.github/workflows/entity-pay-ci.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: 
python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -71,6 +71,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -86,7 +90,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/expired-limited-restoration-ci.yml b/.github/workflows/expired-limited-restoration-ci.yml index 6ce8de2548..19fd1ff777 100644 --- a/.github/workflows/expired-limited-restoration-ci.yml +++ b/.github/workflows/expired-limited-restoration-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,10 +50,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/filings-notebook-report-ci.yml b/.github/workflows/filings-notebook-report-ci.yml index 3cb98bce83..c2b19db992 100644 --- a/.github/workflows/filings-notebook-report-ci.yml +++ b/.github/workflows/filings-notebook-report-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,10 +49,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/furnishings-ci.yml b/.github/workflows/furnishings-ci.yml index 676f3c11c6..58d8841ad4 100644 --- a/.github/workflows/furnishings-ci.yml +++ b/.github/workflows/furnishings-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -64,6 +64,11 @@ jobs: NATS_CLIENT_NAME: entity.job.tester NATS_ENTITY_EVENTS_SUBJECT: entity.events SECOND_NOTICE_DELAY: 5 + + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -77,7 +82,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/future-effective-filings-ci.yml b/.github/workflows/future-effective-filings-ci.yml index a09473283a..9de4695517 100644 --- a/.github/workflows/future-effective-filings-ci.yml +++ b/.github/workflows/future-effective-filings-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + 
uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,10 +50,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/involuntary-dissolutions-ci.yml b/.github/workflows/involuntary-dissolutions-ci.yml index e429452eae..172ce77241 100644 --- a/.github/workflows/involuntary-dissolutions-ci.yml +++ b/.github/workflows/involuntary-dissolutions-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -65,6 +65,11 @@ jobs: NATS_ENTITY_EVENTS_SUBJECT: entity.events STAGE_1_DELAY: 42 STAGE_2_DELAY: 30 + + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -78,7 +83,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index f39a6a43c5..2a9d49dfac 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -71,6 +71,10 @@ jobs: runs-on: ubuntu-24.04 + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -83,10 +87,6 @@ jobs: # needed because the postgres container does not provide a healthcheck options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - strategy: - matrix: - python-version: [3.8] - steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} diff --git a/.github/workflows/sftp-icbc-report-ci.yml b/.github/workflows/sftp-icbc-report-ci.yml index 5901715657..a6a31b1102 100644 --- a/.github/workflows/sftp-icbc-report-ci.yml +++ b/.github/workflows/sftp-icbc-report-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,10 +49,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/sftp-nuans-report-ci.yml b/.github/workflows/sftp-nuans-report-ci.yml index ea89a11652..c33e2cc7c6 100644 --- a/.github/workflows/sftp-nuans-report-ci.yml +++ b/.github/workflows/sftp-nuans-report-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,10 +49,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: 
actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/update-colin-filings-ci.yml b/.github/workflows/update-colin-filings-ci.yml index 69d15a2ffa..b5aad8b85d 100644 --- a/.github/workflows/update-colin-filings-ci.yml +++ b/.github/workflows/update-colin-filings-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,10 +50,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/update-legal-filings-ci.yml b/.github/workflows/update-legal-filings-ci.yml index b9158e7d04..201300e9f4 100644 --- a/.github/workflows/update-legal-filings-ci.yml +++ b/.github/workflows/update-legal-filings-ci.yml @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,10 +50,15 @@ jobs: testing: needs: setup-job runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies From f65af7e314169d9c7169fead53d3c50292022766 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 4 Mar 2025 08:24:06 -0800 Subject: [PATCH 091/133] 26262 - Minimize DB Versioning Logging (#3262) * remove debugger in __all__ * change to display logging when initialize a service (using FF value) * display logging when version changed --- legal-api/src/legal_api/models/db.py | 27 ++++++++++++++++++- .../sql-versioning/sql_versioning/__init__.py | 1 - 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/models/db.py b/legal-api/src/legal_api/models/db.py index 37f14813a2..44483b9d9e 100644 --- a/legal-api/src/legal_api/models/db.py +++ b/legal-api/src/legal_api/models/db.py @@ -50,6 +50,26 @@ class Transaction(db.Model): issued_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=True) +def print_versioning_info(): + """ + Print the current versioning status if not already printed. + + This should only be called within an application context. 
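+    It is typically invoked from init_db() inside an app.app_context() block
+    (as shown below), since it reads configuration from current_app.config.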
+ """ + try: + from legal_api.services import flags as flag_service # pylint: disable=import-outside-toplevel + + current_service = current_app.config.get('SERVICE_NAME') + if current_service: + db_versioning = flag_service.value('db-versioning') + use_new_versioning = (bool(db_versioning) and bool(db_versioning.get(current_service))) + current_versioning = 'new' if use_new_versioning else 'old' + print(f'\033[31mService: {current_service}, db versioning={current_versioning}\033[0m') + except Exception as err: + # Don't crash if something goes wrong + print(f'\033[31mUnable to determine versioning type: {err}\033[0m') + + def init_db(app): """Initialize database using flask app and configure db mappers. @@ -59,6 +79,9 @@ def init_db(app): db.init_app(app) orm.configure_mappers() + with app.app_context(): + print_versioning_info() + # TODO: remove versioning switching logic # TODO: remove debugging variables, messages, and decorators @@ -136,7 +159,6 @@ def _check_versioning(cls): db_versioning = flags.value('db-versioning') use_new_versioning = (bool(db_versioning) and bool(db_versioning.get(current_service))) cls._current_versioning = 'new' if use_new_versioning else 'old' - print(f'\033[31mCurrent versioning={cls._current_versioning}\033[0m') @classmethod def _initialize_versioning(cls): @@ -159,6 +181,8 @@ def _switch_versioning(cls, previous, current): """ cls._versioning_control[previous]['disable']() cls._versioning_control[current]['enable']() + # Print when versioning changes + print(f'\033[31mVersioning changed: {previous} -> {current}\033[0m') @classmethod def lock_versioning(cls, session, transaction): @@ -254,4 +278,5 @@ def clear_transaction(session, transaction): # it should be called before data model initialized, otherwise, old versioning doesn't work properly setup_versioning() + # make_versioned(user_cls=None, manager=versioning_manager) diff --git a/python/common/sql-versioning/sql_versioning/__init__.py b/python/common/sql-versioning/sql_versioning/__init__.py index d255c8e3a9..afd7253b9f 100644 --- a/python/common/sql-versioning/sql_versioning/__init__.py +++ b/python/common/sql-versioning/sql_versioning/__init__.py @@ -21,7 +21,6 @@ "TransactionFactory", "TransactionManager", "Versioned", - "debug", "disable_versioning", "enable_versioning", "version_class" From 14055e8dd8de39d71042dd50a70449bee86d0912 Mon Sep 17 00:00:00 2001 From: Argus Chiu Date: Tue, 4 Mar 2025 10:11:34 -0800 Subject: [PATCH 092/133] 25707 Update tombstone pipeline to support bringing over officers + misc updates (#3274) * 25707 Update tombstone pipeline to support bringing over officers + misc updates * move role_mapping to top of function --- data-tool/flows/batch_delete_flow.py | 32 ++++++----- data-tool/flows/corps_tombstone_flow.py | 24 +++++++-- .../flows/tombstone/tombstone_base_data.py | 4 ++ .../flows/tombstone/tombstone_queries.py | 24 +++++++-- data-tool/flows/tombstone/tombstone_utils.py | 48 +++++++++++++++-- data-tool/requirements.txt | 1 + data-tool/requirements/prefect.txt | 1 + .../scripts/colin_corps_extract_postgres_ddl | 10 ++++ .../b0937b915e6b_add_offices_held_table.py | 54 +++++++++++++++++++ 9 files changed, 175 insertions(+), 23 deletions(-) create mode 100644 legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py diff --git a/data-tool/flows/batch_delete_flow.py b/data-tool/flows/batch_delete_flow.py index bb134a5a92..4f002a4529 100644 --- a/data-tool/flows/batch_delete_flow.py +++ b/data-tool/flows/batch_delete_flow.py @@ -96,7 +96,7 @@ def 
lear_delete_non_versioned(conn: Connection, business_ids: list): query_futures_one.append( execute_query.submit(conn, plan) ) - + results_one = {} for future in query_futures_one: result = future.result() @@ -121,6 +121,10 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): { 'source': 'amalgamating_businesses', 'params': { 'amalgamation_id': results_one['amalgamations']}, + }, + { + 'source': 'offices_held', + 'params': {'party_role_id': results_one['party_roles']}, } ] @@ -129,7 +133,7 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): query_futures_two.append( execute_query.submit(conn, plan) ) - + delete_futures = [] # delete for first query results for table, ids in results_one.items(): @@ -215,6 +219,10 @@ def lear_delete_versioned(conn: Connection, business_ids: list): 'source': 'share_series_version', 'params': {'transaction_id': transaction_ids}, }, + { + 'source': 'offices_held_version', + 'params': {'transaction_id': transaction_ids}, + }, # based on others { 'source': 'batch_processing', @@ -342,7 +350,7 @@ def auth_delete(db_engine: Engine, identifiers: list): delete_futures.append( execute_delete_plan.submit(conn, table, ids) ) - + # delete records in entities table delete_futures.append( execute_delete_plan.submit(conn, 'entities', entity_ids) @@ -436,7 +444,7 @@ def delete_entities(identifiers: list, auth_svc_url, headers, timeout=None): else: failed += 1 - print(f'👷 Auth entity delete complete for this round. Succeeded: {succeeded}. Failed: {failed}. Skipped: {skipped}') + print(f'👷 Auth entity delete complete for this round. Succeeded: {succeeded}. Failed: {failed}. Skipped: {skipped}') def filter_none(values: list) -> list: @@ -451,20 +459,20 @@ def execute_query(conn: Connection, template: dict) -> dict: :param template: A dictionary specifying the query structure. Expected keys in `template` include: - + - **source** (`str`): The table to query. - - **columns** (`list[str]`, optional): The columns to select from the `source` table. + - **columns** (`list[str]`, optional): The columns to select from the `source` table. Defaults to `['id']`. - - **params** (`dict`, optional): A dictionary with filter conditions + - **params** (`dict`, optional): A dictionary with filter conditions for the query. Defaults to `None`. - - **targets** (`list[str]`, optional): A list of tables where the results will be mapped + - **targets** (`list[str]`, optional): A list of tables where the results will be mapped to targets for delete operations. Defaults to `[source]`. :return: A dictionary containing the mapping results. The format is: `{ 'target_table_name': [id1, id2, ...] }` - - where each `target_table_name` is a table specified in `targets` or the origin table of a + + where each `target_table_name` is a table specified in `targets` or the origin table of a `_version` table. The associated value is a list of IDs for records to delete in that table. 
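    For illustration only (the table and filter names below are examples, not
    prescribed values), a template such as

        {
            'source': 'offices_held',                # example table
            'params': {'party_role_id': [1, 2, 3]},  # example filter
        }

    selects `id` from `offices_held` filtered on `party_role_id` and returns
    `{'offices_held': [...]}` with the matching ids.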
""" @@ -497,7 +505,7 @@ def execute_query(conn: Connection, template: dict) -> dict: if not rows: # if source table is version table and has no record, then won't generate plan for origin table ret = {t: [] for t in targets} - else: + else: cols = zip(*rows) ret = defaultdict(list) for t, c in zip(targets, cols): @@ -507,7 +515,7 @@ def execute_query(conn: Connection, template: dict) -> dict: if (origin := (t.rsplit('_version', 1)[0])) != t: ret[origin].extend(c) ret[t].extend(c) - + return ret diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index f47b0d8bba..4bb5e82e94 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -12,6 +12,7 @@ from prefect.futures import wait from prefect.context import get_run_context from prefect.task_runners import ConcurrentTaskRunner +from prefect_dask import DaskTaskRunner from sqlalchemy import Connection, text from sqlalchemy.engine import Engine @@ -65,7 +66,6 @@ def get_unprocessed_count(config, colin_engine: Engine) -> int: def get_corp_users(colin_engine: Engine, corp_nums: list) -> list[dict]: """Get user information.""" query = get_corp_users_query(corp_nums) - sql_text = text(query) with colin_engine.connect() as conn: @@ -139,6 +139,7 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dic address['office_id'] = office_id load_data(conn, 'addresses', address) + party_roles_map = {} for party in tombstone_data['parties']: mailing_address_id = None delivery_address_id = None @@ -152,12 +153,26 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dic party['parties']['mailing_address_id'] = mailing_address_id party['parties']['delivery_address_id'] = delivery_address_id + source_full_name = party['parties']['cp_full_name'] + del party['parties']['cp_full_name'] party_id = load_data(conn, 'parties', party['parties']) for party_role in party['party_roles']: party_role['business_id'] = business_id party_role['party_id'] = party_id - load_data(conn, 'party_roles', party_role) + party_role_id = load_data(conn, 'party_roles', party_role, expecting_id=True) + + # Create a unique key for mapping + key = (source_full_name, party_role['role']) + party_roles_map[key] = party_role_id + + for office_held in tombstone_data.get('offices_held', []): + # Map to party_role_id using the key + key = (office_held['cp_full_name'], 'officer') + party_role_id = party_roles_map.get(key) + office_held['party_role_id'] = party_role_id + del office_held['cp_full_name'] + load_data(conn,'offices_held', office_held) for share_class in tombstone_data['share_classes']: share_class['share_classes']['business_id'] = business_id @@ -380,7 +395,8 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di log_prints=True, persist_result=False, # use ConcurrentTaskRunner when using work pool based deployments - # task_runner=ConcurrentTaskRunner(max_workers=35) + # task_runner=ConcurrentTaskRunner(max_workers=100) + # task_runner=DaskTaskRunner(cluster_kwargs={"n_workers": 3, "threads_per_worker": 2}) ) def tombstone_flow(): """Entry of tombstone pipeline""" @@ -485,7 +501,7 @@ def tombstone_flow(): if __name__ == "__main__": - tombstone_flow() + tombstone_flow() # # Create deployment - only intended to test locally for parallel flows # deployment = tombstone_flow.to_deployment( diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index 
8931a8741f..76b3883403 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -103,6 +103,10 @@ ] } +OFFICES_HELD = { + 'party_role_id': None, + 'title': None # enum +} # ======== share structure (composite) ======== # insert: share_class -> share_series(if any) diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 10761d3031..b1822af31c 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -64,7 +64,7 @@ def get_unprocessed_corps_subquery(flow_name, environment): from corp_involved_amalgamating ) """, - 'where': """ + 'where': """ and not exists ( select 1 from t3 @@ -97,7 +97,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): and cp.flow_name = '{flow_name}' and cp.environment = '{environment}' where 1 = 1 - {where_clause} + {where_clause} -- and c.corp_type_cd like 'BC%' -- some are 'Q%' -- and c.corp_num = 'BC0000621' -- state changes a lot -- and c.corp_num = 'BC0883637' -- one pary with multiple roles, but werid address_ids, same filing submitter but diff email @@ -458,7 +458,7 @@ def get_parties_and_addresses_query(corp_num): -- and e.corp_num = 'BC0883637' -- INC, DIR and e.corp_num = '{corp_num}' and cp.end_event_id is null - and cp.party_typ_cd in ('INC', 'DIR') + and cp.party_typ_cd in ('INC', 'DIR', 'OFF') --order by e.event_id order by cp_full_name, e.event_id ; @@ -739,12 +739,30 @@ def get_in_dissolution_query(corp_num): """ return query +def get_offices_held_query(corp_num): + query = f""" + SELECT cp.corp_party_id AS cp_corp_party_id, + concat_ws(' ', nullif(trim(cp.first_name), ''), nullif(trim(cp.middle_name), ''), + nullif(trim(cp.last_name), '')) as cp_full_name, + oh.officer_typ_cd as oh_officer_typ_cd, + e.event_id AS transaction_id + FROM event e + join corp_party cp on cp.start_event_id = e.event_id + join offices_held oh on oh.corp_party_id = cp.corp_party_id + WHERE 1 = 1 + and cp.corp_num = '{corp_num}' + and cp.end_event_id is null + AND cp.party_typ_cd IN ('OFF') + """ + return query + def get_corp_snapshot_filings_queries(config, corp_num): queries = { 'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX), 'offices': get_offices_and_addresses_query(corp_num), 'parties': get_parties_and_addresses_query(corp_num), + 'offices_held': get_offices_held_query(corp_num), 'share_classes': get_share_classes_share_series_query(corp_num), 'aliases': get_aliases_query(corp_num), 'resolutions': get_resolutions_query(corp_num), diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 51cd081cb6..fcc25b380d 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -11,7 +11,7 @@ FILING_JSON, IN_DISSOLUTION, JURISDICTION, OFFICE, PARTY, PARTY_ROLE, RESOLUTION, - SHARE_CLASSES, USER) + SHARE_CLASSES, USER, OFFICES_HELD) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, EVENT_FILING_LEAR_TARGET_MAPPING, LEAR_FILING_BUSINESS_UPDATE_MAPPING, @@ -111,11 +111,20 @@ def format_parties_data(data: dict) -> list[dict]: formatted_parties = [] + # Map role codes to role names + role_mapping = { + 'INC': 'incorporator', + 'DIR': 'director', + 'OFF': 'officer' + # Additional roles can be added here in the future + } + df = pd.DataFrame(parties_data) grouped_parties = df.groupby('cp_full_name') for _, group in grouped_parties: party = 
copy.deepcopy(PARTY) party_info = group.iloc[0].to_dict() + party['parties']['cp_full_name'] = party_info['cp_full_name'] party['parties']['first_name'] = party_info['cp_first_name'] party['parties']['middle_initial'] = party_info['cp_middle_name'] party['parties']['last_name'] = party_info['cp_last_name'] @@ -141,9 +150,11 @@ def format_parties_data(data: dict) -> list[dict]: formatted_party_roles = party['party_roles'] for _, r in group.iterrows(): - if (role_code := r['cp_party_typ_cd']) not in ['INC', 'DIR']: + if (role_code := r['cp_party_typ_cd']) not in ['INC', 'DIR', 'OFF']: continue - role = 'incorporator' if role_code == 'INC' else 'director' + + role = role_mapping[role_code] # Will raise KeyError if role_code not in mapping + party_role = copy.deepcopy(PARTY_ROLE) party_role['role'] = role party_role['appointment_date'] = r['cp_appointment_dt_str'] @@ -154,6 +165,34 @@ def format_parties_data(data: dict) -> list[dict]: return formatted_parties +def format_offices_held_data(data: dict) -> list[dict]: + offices_held_data = data['offices_held'] + + if not offices_held_data: + return [] + + formatted_offices_held = [] + + title_mapping = { + 'ASC': 'ASSISTANT_SECRETARY', + 'CEO': 'CEO', + 'CFO': 'CFO', + 'CHR': 'CHAIR', + 'OTH': 'OTHER_OFFICES', + 'PRE': 'PRESIDENT', + 'SEC': 'SECRETARY', + 'TRE': 'TREASURER', + 'VIP': 'VICE_PRESIDENT' + } + + for x in offices_held_data: + office_held = copy.deepcopy(OFFICES_HELD) + office_held['cp_full_name'] = x['cp_full_name'] + office_held['title'] = title_mapping[x['oh_officer_typ_cd']] # map to enum val + formatted_offices_held.append(office_held) + + return formatted_offices_held + def format_share_series_data(share_series_data: dict) -> dict: formatted_series = { @@ -602,7 +641,7 @@ def formatted_data_cleanup(data: dict) -> dict: 'state_filing_index': filings_business['state_filing_index'] } data['filings'] = filings_business['filings'] - + data['admin_email'] = data['businesses']['admin_email'] del data['businesses']['admin_email'] @@ -614,6 +653,7 @@ def get_data_formatters() -> dict: 'businesses': format_business_data, 'offices': format_offices_data, 'parties': format_parties_data, + 'offices_held': format_offices_held_data, 'share_classes': format_share_classes_data, 'aliases': format_aliases_data, 'resolutions': format_resolutions_data, diff --git a/data-tool/requirements.txt b/data-tool/requirements.txt index 17c45d16c9..9b877e0a7e 100755 --- a/data-tool/requirements.txt +++ b/data-tool/requirements.txt @@ -1,4 +1,5 @@ prefect==3.0.8 +prefect[dask] Babel==2.9.1 Flask>=2.0.0,<2.1.0 # Pin to 2.0.x series for flask-restx compatibility Flask-Babel==2.0.0 diff --git a/data-tool/requirements/prefect.txt b/data-tool/requirements/prefect.txt index 670302f365..9ead3f5fe2 100644 --- a/data-tool/requirements/prefect.txt +++ b/data-tool/requirements/prefect.txt @@ -1 +1,2 @@ prefect==3.0.8 +prefect[dask] diff --git a/data-tool/scripts/colin_corps_extract_postgres_ddl b/data-tool/scripts/colin_corps_extract_postgres_ddl index f4e8b18be2..708e951f4a 100644 --- a/data-tool/scripts/colin_corps_extract_postgres_ddl +++ b/data-tool/scripts/colin_corps_extract_postgres_ddl @@ -822,6 +822,8 @@ CREATE INDEX if not exists ix_corp_party_end_event_id ON corp_party (end_event_i CREATE INDEX if not exists ix_corp_party_appointment_dt ON corp_party (appointment_dt); +CREATE INDEX if not exists ix_corp_processing_id ON corp_processing (id); + CREATE INDEX if not exists ix_corp_processing_flow_run_id ON corp_processing (flow_run_id); CREATE INDEX if not exists 
ix_corp_processing_claimed_at ON corp_processing (claimed_at); @@ -897,3 +899,11 @@ CREATE INDEX if not exists ix_share_struct_cls_corp_num ON share_struct_cls (cor CREATE INDEX if not exists ix_share_struct_cls_start_event_id ON share_struct_cls (start_event_id); CREATE INDEX if not exists ix_share_struct_cls_share_class_id ON share_struct_cls (share_class_id); + +CREATE INDEX if not exists idx_corp_processing_flow_env_status ON corp_processing (flow_name, environment, processed_status, corp_num); + +CREATE INDEX if not exists idx_corp_processing_claim_batch ON corp_processing (environment, flow_name, flow_run_id, processed_status, claimed_at); + +CREATE INDEX if not exists idx_corp_state_active ON corp_state (end_event_id, corp_num); + +CREATE INDEX if not exists idx_corp_state_corp_num_end_event_id ON corp_state (corp_num, end_event_id); diff --git a/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py b/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py new file mode 100644 index 0000000000..54de8ea2b8 --- /dev/null +++ b/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py @@ -0,0 +1,54 @@ +"""“add_offices_held_table” + +Revision ID: b0937b915e6b +Revises: ad21c1ed551e +Create Date: 2025-02-28 14:30:31.105670 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. +revision = 'b0937b915e6b' +down_revision = 'ad21c1ed551e' +branch_labels = None +depends_on = None + +titles_enum = postgresql.ENUM('CEO', 'CFO', 'CHAIR', 'OTHER_OFFICES', 'TREASURER', 'VICE_PRESIDENT', + 'PRESIDENT', 'SECRETARY', 'ASSISTANT_SECRETARY', + name='titles_enum') + +def upgrade(): + titles_enum.create(op.get_bind(), checkfirst=True) + + op.create_table( + 'offices_held', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('party_role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['party_role_id'], ['party_roles.id']), + sa.PrimaryKeyConstraint('id')) + + op.add_column('offices_held', sa.Column('title', titles_enum, nullable=False)) + + op.create_table( + 'offices_held_version', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('party_role_id', sa.Integer(), nullable=False), + sa.Column('transaction_id', sa.BigInteger(), autoincrement=False, nullable=False), + sa.Column('end_transaction_id', sa.BigInteger(), nullable=True), + sa.Column('operation_type', sa.SmallInteger(), nullable=False), + sa.ForeignKeyConstraint(['party_role_id'], ['party_roles.id']), + sa.PrimaryKeyConstraint('id', 'transaction_id') + ) + + op.add_column('offices_held_version', sa.Column('title', titles_enum, nullable=False)) + + +def downgrade(): + op.drop_table('offices_held_version') + op.drop_table('offices_held') + titles_enum.drop(op.get_bind(), checkfirst=True) + + From fc3b0a1a5d72c895815e0f3439164975678d83ff Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 4 Mar 2025 10:17:20 -0800 Subject: [PATCH 093/133] 26269 - Show expiry date instead of effective date (#3277) --- .../template-parts/business-summary/stateTransition.html | 6 ++++-- legal-api/src/legal_api/reports/business_document.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/legal-api/report-templates/template-parts/business-summary/stateTransition.html b/legal-api/report-templates/template-parts/business-summary/stateTransition.html index c748ca7015..97fc113edf 100644 --- a/legal-api/report-templates/template-parts/business-summary/stateTransition.html +++ 
b/legal-api/report-templates/template-parts/business-summary/stateTransition.html @@ -35,10 +35,12 @@
    Incorporation Number: {{ filing.identifier }}
    - {% elif (filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary') or - filing.filingType == 'putBackOff' %} + {% elif (filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary') %} Effective Date: {{ filing.effectiveDateTime }} + {% elif filing.filingType == 'putBackOff' %} + Effective Date: + {{ filing.expiryDate }} {% else %} Filing Date: {{filing.filingDateTime}} diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 328b5d8bb7..744fd30af5 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -487,7 +487,7 @@ def _format_state_filing(self, filing: Filing) -> dict: _get_summary_display_name(filing_type, None, None, reason) filing_info['reason'] = reason expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date_str) - filing_info['expiryDate'] = expiry_date.strftime('%B %d, %Y') + filing_info['expiryDate'] = expiry_date.strftime(OUTPUT_DATE_FORMAT) else: filing_info['filingName'] = BusinessDocument.\ _get_summary_display_name(filing_type, None, None, None) From 14f007b15cff4685ab7942a29aafbb2da136678e Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 4 Mar 2025 13:32:35 -0800 Subject: [PATCH 094/133] 26262 - change init_db importing approach (#3278) * change importing level from legal_api to legal_api.models.db for importing init_db in BE components --- jobs/email-reminder/email_reminder.py | 2 +- jobs/furnishings/src/furnishings/worker.py | 2 +- jobs/involuntary-dissolutions/involuntary_dissolutions.py | 2 +- queue_services/entity-bn/src/entity_bn/worker.py | 2 +- .../src/entity_digital_credentials/worker.py | 2 +- queue_services/entity-emailer/src/entity_emailer/worker.py | 3 ++- queue_services/entity-filer/src/entity_filer/worker.py | 3 +-- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/jobs/email-reminder/email_reminder.py b/jobs/email-reminder/email_reminder.py index 8589665582..b998bdadf4 100644 --- a/jobs/email-reminder/email_reminder.py +++ b/jobs/email-reminder/email_reminder.py @@ -20,8 +20,8 @@ import requests import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from flask import Flask -from legal_api import init_db from legal_api.models import Business, Filing, db # noqa: I001 +from legal_api.models.db import init_db from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags from legal_api.services.queue import QueueService diff --git a/jobs/furnishings/src/furnishings/worker.py b/jobs/furnishings/src/furnishings/worker.py index 244dbc0a04..eae49e1718 100644 --- a/jobs/furnishings/src/furnishings/worker.py +++ b/jobs/furnishings/src/furnishings/worker.py @@ -20,8 +20,8 @@ import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from croniter import croniter from flask import Flask -from legal_api import init_db from legal_api.models import Configuration +from legal_api.models.db import init_db from legal_api.services.flags import Flags from legal_api.services.queue import QueueService from sentry_sdk.integrations.logging import LoggingIntegration diff --git a/jobs/involuntary-dissolutions/involuntary_dissolutions.py b/jobs/involuntary-dissolutions/involuntary_dissolutions.py index a4fda9ced3..fdaca86509 100644 --- a/jobs/involuntary-dissolutions/involuntary_dissolutions.py +++ 
b/jobs/involuntary-dissolutions/involuntary_dissolutions.py @@ -21,9 +21,9 @@ import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from croniter import croniter from flask import Flask -from legal_api import init_db from legal_api.core.filing import Filing as CoreFiling from legal_api.models import Batch, BatchProcessing, Business, Configuration, Filing, Furnishing, db # noqa: I001 +from legal_api.models.db import init_db from legal_api.services.filings.validations.dissolution import DissolutionTypes from legal_api.services.flags import Flags from legal_api.services.involuntary_dissolution import InvoluntaryDissolutionService diff --git a/queue_services/entity-bn/src/entity_bn/worker.py b/queue_services/entity-bn/src/entity_bn/worker.py index 3e4c358983..f6b16f0ea3 100644 --- a/queue_services/entity-bn/src/entity_bn/worker.py +++ b/queue_services/entity-bn/src/entity_bn/worker.py @@ -32,9 +32,9 @@ import nats from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import init_db from legal_api.core import Filing as FilingCore from legal_api.models import Business +from legal_api.models.db import init_db from legal_api.services.flags import Flags from sentry_sdk import capture_message from sqlalchemy.exc import OperationalError diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py index 380d4d8a2b..4d806e0929 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py @@ -33,9 +33,9 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import init_db from legal_api.core import Filing as FilingCore from legal_api.models import Business +from legal_api.models.db import init_db from legal_api.services import digital_credentials, flags from sqlalchemy.exc import OperationalError diff --git a/queue_services/entity-emailer/src/entity_emailer/worker.py b/queue_services/entity-emailer/src/entity_emailer/worker.py index d1e8518edd..e520eedf24 100644 --- a/queue_services/entity-emailer/src/entity_emailer/worker.py +++ b/queue_services/entity-emailer/src/entity_emailer/worker.py @@ -34,8 +34,9 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import EmailException, QueueException, logger from flask import Flask -from legal_api import db, init_db # noqa:F401,I001;pylint:disable=unused-import; +from legal_api import db # noqa:F401,I001;pylint:disable=unused-import; from legal_api.models import Filing, Furnishing +from legal_api.models.db import init_db from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags from sqlalchemy.exc import OperationalError diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 1d63140411..469c1063a4 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -36,10 +36,9 @@ from entity_queue_common.service_utils import FilingException, QueueException, logger from flask import Flask from gcp_queue import GcpQueue, SimpleCloudEvent, to_queue_message -from legal_api import init_db 
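These worker diffs all make the same one-line substitution: init_db is now imported from legal_api.models.db rather than the legal_api package root. A minimal sketch of the resulting bootstrap pattern — the factory name and config object are illustrative, not taken from these files:

    from flask import Flask
    from legal_api.models import db            # shared SQLAlchemy instance
    from legal_api.models.db import init_db    # replaces `from legal_api import init_db`


    def create_app(config_object) -> Flask:
        """Hypothetical app factory showing the new import path in use."""
        app = Flask(__name__)
        app.config.from_object(config_object)
        init_db(app)  # binds the shared models to this app
        return app
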
from legal_api.core import Filing as FilingCore from legal_api.models import Business, Filing, db -from legal_api.models.db import VersioningProxy +from legal_api.models.db import VersioningProxy, init_db from legal_api.services import Flags from legal_api.utils.datetime import datetime, timezone from sentry_sdk import capture_message From ecaeb59005fbd1e3d513441269cab272150278ba Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: Tue, 4 Mar 2025 14:14:59 -0800 Subject: [PATCH 095/133] 26189 Fix misc issue from tombstone prelim dry run & misc updates (#3275) * 26189 - Tombstone - fix misc issue from prelim dry run & misc updates Signed-off-by: Hongjing Chen * max_share type conversion for versioned records Signed-off-by: Hongjing Chen * rebase & update down_revision of alembic scripts Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- .../flows/tombstone/tombstone_base_data.py | 1 + data-tool/flows/tombstone/tombstone_utils.py | 56 +++++++++++-------- ...dd_currency_additional_to_share_classes.py | 26 +++++++++ ...0259785_modify_data_type_for_max_shares.py | 42 ++++++++++++++ legal-api/src/legal_api/models/business.py | 22 +++++--- legal-api/src/legal_api/models/share_class.py | 6 +- .../src/legal_api/models/share_series.py | 4 +- .../legal_api/reports/business_document.py | 36 ++++++++---- .../services/business_details_version.py | 4 +- 9 files changed, 149 insertions(+), 48 deletions(-) create mode 100644 legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py create mode 100644 legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index 76b3883403..f51a08f247 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -130,6 +130,7 @@ 'par_value_flag': False, 'par_value': None, # float 'currency': None, + 'currency_additional': None, 'special_rights_flag': False, # FK 'business_id': None diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index fcc25b380d..51692ca21c 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -225,9 +225,13 @@ def format_share_classes_data(data: dict) -> list[dict]: max_shares = int(share_class_info['ssc_share_quantity']) if share_class_info['ssc_share_quantity'] else None par_value = float(share_class_info['ssc_par_value_amt']) if share_class_info['ssc_par_value_amt'] else None - # TODO: map NULL or custom input value of ssc_other_currency + currency_additioanl = None if (currency := share_class_info['ssc_currency_typ_cd']) == 'OTH': - currency = share_class_info['ssc_other_currency'] + if (other_currency := share_class_info['ssc_other_currency']) and other_currency.strip() == 'CAD': + currency = 'CAD' + else: + currency = 'OTHER' # TODO: to confirm the code used in LEAR in the end + currency_additioanl = other_currency share_class['share_classes']['name'] = share_class_info['ssc_class_nme'] share_class['share_classes']['priority'] = priority @@ -236,6 +240,7 @@ def format_share_classes_data(data: dict) -> list[dict]: share_class['share_classes']['par_value_flag'] = share_class_info['ssc_par_value_ind'] share_class['share_classes']['par_value'] = par_value share_class['share_classes']['currency'] = currency + share_class['share_classes']['currency_additional'] = 
currency_additioanl share_class['share_classes']['special_rights_flag'] = share_class_info['ssc_spec_rights_ind'] # Note: srs_share_class_id should be either None or equal to share_class_id @@ -312,11 +317,14 @@ def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict: elif can_jurisdiction_code == 'OT' and len(other_jurisdiction_desc) == 6: formatted_jurisdiction['country'] = other_jurisdiction_desc[:2] formatted_jurisdiction['region'] = other_jurisdiction_desc[4:] + else: + # add placeholder for unavailable information + formatted_jurisdiction['country'] = 'UNKNOWN' return formatted_jurisdiction -def format_filings_data(data: dict) -> list[dict]: +def format_filings_data(data: dict) -> dict: # filing info in business business_update_dict = {} @@ -866,23 +874,23 @@ def load_data(conn: Connection, conflict_error = False, expecting_id: bool = True) -> Optional[int]: columns = ', '.join(data.keys()) - values = ', '.join([format_value(v) for v in data.values()]) + placeholders = ', '.join([f':{key}' for key in data.keys()]) if conflict_column: - conflict_value = format_value(data[conflict_column]) - check_query = f"select id from {table_name} where {conflict_column} = {conflict_value}" - check_result = conn.execute(text(check_query)).scalar() + conflict_value = data[conflict_column] + check_query = f"select id from {table_name} where {conflict_column} = :conflict_value" + check_result = conn.execute(text(check_query), {'conflict_value': format_value(conflict_value)}).scalar() if check_result: if not conflict_error: return check_result else: raise Exception('Trying to reload corp existing in db, run delete script first') - query = f"""insert into {table_name} ({columns}) values ({values})""" + query = f"""insert into {table_name} ({columns}) values ({placeholders})""" if expecting_id: query = query + ' returning id' - result = conn.execute(text(query)) + result = conn.execute(text(query), format_params(data)) if expecting_id: id = result.scalar() @@ -892,25 +900,27 @@ def load_data(conn: Connection, def update_data(conn: Connection, table_name: str, data: dict, column: str, value: any) -> int: - update_pairs = [f'{k} = {format_value(v)}' for k, v in data.items()] + update_pairs = [f'{k} = :{k}' for k in data.keys()] update_pairs_str = ', '.join(update_pairs) - query = f"""update {table_name} set {update_pairs_str} where {column}={format_value(value)} returning id""" + query = f"""update {table_name} set {update_pairs_str} where {column}=:condition_value returning id""" - result = conn.execute(text(query)) + params = copy.deepcopy(data) + params['condition_value'] = value + + result = conn.execute(text(query), format_params(params)) id = result.scalar() return id def format_value(value) -> str: - if value is None: - return 'NULL' - elif isinstance(value, (int, float)): - return str(value) - elif isinstance(value, dict): - value = json.dumps(value).replace("'", "''") - return f"'{value}'" - else: - # Note: handle single quote issue - value = str(value).replace("'", "''") - return f"'{value}'" + if isinstance(value, dict): + return json.dumps(value) + return value + + +def format_params(data: dict) -> dict: + formatted = {} + for k, v in data.items(): + formatted[k] = format_value(v) + return formatted diff --git a/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py b/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py new file mode 100644 index 0000000000..bb9907d610 --- /dev/null +++ 
b/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py @@ -0,0 +1,26 @@ +"""add_currency_additional_to_share_classes + +Revision ID: 24b59f535ec3 +Revises: f1d010259785 +Create Date: 2025-02-28 23:28:54.053129 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '24b59f535ec3' +down_revision = 'f1d010259785' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('share_classes', sa.Column('currency_additional', sa.String(length=40))) + op.add_column('share_classes_version', sa.Column('currency_additional', sa.String(length=40))) + + +def downgrade(): + op.drop_column('share_classes', 'currency_additional') + op.drop_column('share_classes_version', 'currency_additional') diff --git a/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py b/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py new file mode 100644 index 0000000000..e51071d42e --- /dev/null +++ b/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py @@ -0,0 +1,42 @@ +"""modify_data_type_for_max_shares + +Revision ID: f1d010259785 +Revises: b0937b915e6b +Create Date: 2025-02-28 22:29:38.543965 + +""" +from alembic import op + + +# revision identifiers, used by Alembic. +revision = 'f1d010259785' +down_revision = 'b0937b915e6b' +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute('ALTER TABLE share_classes ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + op.execute('ALTER TABLE share_series ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + + op.execute('ALTER TABLE share_classes_version ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + op.execute('ALTER TABLE share_series_version ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + + +def downgrade(): + op.execute("UPDATE share_classes SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_classes SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + op.execute("UPDATE share_classes_version SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_classes_version SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + + op.execute("UPDATE share_series SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_series SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + op.execute("UPDATE share_series_version SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_series_version SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + + op.execute("ALTER TABLE share_classes ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_series ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_classes_version ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_series_version ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + + diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index c5b66ec521..169e260362 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -329,6 +329,8 @@ def business_legal_name(self): @property def next_anniversary(self): """Retrieve the next 
anniversary date for which an AR filing is due.""" + if not self.founding_date and not self.last_ar_date: + return None last_anniversary = self.founding_date if self.last_ar_date: last_anniversary = self.last_ar_date @@ -581,20 +583,24 @@ def json(self, slim=False): if slim: return slim_json - ar_min_date, ar_max_date = self.get_ar_dates( - (self.last_ar_year if self.last_ar_year else self.founding_date.year) + 1 - ) + ar_min_date = None + ar_max_date = None + if self.last_ar_year or self.founding_date: + ar_min_date, ar_max_date = self.get_ar_dates( + (self.last_ar_year if self.last_ar_year else self.founding_date.year) + 1 + ) + d = { **slim_json, - 'arMinDate': ar_min_date.isoformat(), - 'arMaxDate': ar_max_date.isoformat(), - 'foundingDate': self.founding_date.isoformat(), + 'arMinDate': ar_min_date.isoformat() if ar_min_date else '', + 'arMaxDate': ar_max_date.isoformat() if ar_max_date else '', + 'foundingDate': self.founding_date.isoformat() if self.founding_date else '', 'hasRestrictions': self.restriction_ind, 'complianceWarnings': self.compliance_warnings, 'warnings': self.warnings, 'lastAnnualGeneralMeetingDate': datetime.date(self.last_agm_date).isoformat() if self.last_agm_date else '', 'lastAnnualReportDate': datetime.date(self.last_ar_date).isoformat() if self.last_ar_date else '', - 'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat(), + 'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat() if self.last_ledger_timestamp else '', 'lastAddressChangeDate': '', 'lastDirectorChangeDate': '', 'naicsKey': self.naics_key, @@ -602,7 +608,7 @@ def json(self, slim=False): 'naicsDescription': self.naics_description, 'nextAnnualReport': LegislationDatetime.as_legislation_timezone_from_date( self.next_anniversary - ).astimezone(timezone.utc).isoformat(), + ).astimezone(timezone.utc).isoformat() if self.next_anniversary else '', 'noDissolution': self.no_dissolution, 'associationType': self.association_type, 'allowedActions': self.allowable_actions, diff --git a/legal-api/src/legal_api/models/share_class.py b/legal-api/src/legal_api/models/share_class.py index 43ecc5da28..8e48b749a2 100644 --- a/legal-api/src/legal_api/models/share_class.py +++ b/legal-api/src/legal_api/models/share_class.py @@ -35,10 +35,11 @@ class ShareClass(db.Model, Versioned): # pylint: disable=too-many-instance-attr name = db.Column('name', db.String(1000), index=True) priority = db.Column('priority', db.Integer, nullable=True) max_share_flag = db.Column('max_share_flag', db.Boolean, unique=False, default=False) - max_shares = db.Column('max_shares', db.Integer, nullable=True) + max_shares = db.Column('max_shares', db.Numeric(20), nullable=True) par_value_flag = db.Column('par_value_flag', db.Boolean, unique=False, default=False) par_value = db.Column('par_value', db.Float, nullable=True) currency = db.Column('currency', db.String(10), nullable=True) + currency_additional = db.Column('currency_additional', db.String(40), nullable=True) special_rights_flag = db.Column('special_rights_flag', db.Boolean, unique=False, default=False) # parent keys @@ -62,7 +63,7 @@ def json(self): 'name': self.name, 'priority': self.priority, 'hasMaximumShares': self.max_share_flag, - 'maxNumberOfShares': self.max_shares, + 'maxNumberOfShares': int(self.max_shares) if self.max_shares else None, 'hasParValue': self.par_value_flag, 'parValue': self.par_value, 'currency': self.currency, @@ -121,3 +122,4 @@ def receive_before_change(mapper, connection, target): # pylint: disable=unused else: share_class.par_value = None 
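The int() casts added to the share class and share series json() helpers in this commit go hand in hand with the column change to Numeric(20): SQLAlchemy returns Numeric values as decimal.Decimal, which the default JSON encoder rejects. A small, self-contained illustration (the share quantity is made up):

    import json
    from decimal import Decimal

    # A share quantity that overflows a 32-bit INTEGER but fits NUMERIC(20).
    max_shares = Decimal('99000000000000000000')

    try:
        json.dumps({'maxNumberOfShares': max_shares})
    except TypeError:
        pass  # Decimal is not JSON serializable by default

    # Casting as the json() methods now do keeps the API payload unchanged.
    assert json.dumps({'maxNumberOfShares': int(max_shares) if max_shares else None}) == \
        '{"maxNumberOfShares": 99000000000000000000}'
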
share_class.currency = None + share_class.currency_additional = None diff --git a/legal-api/src/legal_api/models/share_series.py b/legal-api/src/legal_api/models/share_series.py index 029bd6069c..de32a0940c 100644 --- a/legal-api/src/legal_api/models/share_series.py +++ b/legal-api/src/legal_api/models/share_series.py @@ -33,7 +33,7 @@ class ShareSeries(db.Model, Versioned): # pylint: disable=too-many-instance-att name = db.Column('name', db.String(1000), index=True) priority = db.Column('priority', db.Integer, nullable=True) max_share_flag = db.Column('max_share_flag', db.Boolean, unique=False, default=False) - max_shares = db.Column('max_shares', db.Integer, nullable=True) + max_shares = db.Column('max_shares', db.Numeric(20), nullable=True) special_rights_flag = db.Column('special_rights_flag', db.Boolean, unique=False, default=False) # parent keys @@ -52,7 +52,7 @@ def json(self): 'name': self.name, 'priority': self.priority, 'hasMaximumShares': self.max_share_flag, - 'maxNumberOfShares': self.max_shares, + 'maxNumberOfShares': int(self.max_shares) if self.max_shares else None, 'hasRightsOrRestrictions': self.special_rights_flag } return share_series diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 744fd30af5..9705005e37 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -264,9 +264,13 @@ def _set_dates(self, business: dict): # pylint: disable=too-many-branches business['formatted_registration_date'] = LegislationDatetime.\ format_as_report_string(datetime.fromisoformat(registration_datetime_str)) # founding dates - founding_datetime = LegislationDatetime.as_legislation_timezone(self._business.founding_date) - business['formatted_founding_date_time'] = LegislationDatetime.format_as_report_string(founding_datetime) - business['formatted_founding_date'] = founding_datetime.strftime(OUTPUT_DATE_FORMAT) + if self._business.founding_date: + founding_datetime = LegislationDatetime.as_legislation_timezone(self._business.founding_date) + business['formatted_founding_date_time'] = LegislationDatetime.format_as_report_string(founding_datetime) + business['formatted_founding_date'] = founding_datetime.strftime(OUTPUT_DATE_FORMAT) + else: + business['formatted_founding_date_time'] = 'Not Available' + business['formatted_founding_date'] = 'Not Available' # dissolution dates if self._business.dissolution_date: dissolution_datetime = LegislationDatetime.as_legislation_timezone(self._business.dissolution_date) @@ -565,18 +569,26 @@ def _set_continuation_in_details(self, business: dict): continuation_in_filing = continuation_in_filing[0] jurisdiction = Jurisdiction.get_continuation_in_jurisdiction(continuation_in_filing.business_id) + if not jurisdiction: + return + # Format country and region region_code = jurisdiction.region country_code = jurisdiction.country - country = pycountry.countries.get(alpha_2=country_code) - region = None - if region_code and region_code.upper() != 'FEDERAL': - region = pycountry.subdivisions.get(code=f'{country_code}-{region_code}') - location_jurisdiction = f'{region.name}, {country.name}' if region else country.name + location_jurisdiction = 'Not Available' + if country_code and country_code.upper() != 'UNKNOWN': + country = pycountry.countries.get(alpha_2=country_code) + region = None + if region_code and region_code.upper() != 'FEDERAL': + region = pycountry.subdivisions.get(code=f'{country_code}-{region_code}') + 
location_jurisdiction = f'{region.name}, {country.name}' if region else country.name # Format incorporation date - incorp_date = LegislationDatetime.as_legislation_timezone(jurisdiction.incorporation_date) - formatted_incorporation_date = incorp_date.strftime(OUTPUT_DATE_FORMAT) + if jurisdiction.incorporation_date: + incorp_date = LegislationDatetime.as_legislation_timezone(jurisdiction.incorporation_date) + formatted_incorporation_date = incorp_date.strftime(OUTPUT_DATE_FORMAT) + else: + formatted_incorporation_date = 'Not Available' # Format Jurisdiction data jurisdiction_info = { @@ -586,7 +598,7 @@ def _set_continuation_in_details(self, business: dict): 'legal_name': jurisdiction.legal_name or 'Not Available', 'tax_id': jurisdiction.tax_id, 'incorporation_date': formatted_incorporation_date, - 'expro_identifier': jurisdiction.expro_identifier, + 'expro_identifier': jurisdiction.expro_identifier or 'Not Available', 'expro_legal_name': jurisdiction.expro_legal_name or 'Not Available', 'business_id': jurisdiction.business_id, 'filing_id': jurisdiction.filing_id, @@ -597,10 +609,12 @@ def _set_continuation_in_details(self, business: dict): @staticmethod def _format_address(address): + address['streetAddress'] = address.get('streetAddress') or '' address['streetAddressAdditional'] = address.get('streetAddressAdditional') or '' address['addressCity'] = address.get('addressCity') or '' address['addressRegion'] = address.get('addressRegion') or '' address['deliveryInstructions'] = address.get('deliveryInstructions') or '' + address['postalCode'] = address.get('postalCode') or '' country = address['addressCountry'] if country: diff --git a/legal-api/src/legal_api/services/business_details_version.py b/legal-api/src/legal_api/services/business_details_version.py index 61c7c3358b..048bcbb109 100644 --- a/legal-api/src/legal_api/services/business_details_version.py +++ b/legal-api/src/legal_api/services/business_details_version.py @@ -549,7 +549,7 @@ def share_class_revision_json(share_class_revision) -> dict: 'name': share_class_revision.name, 'priority': share_class_revision.priority, 'hasMaximumShares': share_class_revision.max_share_flag, - 'maxNumberOfShares': share_class_revision.max_shares, + 'maxNumberOfShares': int(share_class_revision.max_shares) if share_class_revision.max_shares else None, 'hasParValue': share_class_revision.par_value_flag, 'parValue': share_class_revision.par_value, 'currency': share_class_revision.currency, @@ -565,7 +565,7 @@ def share_series_revision_json(share_series_revision) -> dict: 'name': share_series_revision.name, 'priority': share_series_revision.priority, 'hasMaximumShares': share_series_revision.max_share_flag, - 'maxNumberOfShares': share_series_revision.max_shares, + 'maxNumberOfShares': int(share_series_revision.max_shares) if share_series_revision.max_shares else None, 'hasRightsOrRestrictions': share_series_revision.special_rights_flag } return share_series From 9cf6551066c98f5054778ddc0cde830a6659dd7f Mon Sep 17 00:00:00 2001 From: meawong Date: Tue, 4 Mar 2025 14:56:50 -0800 Subject: [PATCH 096/133] 26146 - Allow Amalgamations with C Business as Primary or Holding (#3276) * 26146 - Allow Amalgamating C business and fix validation * 26146 - PR fixes --- .../validations/amalgamation_application.py | 20 +++++++++++---- .../test_amalgamation_application.py | 25 +++++++++++++------ 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py 
b/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py index e7850e2cd7..e0909889ea 100644 --- a/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py +++ b/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py @@ -170,11 +170,21 @@ def validate_amalgamating_businesses( # pylint: disable=too-many-branches,too-m 'error': 'Adopt a name that have the same business type as the resulting business.', 'path': f'/filing/{filing_type}/nameRequest/legalName' }) - elif primary_or_holding_business and primary_or_holding_business.legal_type != legal_type: - msg.append({ - 'error': 'Legal type should be same as the legal type in primary or holding business.', - 'path': f'/filing/{filing_type}/nameRequest/legalType' - }) + + if primary_or_holding_business: + continued_types_map = { + Business.LegalTypes.CONTINUE_IN.value: Business.LegalTypes.COMP.value, + Business.LegalTypes.BCOMP_CONTINUE_IN.value: Business.LegalTypes.BCOMP.value, + Business.LegalTypes.ULC_CONTINUE_IN.value: Business.LegalTypes.BC_ULC_COMPANY.value, + Business.LegalTypes.CCC_CONTINUE_IN.value: Business.LegalTypes.BC_CCC.value + } + legal_type_to_compare = continued_types_map.get(primary_or_holding_business.legal_type, + primary_or_holding_business.legal_type) + if legal_type_to_compare != legal_type: + msg.append({ + 'error': 'Legal type should be same as the legal type in primary or holding business.', + 'path': f'/filing/{filing_type}/nameRequest/legalType' + }) msg.extend(_validate_amalgamation_type(amalgamation_type, amalgamating_business_roles, diff --git a/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py b/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py index 16a7fe6a7a..52e235b694 100644 --- a/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py +++ b/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py @@ -1391,15 +1391,26 @@ def mock_find_by_identifier(identifier): @pytest.mark.parametrize( - 'legal_type, amalgamation_type, expected_code', + 'legal_type, mock_legal_type, amalgamation_type, expected_code', [ - (Business.LegalTypes.BCOMP.value, Amalgamation.AmalgamationTypes.vertical.name, HTTPStatus.BAD_REQUEST), - (Business.LegalTypes.BCOMP.value, Amalgamation.AmalgamationTypes.horizontal.name, HTTPStatus.BAD_REQUEST), - (Business.LegalTypes.COMP.value, Amalgamation.AmalgamationTypes.vertical.name, None), - (Business.LegalTypes.COMP.value, Amalgamation.AmalgamationTypes.horizontal.name, None) + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.vertical.name, HTTPStatus.BAD_REQUEST), + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.horizontal.name, HTTPStatus.BAD_REQUEST), + (Business.LegalTypes.COMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.vertical.name, None), + (Business.LegalTypes.COMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.COMP.value, Business.LegalTypes.CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.BCOMP_CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.BC_ULC_COMPANY.value, Business.LegalTypes.ULC_CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None) ] ) 
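The continued_types_map above lets a continued-in company act as the primary or holding business in a short-form amalgamation by comparing against its post-continuation base type. A stand-alone sketch of that comparison; the string codes are assumptions about what these LegalTypes enums commonly resolve to, not taken from this diff:

    # Assumed code values: CONTINUE_IN='C', BCOMP_CONTINUE_IN='CBEN',
    # ULC_CONTINUE_IN='CUL', CCC_CONTINUE_IN='CCC'; base types 'BC', 'BEN', 'ULC', 'CC'.
    CONTINUED_TYPES_MAP = {
        'C': 'BC',
        'CBEN': 'BEN',
        'CUL': 'ULC',
        'CCC': 'CC',
    }


    def comparable_legal_type(holding_or_primary_type: str) -> str:
        """Return the type used when checking the resulting business's legal type."""
        return CONTINUED_TYPES_MAP.get(holding_or_primary_type, holding_or_primary_type)


    assert comparable_legal_type('CBEN') == 'BEN'  # continued-in BEN can head a BEN amalgamation
    assert comparable_legal_type('BC') == 'BC'     # non-continued types compare unchanged
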
-def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, amalgamation_type, expected_code): +def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, mock_legal_type, + amalgamation_type, expected_code): """Assert amalgamation legal type validation for short form.""" account_id = '123456' filing = {'filing': {}} @@ -1417,7 +1428,7 @@ def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, def mock_find_by_identifier(identifier): return Business(identifier=identifier, - legal_type=Business.LegalTypes.COMP.value) + legal_type=mock_legal_type) mocker.patch('legal_api.services.filings.validations.amalgamation_application.validate_name_request', return_value=[]) From a61a1aa5c2c4573b2d5642e58b4a4819d3f9b113 Mon Sep 17 00:00:00 2001 From: EasonPan Date: Tue, 4 Mar 2025 15:53:24 -0800 Subject: [PATCH 097/133] 26262 - change logging approach (#3279) * change logging approach to use logger.info or logger.error instead of print() --- legal-api/src/legal_api/models/db.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/legal-api/src/legal_api/models/db.py b/legal-api/src/legal_api/models/db.py index 44483b9d9e..e8c4007d5e 100644 --- a/legal-api/src/legal_api/models/db.py +++ b/legal-api/src/legal_api/models/db.py @@ -64,10 +64,10 @@ def print_versioning_info(): db_versioning = flag_service.value('db-versioning') use_new_versioning = (bool(db_versioning) and bool(db_versioning.get(current_service))) current_versioning = 'new' if use_new_versioning else 'old' - print(f'\033[31mService: {current_service}, db versioning={current_versioning}\033[0m') + current_app.logger.info(f'\033[31mService: {current_service}, db versioning={current_versioning}\033[0m') except Exception as err: # Don't crash if something goes wrong - print(f'\033[31mUnable to determine versioning type: {err}\033[0m') + current_app.logger.error('Unable to read flags: %s' % repr(err), exc_info=True) def init_db(app): @@ -182,7 +182,7 @@ def _switch_versioning(cls, previous, current): cls._versioning_control[previous]['disable']() cls._versioning_control[current]['enable']() # Print when versioning changes - print(f'\033[31mVersioning changed: {previous} -> {current}\033[0m') + current_app.logger.info(f'\033[31mVersioning changed: {previous} -> {current}\033[0m') @classmethod def lock_versioning(cls, session, transaction): From 970853c5b01a78baaf3c181ffda35df04f19287a Mon Sep 17 00:00:00 2001 From: Arwen Qin <122495122+ArwenQin@users.noreply.github.com> Date: Wed, 5 Mar 2025 15:43:41 -0800 Subject: [PATCH 098/133] 26248 - update NoW language (#3284) Signed-off-by: Qin --- .../notice-of-withdrawal/recordToBeWithdrawn.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html index 179f393538..4e78cb1674 100644 --- a/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html +++ b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html @@ -1,5 +1,5 @@
    -
    Record to be Withdrawn
    +
    Withdrawn Record
    -
    \ No newline at end of file + From 2812c292e0be41bb09010051a92e8433e38f64c8 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Thu, 6 Mar 2025 08:37:38 -0800 Subject: [PATCH 099/133] 26282 Tombstone pipeline - appointment date in PST (#3281) --- data-tool/flows/tombstone/tombstone_queries.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index b1822af31c..0eb8d5b874 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -380,8 +380,8 @@ def get_parties_and_addresses_query(corp_num): when cp.appointment_dt is null and f.effective_dt is not null then date_trunc('day', f.effective_dt) when cp.appointment_dt is null and f.effective_dt is null then date_trunc('day', e.event_timerstamp) else null - end)::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cp_appointment_dt_str, - to_char(cp.cessation_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cp_cessation_dt_str, + end), 'YYYY-MM-DD') as cp_appointment_dt_str, + to_char(cp.cessation_dt, 'YYYY-MM-DD') as cp_cessation_dt_str, cp.last_name as cp_last_name, cp.middle_name as cp_middle_name, cp.first_name as cp_first_name, From 10d0c0e0265ed08f9c41ca5f768076e1594326e2 Mon Sep 17 00:00:00 2001 From: meawong Date: Thu, 6 Mar 2025 15:01:05 -0800 Subject: [PATCH 100/133] 26298 - Display Historical Date instead of Effective Date (#3280) * 26298 - Display historical date instead of effective date * 26298 - Fix lint errors --- .../template-parts/business-summary/stateTransition.html | 2 +- legal-api/src/legal_api/reports/business_document.py | 1 + legal-api/src/legal_api/utils/legislation_datetime.py | 8 ++++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/legal-api/report-templates/template-parts/business-summary/stateTransition.html b/legal-api/report-templates/template-parts/business-summary/stateTransition.html index 97fc113edf..a9556044f3 100644 --- a/legal-api/report-templates/template-parts/business-summary/stateTransition.html +++ b/legal-api/report-templates/template-parts/business-summary/stateTransition.html @@ -40,7 +40,7 @@ {{ filing.effectiveDateTime }} {% elif filing.filingType == 'putBackOff' %} Effective Date: - {{ filing.expiryDate }} + {{ filing.historicalDate }} {% else %} Filing Date: {{filing.filingDateTime}} diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 9705005e37..0de9354e97 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -492,6 +492,7 @@ def _format_state_filing(self, filing: Filing) -> dict: filing_info['reason'] = reason expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date_str) filing_info['expiryDate'] = expiry_date.strftime(OUTPUT_DATE_FORMAT) + filing_info['historicalDate'] = LegislationDatetime.format_as_next_legislation_day(expiry_date_str) else: filing_info['filingName'] = BusinessDocument.\ _get_summary_display_name(filing_type, None, None, None) diff --git a/legal-api/src/legal_api/utils/legislation_datetime.py b/legal-api/src/legal_api/utils/legislation_datetime.py index e283fc883c..4e1fb44dc2 100644 --- a/legal-api/src/legal_api/utils/legislation_datetime.py +++ b/legal-api/src/legal_api/utils/legislation_datetime.py @@ -81,6 +81,14 @@ def as_utc_timezone_from_legislation_date_str(date_string: 
str) -> datetime: _date_time = LegislationDatetime.as_legislation_timezone_from_date_str(date_string) return LegislationDatetime.as_utc_timezone(_date_time) + @staticmethod + def format_as_next_legislation_day(date_string: str) -> str: + """Return the next day in this format (eg: `August 5, 2021`).""" + input_date = datetime.fromisoformat(date_string) + next_day = input_date + timedelta(days=1) + + return next_day.strftime('%B %d, %Y') + @staticmethod def format_as_report_string(date_time: datetime) -> str: """Return a datetime string in this format (eg: `August 5, 2021 at 11:00 am Pacific time`).""" From c9c67654900b78af5713961ee2be2aba1a2832ac Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Fri, 7 Mar 2025 12:14:17 -0800 Subject: [PATCH 101/133] 26321 Hide business summary for tombstone corps (#3288) --- legal-api/flags.json | 1 + .../v2/business/business_documents.py | 21 +++++++++++++++++-- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/legal-api/flags.json b/legal-api/flags.json index 3ca7983767..35669843b1 100644 --- a/legal-api/flags.json +++ b/legal-api/flags.json @@ -5,6 +5,7 @@ "integer-flag": 10, "enable-legal-name-fix": true, "disable-nr-check": false, + "enable-business-summary-for-migrated-corps": true, "enable-involuntary-dissolution-filter": false, "enable-new-ben-statements": false, "involuntary-dissolution-filter": { diff --git a/legal-api/src/legal_api/resources/v2/business/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_documents.py index 4563a8a4bb..29a16273ff 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_documents.py @@ -21,7 +21,7 @@ from legal_api.models import Business, Filing from legal_api.models.document import Document, DocumentType from legal_api.reports.business_document import BusinessDocument -from legal_api.services import authorized +from legal_api.services import authorized, flags from legal_api.services.business import validate_document_request from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime @@ -56,6 +56,15 @@ def get_business_documents(identifier: str, document_name: str = None): response_message = {'errors': err.msg} return jsonify(response_message), err.code + # Hide business summary for tombstone corps + if ( + not flags.is_on('enable-business-summary-for-migrated-corps') and + business.is_tombstone and + business.legal_type in Business.CORPS and + document_name == 'summary' + ): + return {}, HTTPStatus.NOT_FOUND + if document_name: if 'application/pdf' in request.accept_mimetypes: return BusinessDocument(business, document_name).get_pdf() @@ -70,9 +79,17 @@ def _get_document_list(business): base_url = base_url[:base_url.find('/api')] doc_url = url_for('API2.get_business_documents', **{'identifier': business.identifier, 'document_name': None}) - business_documents = ['summary'] documents = {'documents': {}} + # Hide business summary for tombstone corps + if ( + not flags.is_on('enable-business-summary-for-migrated-corps') and + business.is_tombstone and + business.legal_type in Business.CORPS + ): + return jsonify(documents), HTTPStatus.OK + + business_documents = ['summary'] for doc in business_documents: documents['documents'][doc] = f'{base_url}{doc_url}/{doc}' From b3f2eb98fbadfc0f66b5445117bb9374009c1958 Mon Sep 17 00:00:00 2001 From: Hongjing <60866283+chenhongjing@users.noreply.github.com> Date: 
Mon, 10 Mar 2025 08:10:49 -0700 Subject: [PATCH 102/133] 26350 Tombstone - implement administer corp event/filing (#3285) * 26350 - Tombstone - implement administer corp event/filing Signed-off-by: Hongjing Chen * add other admin event types(admin, adfirm) Signed-off-by: Hongjing Chen --------- Signed-off-by: Hongjing Chen --- data-tool/flows/tombstone/tombstone_mappings.py | 9 +++++++++ data-tool/flows/tombstone/tombstone_utils.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index e33f688423..d713df32c6 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -118,6 +118,9 @@ class EventFilings(str, Enum): FILE_ICORC = 'FILE_ICORC' # TODO: Legacy Other - unsupported + ADCORP_NULL = 'ADCORP_NULL' + ADFIRM_NULL = 'ADFIRM_NULL' + ADMIN_NULL = 'ADMIN_NULL' FILE_AM_TR = 'FILE_AM_TR' # TODO: Liquidation - unsupported @@ -259,6 +262,9 @@ def has_value(cls, value): EventFilings.FILE_ICORC: 'incorporationApplication', # TODO: Legacy Other - unsupported + EventFilings.ADCORP_NULL: 'legacyOther', + EventFilings.ADFIRM_NULL: 'legacyOther', + EventFilings.ADMIN_NULL: 'legacyOther', EventFilings.FILE_AM_TR: 'legacyOther', # TODO: Liquidation - unsupported @@ -369,6 +375,9 @@ def has_value(cls, value): EventFilings.FILE_ICORC: 'Incorporation Application for a Community Contribution Company', # TODO: Legacy Other - unsupported + EventFilings.ADCORP_NULL: None, + EventFilings.ADFIRM_NULL: None, + EventFilings.ADMIN_NULL: None, EventFilings.FILE_AM_TR: 'Amendment - Transition', # TODO: Liquidation - unsupported (need to check if anything missing) diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 51692ca21c..425e398282 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -371,7 +371,7 @@ def format_filings_data(data: dict) -> dict: if ( raw_filing_type == 'conversion' or raw_filing_subtype == 'involuntary' - or (raw_filing_type == 'putBackOff' and event_file_type == 'SYSDL_NULL') + or event_file_type in ['SYSDL_NULL', 'ADCORP_NULL', 'ADFIRM_NULL', 'ADMIN_NULL'] ): hide_in_ledger = True else: From d95fd2ba62a5f6b063a99fb95ede2e95a8dd5b0f Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Mon, 10 Mar 2025 10:29:24 -0700 Subject: [PATCH 103/133] 26331 - Appoint receiver filing processor (#3287) * 26331 - appont receiver filing processor * 26331 - update function call --- legal-api/requirements.txt | 2 +- queue_services/entity-filer/requirements.txt | 2 +- .../requirements/bcregistry-libraries.txt | 2 +- .../filing_processors/appoint_receiver.py | 31 ++++++++++++ .../filing_components/__init__.py | 1 + .../entity-filer/src/entity_filer/worker.py | 4 ++ .../test_appoint_receiver.py | 49 +++++++++++++++++++ 7 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py create mode 100644 queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index c65e9cb282..ab4c3f2917 100755 --- a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 
-git+https://github.com/bcgov/business-schemas.git@2.18.35#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-filer/requirements.txt b/queue_services/entity-filer/requirements.txt index d44df25ab9..6c82924616 100755 --- a/queue_services/entity-filer/requirements.txt +++ b/queue_services/entity-filer/requirements.txt @@ -24,7 +24,7 @@ minio==7.0.2 PyPDF2==1.26.0 reportlab==3.6.12 git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-filer/requirements/bcregistry-libraries.txt b/queue_services/entity-filer/requirements/bcregistry-libraries.txt index a8e57e2c9c..fb41e35576 100644 --- a/queue_services/entity-filer/requirements/bcregistry-libraries.txt +++ b/queue_services/entity-filer/requirements/bcregistry-libraries.txt @@ -1,5 +1,5 @@ git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.34#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py b/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py new file mode 100644 index 0000000000..a40a55023c --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py @@ -0,0 +1,31 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""File processing rules and actions for the appoint receiver.""" +from typing import Dict + +from legal_api.models import Business, Filing + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components.parties import update_parties + + +def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): + # pylint: disable=too-many-branches; + """Render the appoint_receiver onto the business model objects.""" + appoint_receiver_filing = filing.get('appointReceiver') + if not appoint_receiver_filing.get('parties'): + return + + if parties := appoint_receiver_filing.get('parties'): + update_parties(business, parties, filing_rec, False) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py index bb1fa8fccc..ff6a924bb4 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py @@ -38,6 +38,7 @@ 'proprietor': PartyRole.RoleTypes.PROPRIETOR.value, 'partner': PartyRole.RoleTypes.PARTNER.value, 'applicant': PartyRole.RoleTypes.APPLICANT.value, + 'receiver': PartyRole.RoleTypes.RECEIVER.value, } diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 469c1063a4..7e45b15e63 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -53,6 +53,7 @@ alteration, amalgamation_application, annual_report, + appoint_receiver, change_of_address, change_of_directors, change_of_name, @@ -360,6 +361,9 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, elif filing.get('transparencyRegister'): transparency_register.process(business, filing_submission, filing_core_submission.json) + elif filing.get('appointReceiver'): + appoint_receiver.process(business, filing, filing_submission, filing_meta) + if filing.get('specialResolution'): special_resolution.process(business, filing, filing_submission) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py b/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py new file mode 100644 index 0000000000..cc3992d2ff --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py @@ -0,0 +1,49 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for the Appoint Receiver filing.""" + +import copy +import random + +from registry_schemas.example_data import APPOINT_RECEIVER, FILING_TEMPLATE + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import appoint_receiver +from tests.unit import create_business, create_filing + + +def test_appoint_receiver_filing_process(app, session): + """Assert that the appoint receiver object is correctly populated to model objects.""" + # Setup + identifier = 'BC1234567' + business = create_business(identifier, legal_type='BC') + + # Create filing + filing_json = copy.deepcopy(FILING_TEMPLATE) + filing_json['filing']['header']['name'] = 'appointReceiver' + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['appointReceiver'] = copy.deepcopy(APPOINT_RECEIVER) + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + filing = create_filing(payment_id, filing_json, business_id=business.id) + + filing_meta = FilingMeta() + + # Test + appoint_receiver.process(business, filing_json['filing'], filing, filing_meta) + business.save() + + # Assertions + assert len(business.party_roles.all()) == 1 + assert business.party_roles[0].role == 'receiver' From e44150a8de526cf8bd37c2987fc8b2b9de2ae186 Mon Sep 17 00:00:00 2001 From: Hongjing Chen Date: Mon, 10 Mar 2025 13:35:38 -0700 Subject: [PATCH 104/133] 26359 - Fail flow if migration fails Signed-off-by: Hongjing Chen --- data-tool/flows/corps_tombstone_flow.py | 35 ++++++++++++++++++------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 4bb5e82e94..f34271c3d9 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -12,6 +12,7 @@ from prefect.futures import wait from prefect.context import get_run_context from prefect.task_runners import ConcurrentTaskRunner +from prefect.states import Failed from prefect_dask import DaskTaskRunner from sqlalchemy import Connection, text from sqlalchemy.engine import Engine @@ -348,7 +349,7 @@ def migrate_corp_users(colin_engine: Engine, lear_engine: Engine, corp_nums: lis print(f'👷 Complete collecting and migrating users for {len(corp_nums)} corps: {", ".join(corp_nums[:5])}...') except Exception as e: print(f'❌ Error collecting and migrating users: {repr(e)}') - return None + raise e return users_mapper @@ -428,6 +429,8 @@ def tombstone_flow(): cnt = 0 migrated_cnt = 0 + total_corp_failed = 0 + is_user_failed = False while cnt < batches: # Claim next batch of reserved corps for current flow corp_nums = processing_service.claim_batch(flow_run_id, batch_size) @@ -437,12 +440,20 @@ def tombstone_flow(): print(f'👷 Start processing {len(corp_nums)} corps: {", ".join(corp_nums[:5])}...') - users_mapper = migrate_corp_users(colin_engine, lear_engine, corp_nums) - - # TODO: skip the following migration or continue? 
- if users_mapper is None: - print(f'❗ Skip populating user info for corps in this round due to user migration error.') - users_mapper = {} + try: + users_mapper = migrate_corp_users(colin_engine, lear_engine, corp_nums) + except Exception as e: + # skip migration if there's user migration error + print('❗ Skip corp migration in this round due to user migration error.') + for corp_num in corp_nums: + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.FAILED, + error=f'Failed due to user migration error in round {cnt}: {repr(e)}' + ) + is_user_failed = True + continue data_futures = [] for corp_num in corp_nums: @@ -464,7 +475,7 @@ def tombstone_flow(): flow_run_id, corp_num, ProcessingStatuses.FAILED, - error=f"Migration failed - Skip due to data collection error: {repr(clean_data)}" + error=f'Failed due to data collection error: {repr(clean_data)}' ) print(f'❗ Skip migrating {corp_num} due to data collection error.') @@ -485,10 +496,11 @@ def tombstone_flow(): flow_run_id, corp_num, ProcessingStatuses.FAILED, - error=f"Migration failed - {repr(e)}" + error=f'Failed - {repr(e)}' ) failed = len(corp_futures) - succeeded + total_corp_failed += failed + skipped print(f'🌟 Complete round {cnt}. Succeeded: {succeeded}. Failed: {failed}. Skip: {skipped}') cnt += 1 migrated_cnt += succeeded @@ -496,6 +508,11 @@ def tombstone_flow(): print(f'🌰 Complete {cnt} rounds, migrate {migrated_cnt} corps.') print(f"🌰 All unsupport event file types: {', '.join(unsupported_event_file_types)}") + if is_user_failed: + return Failed(message='Failed due to user migration error.') + if total_corp_failed > 0: + return Failed(message=f'{total_corp_failed} corps failed due to corp migration error.') + except Exception as e: raise e From 5717880745cc70d9da0c6e8bb6ffd9f78acbcf4d Mon Sep 17 00:00:00 2001 From: Karim El Jazzar <122301442+JazzarKarim@users.noreply.github.com> Date: Mon, 10 Mar 2025 15:24:39 -0700 Subject: [PATCH 105/133] bumped up version numbers for release 24.5 (#3295) --- legal-api/src/legal_api/version.py | 2 +- queue_services/entity-filer/src/entity_filer/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 60f74b6dac..4ec6d14652 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.143.0' # pylint: disable=invalid-name +__version__ = '2.144.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index ae7b37c91a..248e5567af 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.143.0' # pylint: disable=invalid-name +__version__ = '2.144.0' # pylint: disable=invalid-name From d269216bb80cd4a545c4406a3c80c17a3fcd3737 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Tue, 11 Mar 2025 11:19:38 -0700 Subject: [PATCH 106/133] 25801 Tombstone pipeline - continuation out (#3292) --- .../flows/tombstone/tombstone_mappings.py | 2 - .../flows/tombstone/tombstone_queries.py | 26 ++++++++++++- data-tool/flows/tombstone/tombstone_utils.py | 39 +++++++++++++++++++ .../src/business_pay/resources/pay_filer.py | 2 +- .../filing_processors/continuation_out.py | 1 - .../test_continuation_out.py | 1 - 6 files changed, 65 
insertions(+), 6 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index d713df32c6..3f98a8ff4b 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -434,8 +434,6 @@ def has_value(cls, value): 'changeOfDirectors': ['last_cod_date'], 'agmExtension': ['last_agm_date'], 'amalgamationApplication': ['last_coa_date', 'last_cod_date'], - # TODO: 'dissolution_date' - continuation out - # TODO: 'continuation_out_date' - continuation out 'continuationIn': ['last_coa_date', 'last_cod_date'], 'dissolution': ['dissolution_date'], 'putBackOff': ['restoration_expiry_date', 'dissolution_date'], diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 0eb8d5b874..d2b48339d1 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -613,7 +613,13 @@ def get_filings_query(corp_num): to_char(ce.effective_dt at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as ce_effective_dt_str, -- corp name change cn_old.corp_name as old_corp_name, - cn_new.corp_name as new_corp_name + cn_new.corp_name as new_corp_name, + + -- continuation out + co.can_jur_typ_cd as cont_out_can_jur_typ_cd, + to_char(co.cont_out_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cont_out_dt, + co.othr_juri_desc as cont_out_othr_juri_desc, + co.home_company_nme as cont_out_home_company_nme from event e left outer join filing f on e.event_id = f.event_id left outer join filing_user u on u.event_id = e.event_id @@ -622,6 +628,7 @@ def get_filings_query(corp_num): left outer join conv_event ce on e.event_id = ce.event_id left outer join corp_name cn_old on e.event_id = cn_old.end_event_id and cn_old.corp_name_typ_cd in ('CO', 'NB') left outer join corp_name cn_new on e.event_id = cn_new.start_event_id and cn_new.corp_name_typ_cd in ('CO', 'NB') + left outer join cont_out co on co.start_event_id = e.event_id where 1 = 1 and e.corp_num = '{corp_num}' -- and e.corp_num = 'BC0068889' @@ -757,6 +764,22 @@ def get_offices_held_query(corp_num): return query +def get_cont_out_query(corp_num): + query = f""" + select + co.can_jur_typ_cd, + to_char(co.cont_out_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cont_out_dt, + co.othr_juri_desc, + co.home_company_nme + from cont_out co + join corp_state cs on cs.corp_num = co.corp_num and cs.end_event_id is null + where co.corp_num = '{corp_num}' + and co.end_event_id is null + and cs.state_type_cd in ('HCO', 'HAO') + """ + return query + + def get_corp_snapshot_filings_queries(config, corp_num): queries = { 'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX), @@ -772,6 +795,7 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'business_comments': get_business_comments_query(corp_num), 'filing_comments': get_filing_comments_query(corp_num), 'in_dissolution': get_in_dissolution_query(corp_num), + 'cont_out': get_cont_out_query(corp_num), } return queries diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index 425e398282..dd0e7e13df 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -642,6 +642,35 @@ def format_users_data(users_data: list) -> list: return formatted_users +def format_cont_out_data(data: dict) -> dict: + cont_data = data.get('cont_out', []) + if not cont_data: + 
return {} + + cont_data = cont_data[0] + country, region = map_country_region(cont_data['can_jur_typ_cd']) + + formatted_cont_out = { + 'foreign_jurisdiction': country, + 'foreign_jurisdiction_region': region, + 'foreign_legal_name': cont_data['home_company_nme'], + 'continuation_out_date': cont_data['cont_out_dt'], + } + + return formatted_cont_out + + +def map_country_region(can_jur_typ_cd): + if can_jur_typ_cd != 'OT': + country = 'CA' + region = 'FEDERAL' if can_jur_typ_cd == 'FD' else can_jur_typ_cd + else: # placeholder for other + country = 'UNKNOWN' + region = 'UNKNOWN' + + return country, region + + def formatted_data_cleanup(data: dict) -> dict: filings_business = data['filings'] data['updates'] = { @@ -653,6 +682,7 @@ def formatted_data_cleanup(data: dict) -> dict: data['admin_email'] = data['businesses']['admin_email'] del data['businesses']['admin_email'] + data['businesses'].update(data['cont_out']) return data @@ -668,6 +698,7 @@ def get_data_formatters() -> dict: 'filings': format_filings_data, 'comments': format_business_comments_data, # only for business level, filing level will be formatted ith filings 'in_dissolution': format_in_dissolution_data, + 'cont_out': format_cont_out_data, } return ret @@ -802,6 +833,14 @@ def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_s 'reason': 'Limited Restoration Expired', 'expiryDate': effective_date[:10] } + elif filing_type == 'continuationOut': + country, region = map_country_region(data['cont_out_can_jur_typ_cd']) + meta_data['continuationOut'] = { + 'country': country, + 'region': region, + 'legalName': data['cont_out_home_company_nme'], + 'continuationOutDate': data['cont_out_dt'][:10] + } if withdrawn_ts_str := data['f_withdrawn_event_ts_str']: withdrawn_ts = datetime.strptime(withdrawn_ts_str, '%Y-%m-%d %H:%M:%S%z') diff --git a/queue_services/business-pay/src/business_pay/resources/pay_filer.py b/queue_services/business-pay/src/business_pay/resources/pay_filer.py index b439d6f90d..c5e3919e8f 100644 --- a/queue_services/business-pay/src/business_pay/resources/pay_filer.py +++ b/queue_services/business-pay/src/business_pay/resources/pay_filer.py @@ -109,7 +109,7 @@ async def worker(): logger.debug(f"Removed From Queue: no payment info in ce: {str(ce)}") return {}, HTTPStatus.OK - if payment_token.corp_type_code in ["MHR"]: + if payment_token.corp_type_code in ["MHR", "BTR", "BUS", "STRR"]: logger.debug( f"ignoring message for corp_type_code:{payment_token.corp_type_code}, {str(ce)}") return {}, HTTPStatus.OK diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py index c3b444c84b..55a46b16b4 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py @@ -40,7 +40,6 @@ def process(business: Business, continuation_out_filing: Filing, filing: Dict, f business.state = Business.State.HISTORICAL business.state_filing_id = continuation_out_filing.id - business.dissolution_date = continuation_out_date business.jurisdiction = foreign_jurisdiction_country business.foreign_legal_name = legal_name diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py index f056a7105e..5293a78a9a 100644 --- 
a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py @@ -60,7 +60,6 @@ async def test_worker_continuation_out(app, session): assert business.foreign_jurisdiction_region == foreign_jurisdiction_json['region'].upper() assert business.foreign_legal_name == filing_json['filing']['continuationOut']['legalName'] assert business.continuation_out_date == continuation_out_date - assert business.dissolution_date == continuation_out_date assert filing_meta.continuation_out['country'] == foreign_jurisdiction_json['country'] assert filing_meta.continuation_out['region'] == foreign_jurisdiction_json['region'] From 518044d33384dd1aee186dad7985c4f1cecf6ecb Mon Sep 17 00:00:00 2001 From: ketaki-deodhar <116035339+ketaki-deodhar@users.noreply.github.com> Date: Tue, 11 Mar 2025 12:45:43 -0700 Subject: [PATCH 107/133] 25959 (#3293) * 25959 - cease receiver initial implementation * 25393 - add filing type in enum and add unit tets * 25959 - fix unit test --- legal-api/src/legal_api/core/filing.py | 1 + legal-api/src/legal_api/core/meta/filing.py | 15 ++++++ legal-api/src/legal_api/models/filing.py | 15 ++++++ legal-api/src/legal_api/services/authz.py | 6 +++ .../filings/validations/cease_receiver.py | 43 +++++++++++++++++ .../filings/validations/validation.py | 4 ++ .../tests/unit/resources/v2/test_business.py | 4 ++ .../tests/unit/services/test_authorization.py | 47 ++++++++++++++----- 8 files changed, 124 insertions(+), 11 deletions(-) create mode 100644 legal-api/src/legal_api/services/filings/validations/cease_receiver.py diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index 5af982f91b..c87bb84207 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -73,6 +73,7 @@ class FilingTypes(str, Enum): AMENDEDCHANGEOFDIRECTORS = 'amendedChangeOfDirectors' ANNUALREPORT = 'annualReport' APPOINTRECEIVER = 'appointReceiver' + CEASERECEIVER = 'ceaseReceiver' CHANGEOFADDRESS = 'changeOfAddress' CHANGEOFDIRECTORS = 'changeOfDirectors' CHANGEOFNAME = 'changeOfName' diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index bb521d87e9..bb000f926c 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -220,6 +220,21 @@ class FilingTitles(str, Enum): 'CCC': 'NOARM' } }, + 'ceaseReceiver': { + 'name': 'ceaseReceiver', + 'title': 'Cease Receiver Filing', + 'displayName': 'Cease Receiver', + 'codes': { + 'BEN': 'NOCER', + 'BC': 'NOCER', + 'ULC': 'NOCER', + 'CC': 'NOCER', + 'CBEN': 'NOCER', + 'C': 'NOCER', + 'CUL': 'NOCER', + 'CCC': 'NOCER' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index e1bcf268b3..7ffcc15e8b 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -185,6 +185,21 @@ class Source(Enum): 'CCC': 'NOARM' } }, + 'ceaseReceiver': { + 'name': 'ceaseReceiver', + 'title': 'Cease Receiver Filing', + 'displayName': 'Cease Receiver', + 'codes': { + 'BEN': 'NOCER', + 'BC': 'NOCER', + 'ULC': 'NOCER', + 'CC': 'NOCER', + 'CBEN': 'NOCER', + 'C': 'NOCER', + 'CUL': 'NOCER', + 'CCC': 'NOCER' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', diff --git 
a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index ce6bcd6b41..0cf7fa3ea6 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -213,6 +213,12 @@ def get_allowable_filings_dict(): 'business': [BusinessBlocker.DEFAULT] } }, + 'ceaseReceiver': { + 'legalTypes': ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT] + } + }, 'changeOfAddress': { 'legalTypes': ['CP', 'BEN', 'BC', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { diff --git a/legal-api/src/legal_api/services/filings/validations/cease_receiver.py b/legal-api/src/legal_api/services/filings/validations/cease_receiver.py new file mode 100644 index 0000000000..5ec47269fb --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/cease_receiver.py @@ -0,0 +1,43 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+"""Validation for the Cease Receiver filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Cease Receiver filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/validation.py b/legal-api/src/legal_api/services/filings/validations/validation.py index 61045ed24f..8b1a0e2b44 100644 --- a/legal-api/src/legal_api/services/filings/validations/validation.py +++ b/legal-api/src/legal_api/services/filings/validations/validation.py @@ -28,6 +28,7 @@ from .amalgamation_application import validate as amalgamation_application_validate from .annual_report import validate as annual_report_validate from .appoint_receiver import validate as appoint_receiver_validate +from .cease_receiver import validate as cease_receiver_validate from .change_of_address import validate as coa_validate from .change_of_directors import validate as cod_validate from .change_of_name import validate as con_validate @@ -198,6 +199,9 @@ def validate(business: Business, # pylint: disable=too-many-branches,too-many-s elif k == Filing.FILINGS['appointReceiver'].get('name'): err = appoint_receiver_validate(filing_json) # pylint: disable=assignment-from-none + elif k == Filing.FILINGS['ceaseReceiver'].get('name'): + err = cease_receiver_validate(filing_json) # pylint: disable=assignment-from-none + if err: return err diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index 2273a50eef..2fc9d7ba13 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -592,6 +592,10 @@ def test_get_could_file(session, client, jwt): "displayName": "Appoint Receiver", "name": "appointReceiver" }, + { + "displayName": "Cease Receiver", + "name": "ceaseReceiver" + }, { "displayName": "Address Change", "name": "changeOfAddress" diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 775bf6809a..4118af1372 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -165,6 +165,7 @@ class FilingKey(str, Enum): TRANSPARENCY_REGISTER_CHANGE = 'TRANSPARENCY_REGISTER_CHANGE' TRANSPARENCY_REGISTER_INITIAL = 'TRANSPARENCY_REGISTER_INITIAL' APPOINT_RECEIVER = 'APPOINT_RECEIVER' + CEASE_RECEIVER = 'CEASE_RECEIVER' EXPECTED_DATA = { @@ -244,7 +245,8 @@ class FilingKey(str, Enum): FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, - FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'} + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'}, + FilingKey.CEASE_RECEIVER: {'displayName': 'Cease Receiver', 'feeCode': 'NOCER', 'name': 'ceaseReceiver'} } EXPECTED_DATA_CONT_IN = { @@ -329,7 +331,8 @@ class FilingKey(str, Enum): 
FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, - FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'} + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'}, + FilingKey.CEASE_RECEIVER: {'displayName': 'Cease Receiver', 'feeCode': 'NOCER', 'name': 'ceaseReceiver'} } BLOCKER_FILING_STATUSES = factory_incomplete_statuses() @@ -559,18 +562,18 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 'registrarsNotation', 'registrarsOrder', 'specialResolution']), ('staff_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', 'changeOfAddress', - 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', - {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', 'putBackOff', - 'registrarsNotation', 'registrarsOrder', 'transition', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', + 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', + 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, + 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', 'changeOfAddress', - 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', 'continuationOut', 'correction', - 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'putBackOff', 'registrarsNotation', - 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, - 'noticeOfWithdrawal']), + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', + 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', + 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, + 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', + {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_llc', Business.State.ACTIVE, ['LLC'], 'staff', [STAFF_ROLE], []), ('staff_active_firms', Business.State.ACTIVE, ['SP', 'GP'], 'staff', [STAFF_ROLE], ['adminFreeze', 'changeOfRegistration', 'conversion', 'correction', 'courtOrder', @@ -752,6 +755,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 
['CP', 'BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], True), ('user_active', Business.State.ACTIVE, 'annualReport', None, ['LLC'], 'general', [BASIC_USER], False), + + ('staff_active_allowed', Business.State.ACTIVE, 'ceaseReceiver', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'ceaseReceiver', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), ('user_active_allowed', Business.State.ACTIVE, 'changeOfAddress', None, ['CP', 'BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], True), @@ -967,6 +975,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -989,6 +998,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1269,6 +1279,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1291,6 +1302,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -1600,6 +1612,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ALTERATION, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -1616,6 +1629,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ALTERATION, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2080,6 +2094,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2101,6 +2116,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2248,6 +2264,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2273,6 +2290,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2297,6 +2315,7 @@ def mock_auth(one, two): # pylint: 
disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2319,6 +2338,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2630,6 +2650,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2670,6 +2691,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, @@ -2738,6 +2760,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2850,6 +2873,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ALTERATION, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, @@ -2897,6 +2921,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ALTERATION, FilingKey.AR_CORPS, FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, From b457fd5e5ee4d7a9fd82a84dac0c74d3d7ac056d Mon Sep 17 00:00:00 2001 From: Hongjing Chen Date: Tue, 11 Mar 2025 13:26:19 -0700 Subject: [PATCH 108/133] 26143 & 26323 - Tombstone - update to allow skipped event_file types & mark processed_status partial if unsupported filings exist Signed-off-by: Hongjing Chen --- .../common/corp_processing_queue_service.py | 1 + data-tool/flows/corps_tombstone_flow.py | 37 ++++++++++++------- .../flows/tombstone/tombstone_base_data.py | 4 +- .../flows/tombstone/tombstone_mappings.py | 25 +++++++++++++ .../flows/tombstone/tombstone_queries.py | 4 +- data-tool/flows/tombstone/tombstone_utils.py | 20 +++++++--- 6 files changed, 70 insertions(+), 21 deletions(-) diff --git a/data-tool/flows/common/corp_processing_queue_service.py b/data-tool/flows/common/corp_processing_queue_service.py index 62636891d5..cc348149c5 100644 --- a/data-tool/flows/common/corp_processing_queue_service.py +++ b/data-tool/flows/common/corp_processing_queue_service.py @@ -8,6 +8,7 @@ class ProcessingStatuses(str, Enum): PROCESSING = 'PROCESSING' COMPLETED = 'COMPLETED' FAILED = 'FAILED' + PARTIAL = 'PARTIAL' class CorpProcessingQueueService: def __init__(self, environment: str, db_engine, flow_name: str): diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index f34271c3d9..078c3e03dc 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -25,7 +25,7 @@ from tombstone.tombstone_utils import (build_epoch_filing, format_users_data, formatted_data_cleanup, get_data_formatters, load_data, - unsupported_event_file_types, + 
all_unsupported_types, update_data) @@ -388,7 +388,9 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di print(f'❌ Error migrating corp snapshot and filings data for {corp_num}: {repr(e)}') return corp_num, e print(f'✅ Complete migrating {corp_num}!') - return corp_num, None + + additional_info = clean_data['unsupported_types'] + return corp_num, additional_info @flow( @@ -462,7 +464,7 @@ def tombstone_flow(): ) corp_futures = [] - skipped = 0 + failed = 0 for f in data_futures: corp_num, clean_data = f.result() if clean_data and not isinstance(clean_data, Exception): @@ -470,7 +472,7 @@ def tombstone_flow(): migrate_tombstone.submit(config, lear_engine, corp_num, clean_data, users_mapper) ) else: - skipped += 1 + failed += 1 processing_service.update_corp_status( flow_run_id, corp_num, @@ -480,33 +482,42 @@ def tombstone_flow(): print(f'❗ Skip migrating {corp_num} due to data collection error.') wait(corp_futures) - succeeded = 0 + complete = 0 + partial = 0 for f in corp_futures: corp_num, e = f.result() if not e: - succeeded += 1 + complete += 1 processing_service.update_corp_status( flow_run_id, corp_num, ProcessingStatuses.COMPLETED ) - else: + elif isinstance(e, Exception): # Handle error case if needed + failed += 1 processing_service.update_corp_status( flow_run_id, corp_num, ProcessingStatuses.FAILED, - error=f'Failed - {repr(e)}' + error=f'Failed due to {repr(e)}' + ) + else: + partial += 1 + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.PARTIAL, + error=f"Partial due to unsupported event_file types: {', '.join(e)}" ) - failed = len(corp_futures) - succeeded - total_corp_failed += failed + skipped - print(f'🌟 Complete round {cnt}. Succeeded: {succeeded}. Failed: {failed}. Skip: {skipped}') + total_corp_failed += failed + print(f'🌟 Complete round {cnt}. Complete: {complete}. Partial: {partial}. 
Failed: {failed}.') cnt += 1 - migrated_cnt += succeeded + migrated_cnt += complete + partial print(f'🌰 Complete {cnt} rounds, migrate {migrated_cnt} corps.') - print(f"🌰 All unsupport event file types: {', '.join(unsupported_event_file_types)}") + print(f"🌰 All unsupport event file types: {', '.join(all_unsupported_types)}") if is_user_failed: return Failed(message='Failed due to user migration error.') diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index f51a08f247..362f246ce6 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -216,6 +216,7 @@ # business info to update }, 'state_filing_index': -1, + 'unsupported_types': None, } AMALGAMATION = { @@ -303,5 +304,6 @@ 'updates': { 'businesses': BUSINESS, 'state_filing_index': -1 - } + }, + 'unsupported_types': None, } diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 3f98a8ff4b..90864ab99f 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -428,6 +428,31 @@ def has_value(cls, value): } +SKIPPED_EVENT_FILE_TYPES = [ + # XPRO + 'FILE_CHGJU', + 'FILE_NWPTA', + 'FILE_PARES', + 'FILE_TILAT', + 'FILE_TILHO', + 'FILE_TILMA', + 'SYST_CANPS', + 'SYST_CHGJU', + 'SYST_CHGPN', + 'SYST_CO_PN', + 'SYST_LNKPS', + 'SYST_NWPTA', + 'SYST_PARES', + 'SYST_RIPFL', + 'SYST_TILAT', + 'SYST_TILHO', + 'SYST_NULL', + # Others + 'FILE_COGS1', + # TODO: may need to add more +] + + LEAR_FILING_BUSINESS_UPDATE_MAPPING = { 'incorporationApplication': ['last_coa_date', 'last_cod_date'], 'changeOfAddress': ['last_coa_date'], diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index d2b48339d1..6a2179ae3e 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -40,7 +40,7 @@ def get_unprocessed_corps_subquery(flow_name, environment): on cia2.ting_corp_num = cp.corp_num and cp.flow_name = '{flow_name}' and cp.environment = '{environment}' - and cp.processed_status = 'COMPLETED' + and cp.processed_status in ('COMPLETED', 'PARTIAL') where cia2.ted_corp_num = cia1.ted_corp_num and (cia2.ting_corp_num like 'BC%' or cia2.ting_corp_num like 'Q%' or cia2.ting_corp_num like 'C%') and cp.corp_num is null @@ -156,7 +156,7 @@ def get_total_unprocessed_count_query(flow_name, environment): and cp.environment = '{environment}' where 1 = 1 and cs.end_event_id is null - and ((cp.processed_status is null or cp.processed_status != 'COMPLETED')) + and ((cp.processed_status is null or cp.processed_status not in ('COMPLETED', 'PARTIAL'))) """ return query diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index dd0e7e13df..aca8b45352 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -17,9 +17,10 @@ LEAR_FILING_BUSINESS_UPDATE_MAPPING, LEAR_STATE_FILINGS, LEGAL_TYPE_CHANGE_FILINGS, + SKIPPED_EVENT_FILE_TYPES, EventFilings) -unsupported_event_file_types = set() +all_unsupported_types = set() def format_business_data(data: dict) -> dict: @@ -327,6 +328,7 @@ def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict: def format_filings_data(data: dict) -> dict: # filing info in business business_update_dict = {} + current_unsupported_types = set() filings_data = data['filings'] formatted_filings = 
[] @@ -335,12 +337,17 @@ def format_filings_data(data: dict) -> dict: withdrawn_filing_idx = -1 for x in filings_data: event_file_type = x['event_file_type'] + # skip event_file_type that we don't need to support + if event_file_type in SKIPPED_EVENT_FILE_TYPES: + print(f'💡 Skip event filing type: {event_file_type}') + continue # TODO: build a new complete filing event mapper (WIP) raw_filing_type, raw_filing_subtype = get_target_filing_type(event_file_type) - # skip the unsupported ones + # skip the unsupported ones (need to support in the future) if not raw_filing_type: - print(f'❗ Skip event filing type: {event_file_type}') - unsupported_event_file_types.add(event_file_type) + print(f'❗ Unsupported event filing type: {event_file_type}') + current_unsupported_types.add(event_file_type) + all_unsupported_types.add(event_file_type) continue # get converted filing_type and filing_subtype @@ -442,7 +449,8 @@ def format_filings_data(data: dict) -> dict: return { 'filings': formatted_filings, 'update_business_info': business_update_dict, - 'state_filing_index': state_filing_idx + 'state_filing_index': state_filing_idx, + 'unsupported_types': current_unsupported_types, } @@ -677,6 +685,8 @@ def formatted_data_cleanup(data: dict) -> dict: 'businesses': filings_business['update_business_info'], 'state_filing_index': filings_business['state_filing_index'] } + data['unsupported_types'] = filings_business['unsupported_types'] + data['filings'] = filings_business['filings'] data['admin_email'] = data['businesses']['admin_email'] From 9a93c11653e88bc70d1c646363837d6aedd19e73 Mon Sep 17 00:00:00 2001 From: Hongjing Chen Date: Tue, 11 Mar 2025 13:51:09 -0700 Subject: [PATCH 109/133] quote fix for filing display name after prelim dry run fix Signed-off-by: Hongjing Chen --- data-tool/flows/tombstone/tombstone_mappings.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index 90864ab99f..ccda92b488 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -358,8 +358,8 @@ def has_value(cls, value): # TODO: Delay of Dissolution - unsupported (need confirmation) # no ledger item in colin - EventFilings.DISD1_DISDE: "Registrar''s Notation - Dissolution or Cancellation Delay", # has prefix "Registrar's Notation - " - EventFilings.DISD2_DISDE: "Registrar''s Notation - Dissolution or Cancellation Delay", + EventFilings.DISD1_DISDE: "Registrar's Notation - Dissolution or Cancellation Delay", # has prefix "Registrar's Notation - " + EventFilings.DISD2_DISDE: "Registrar's Notation - Dissolution or Cancellation Delay", EventFilings.FILE_ADVD2: 'Application for Dissolution (Voluntary Dissolution)', EventFilings.FILE_ADVDS: 'Application for Dissolution (Voluntary Dissolution)', @@ -381,8 +381,8 @@ def has_value(cls, value): EventFilings.FILE_AM_TR: 'Amendment - Transition', # TODO: Liquidation - unsupported (need to check if anything missing) - # NOLDS: "Notice of Location of Dissolved Company''s Records" - # NOCDS: "Notice of Change Respecting Dissolved Company''s Records" + # NOLDS: "Notice of Location of Dissolved Company's Records" + # NOCDS: "Notice of Change Respecting Dissolved Company's Records" # NOTRA: 'Notice of Transfer of Records' # NOAPL: 'Notice of Appointment of Liquidator' # NOCAL: 'Notice of Change of Address of Liquidator And/Or Liquidation Records Office' @@ -411,8 +411,8 @@ def has_value(cls, value): 
EventFilings.FILE_AM_PO: 'Amendment - Put Back On', EventFilings.FILE_CO_PO: 'Correction - Put Back On', - EventFilings.FILE_REGSN: "Registrar''s Notation", - EventFilings.FILE_REGSO: "Registrar''s Order", + EventFilings.FILE_REGSN: "Registrar's Notation", + EventFilings.FILE_REGSO: "Registrar's Order", EventFilings.FILE_RESTL: 'Restoration Application - Limited', EventFilings.FILE_RESTF: 'Restoration Application - Full', From effb917525ac9a0f73879e6e0ca4beb17003273e Mon Sep 17 00:00:00 2001 From: Hongjing Chen Date: Tue, 11 Mar 2025 14:33:06 -0700 Subject: [PATCH 110/133] add a new list for event_file types that are not mapped to filing Signed-off-by: Hongjing Chen --- data-tool/flows/tombstone/tombstone_mappings.py | 11 ++++++++++- data-tool/flows/tombstone/tombstone_utils.py | 9 +++++---- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index ccda92b488..a752ce0dbd 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -447,9 +447,18 @@ def has_value(cls, value): 'SYST_TILAT', 'SYST_TILHO', 'SYST_NULL', + 'TRESP_NULL', + 'TRESP_COUTI', # Others 'FILE_COGS1', - # TODO: may need to add more + # TODO: decide on the final list +] + + +NO_FILING_EVENT_FILE_TYPES = [ + 'SYSD1_NULL', + 'SYSD2_NULL', + # TODO: decide on the final list ] diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index aca8b45352..b173c6146a 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -9,14 +9,15 @@ from sqlalchemy import Connection, text from tombstone.tombstone_base_data import (ALIAS, AMALGAMATION, FILING, FILING_JSON, IN_DISSOLUTION, - JURISDICTION, OFFICE, PARTY, - PARTY_ROLE, RESOLUTION, - SHARE_CLASSES, USER, OFFICES_HELD) + JURISDICTION, OFFICE, OFFICES_HELD, + PARTY, PARTY_ROLE, RESOLUTION, + SHARE_CLASSES, USER) from tombstone.tombstone_mappings import (EVENT_FILING_DISPLAY_NAME_MAPPING, EVENT_FILING_LEAR_TARGET_MAPPING, LEAR_FILING_BUSINESS_UPDATE_MAPPING, LEAR_STATE_FILINGS, LEGAL_TYPE_CHANGE_FILINGS, + NO_FILING_EVENT_FILE_TYPES, SKIPPED_EVENT_FILE_TYPES, EventFilings) @@ -344,7 +345,7 @@ def format_filings_data(data: dict) -> dict: # TODO: build a new complete filing event mapper (WIP) raw_filing_type, raw_filing_subtype = get_target_filing_type(event_file_type) # skip the unsupported ones (need to support in the future) - if not raw_filing_type: + if not raw_filing_type and event_file_type not in NO_FILING_EVENT_FILE_TYPES: print(f'❗ Unsupported event filing type: {event_file_type}') current_unsupported_types.add(event_file_type) all_unsupported_types.add(event_file_type) From 188350c7c63a6455fe878cfbe1be79a6e0db7109 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Wed, 12 Mar 2025 09:46:24 -0700 Subject: [PATCH 111/133] 26444 Tombstone pipeline - update to support custom contact email (#3298) --- data-tool/flows/config.py | 3 +++ data-tool/flows/corps_tombstone_flow.py | 7 ++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/data-tool/flows/config.py b/data-tool/flows/config.py index 534b2e79fe..59cc3c579a 100644 --- a/data-tool/flows/config.py +++ b/data-tool/flows/config.py @@ -61,6 +61,9 @@ class _Config(): # pylint: disable=too-few-public-methods else: AFFILIATE_ENTITY_ACCOUNT_ID = None + USE_CUSTOM_CONTACT_EMAIL = os.getenv('USE_CUSTOM_CONTACT_EMAIL', 'False') == 'True' + CUSTOM_CONTACT_EMAIL 
= os.getenv('CUSTOM_CONTACT_EMAIL', '') + # POSTGRESQL COLIN MIGRATION DB DB_USER_COLIN_MIGR = os.getenv('DATABASE_USERNAME_COLIN_MIGR', '') DB_PASSWORD_COLIN_MIGR = os.getenv('DATABASE_PASSWORD_COLIN_MIGR', '') diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index 078c3e03dc..527ce163f6 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -327,7 +327,12 @@ def update_auth(conn: Connection, config, corp_num: str, tombstone_data: dict): business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) - if entity_status == HTTPStatus.OK and (admin_email := tombstone_data.get('admin_email')): + + admin_email = tombstone_data.get('admin_email') + if config.USE_CUSTOM_CONTACT_EMAIL: + admin_email = config.CUSTOM_CONTACT_EMAIL + + if entity_status == HTTPStatus.OK and admin_email: update_email_status = AuthService.update_contact_email( config=config, identifier=business_data['identifier'], From 529b556c0f420f9cea52da4405b1e6a403185656 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Wed, 12 Mar 2025 14:20:37 -0700 Subject: [PATCH 112/133] 26409 Tombstone pipeline - timestamp with timezone (#3297) --- .../flows/tombstone/tombstone_queries.py | 32 +++++++++---------- data-tool/flows/tombstone/tombstone_utils.py | 3 ++ .../src/business_pay/resources/pay_filer.py | 2 +- 3 files changed, 20 insertions(+), 17 deletions(-) diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 6a2179ae3e..3b6b2cae56 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -171,7 +171,7 @@ def get_corp_users_query(corp_nums: list): u_middle_name, u_last_name, to_char( - min(u_timestamp::timestamp at time zone 'UTC'), + min(u_timestamp::timestamptz at time zone 'UTC'), 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as earliest_event_dt_str, min(u_email_addr) as u_email_addr, @@ -245,7 +245,7 @@ def get_business_query(corp_num, suffix): (case when (c.recognition_dts is null and e.event_timerstamp is not null) then e.event_timerstamp else c.recognition_dts - end)::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as founding_date, + end)::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as founding_date, -- state ( select op_state_type_cd @@ -270,7 +270,7 @@ def get_business_query(corp_num, suffix): -- c.send_ar_ind, c.last_ar_reminder_year, - to_char(c.last_ar_filed_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as last_ar_date, + to_char(c.last_ar_filed_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as last_ar_date, -- admin_freeze case when c.corp_frozen_type_cd = 'C' @@ -552,7 +552,7 @@ def get_jurisdictions_query(corp_num): j.home_company_nme as j_home_company_nme, j.home_juris_num as j_home_juris_num, to_char( - j.home_recogn_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + j.home_recogn_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as j_home_recogn_dt, j.othr_juris_desc as j_othr_juris_desc, j.bc_xpro_num as j_bc_xpro_num @@ -570,27 +570,27 @@ def get_filings_query(corp_num): e.event_id as e_event_id, e.corp_num as e_corp_num, e.event_type_cd as e_event_type_cd, - to_char(e.event_timerstamp::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str, - to_char(e.trigger_dts::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_trigger_dt_str, + 
to_char(e.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str, + to_char(e.trigger_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_trigger_dt_str, e.event_type_cd || '_' || COALESCE(f.filing_type_cd, 'NULL') as event_file_type, -- filing f.event_id as f_event_id, f.filing_type_cd as f_filing_type_cd, - to_char(f.effective_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, + to_char(f.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, f.withdrawn_event_id as f_withdrawn_event_id, case when f.withdrawn_event_id is null then null else ( select - to_char(we.event_timerstamp::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') + to_char(we.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') from event we where we.event_id = f.withdrawn_event_id ) end as f_withdrawn_event_ts_str, -- paper only now -> f_ods_type f.nr_num as f_nr_num, - to_char(f.period_end_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_period_end_dt_str, - to_char(f.change_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_change_at_str, + to_char(f.period_end_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_period_end_dt_str, + to_char(f.change_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_change_at_str, -- state filing info ( select start_event_id @@ -610,7 +610,7 @@ def get_filings_query(corp_num): --- conversion ledger cl.ledger_title_txt as cl_ledger_title_txt, -- conv event - to_char(ce.effective_dt at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as ce_effective_dt_str, + to_char(ce.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as ce_effective_dt_str, -- corp name change cn_old.corp_name as old_corp_name, cn_new.corp_name as new_corp_name, @@ -656,10 +656,10 @@ def get_amalgamation_query(corp_num): foreign_nme, -- event e.event_type_cd as e_event_type_cd, - to_char(e.event_timerstamp::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str, + to_char(e.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str, -- filing f.filing_type_cd as f_filing_type_cd, - to_char(f.effective_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, + to_char(f.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str, f.court_appr_ind as f_court_approval, -- event_file e.event_type_cd || '_' || COALESCE(f.filing_type_cd, 'NULL') as event_file_type @@ -679,7 +679,7 @@ def get_business_comments_query(corp_num): query = f""" select to_char( - cc.comment_dts::timestamp at time zone 'UTC', + cc.comment_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as cc_comments_dts_str, cc.comments as cc_comments, @@ -699,7 +699,7 @@ def get_filing_comments_query(corp_num): select e.event_id as e_event_id, to_char( - lt.ledger_text_dts::timestamp at time zone 'UTC', + lt.ledger_text_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as lt_ledger_text_dts_str, lt.user_id as lt_user_id, @@ -735,7 +735,7 @@ def get_in_dissolution_query(corp_num): e.event_id as e_event_id, e.event_type_cd as e_event_type_cd, to_char( - e.trigger_dts::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + e.trigger_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD 
HH24:MI:SSTZH:TZM'
        ) as e_trigger_dts_str
    from corp_state cs
    join event e on e.event_id = cs.start_event_id
diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py
index b173c6146a..fea459709c 100644
--- a/data-tool/flows/tombstone/tombstone_utils.py
+++ b/data-tool/flows/tombstone/tombstone_utils.py
@@ -364,6 +364,9 @@ def format_filings_data(data: dict) -> dict:
         filing_subtype = raw_filing_subtype
 
         effective_date = x['ce_effective_dt_str'] or x['f_effective_dt_str'] or x['e_event_dt_str']
+        if filing_type == 'annualReport':
+            effective_date = x['f_period_end_dt_str']
+
         filing_date = x['ce_effective_dt_str'] or x['e_event_dt_str']
         trigger_date = x['e_trigger_dt_str']
 
diff --git a/queue_services/business-pay/src/business_pay/resources/pay_filer.py b/queue_services/business-pay/src/business_pay/resources/pay_filer.py
index c5e3919e8f..75b3919931 100644
--- a/queue_services/business-pay/src/business_pay/resources/pay_filer.py
+++ b/queue_services/business-pay/src/business_pay/resources/pay_filer.py
@@ -109,7 +109,7 @@ async def worker():
         logger.debug(f"Removed From Queue: no payment info in ce: {str(ce)}")
         return {}, HTTPStatus.OK
 
-    if payment_token.corp_type_code in ["MHR", "BTR", "BUS", "STRR"]:
+    if payment_token.corp_type_code in ["MHR", "BCR", "BTR", "BUS", "STRR"]:
         logger.debug(
             f"ignoring message for corp_type_code:{payment_token.corp_type_code}, {str(ce)}")
         return {}, HTTPStatus.OK
From 96dc185af55c3fc7c4e5f6ada2d093466751350d Mon Sep 17 00:00:00 2001
From: Arwen Qin <122495122+ArwenQin@users.noreply.github.com>
Date: Wed, 12 Mar 2025 16:23:28 -0700
Subject: [PATCH 113/133] 26476 - Support Save and Update Consent to Amalgamate Out draft (#3300)

* add Consent to Amalgamate Out Filing type

Signed-off-by: Qin

* add unit tests

Signed-off-by: Qin

* fix linting error

Signed-off-by: Qin

* update schema version

Signed-off-by: Qin

* fix a unit test

Signed-off-by: Qin

---------

Signed-off-by: Qin
---
 legal-api/requirements.txt                    |  2 +-
 .../requirements/bcregistry-libraries.txt     |  2 +-
 legal-api/src/legal_api/core/filing.py        |  1 +
 legal-api/src/legal_api/core/meta/filing.py   | 21 ++++++++++++
 legal-api/src/legal_api/models/filing.py      | 14 ++++++++
 legal-api/src/legal_api/services/authz.py     |  6 ++++
 .../tests/unit/resources/v2/test_business.py  |  4 +++
 .../tests/unit/services/test_authorization.py | 34 +++++++++++++++++--
 8 files changed, 80 insertions(+), 4 deletions(-)

diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt
index ab4c3f2917..b4d90b5f84 100755
--- a/legal-api/requirements.txt
+++ b/legal-api/requirements.txt
@@ -59,5 +59,5 @@ PyPDF2==1.26.0
 reportlab==3.6.12
 html-sanitizer==2.4.1
 lxml==5.2.2
-git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas
+git+https://github.com/bcgov/business-schemas.git@2.18.39#egg=registry_schemas
 git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning
diff --git a/legal-api/requirements/bcregistry-libraries.txt b/legal-api/requirements/bcregistry-libraries.txt
index ec5a8d3ed7..a64a9a57ae 100644
--- a/legal-api/requirements/bcregistry-libraries.txt
+++ b/legal-api/requirements/bcregistry-libraries.txt
@@ -1,2 +1,2 @@
-git+https://github.com/bcgov/business-schemas.git@2.18.35#egg=registry_schemas
+git+https://github.com/bcgov/business-schemas.git@2.18.39#egg=registry_schemas
 git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning
diff --git a/legal-api/src/legal_api/core/filing.py 
b/legal-api/src/legal_api/core/filing.py index c87bb84207..f38a06a4a2 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -78,6 +78,7 @@ class FilingTypes(str, Enum): CHANGEOFDIRECTORS = 'changeOfDirectors' CHANGEOFNAME = 'changeOfName' CHANGEOFREGISTRATION = 'changeOfRegistration' + CONSENTAMALGAMATIONOUT = 'consentAmalgamationOut' CONSENTCONTINUATIONOUT = 'consentContinuationOut' CONTINUATIONIN = 'continuationIn' CONTINUATIONOUT = 'continuationOut' diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index bb000f926c..8e9c60aa48 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -318,6 +318,27 @@ class FilingTitles(str, Enum): }, ] }, + 'consentAmalgamationOut': { + 'name': 'consentAmalgamationOut', + 'title': 'Consent Amalgamation Out', + 'displayName': '6-Month Consent to Amalgamate Out', + 'codes': { + 'BC': 'IAMGO', + 'BEN': 'IAMGO', + 'ULC': 'IAMGO', + 'CC': 'IAMGO', + 'C': 'IAMGO', + 'CBEN': 'IAMGO', + 'CUL': 'IAMGO', + 'CCC': 'IAMGO' + }, + 'additional': [ + { + 'types': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CCC', 'CUL'], + 'outputs': ['letterOfConsent'] + }, + ] + }, 'consentContinuationOut': { 'name': 'consentContinuationOut', 'title': 'Consent Continuation Out', diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 7ffcc15e8b..858db218ac 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -258,6 +258,20 @@ class Source(Enum): 'GP': 'FMCHANGE' } }, + 'consentAmalgamationOut': { + 'name': 'consentAmalgamationOut', + 'title': 'Consent Amalgamation Out', + 'codes': { + 'BC': 'IAMGO', + 'BEN': 'IAMGO', + 'ULC': 'IAMGO', + 'CC': 'IAMGO', + 'C': 'IAMGO', + 'CBEN': 'IAMGO', + 'CUL': 'IAMGO', + 'CCC': 'IAMGO' + } + }, 'consentContinuationOut': { 'name': 'consentContinuationOut', 'title': 'Consent Continuation Out', diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index 0cf7fa3ea6..a5b6955835 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -243,6 +243,12 @@ def get_allowable_filings_dict(): # only show filing when providing allowable filings not specific to a business 'businessRequirement': BusinessRequirement.NOT_EXIST }, + 'consentAmalgamationOut': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT, BusinessBlocker.NOT_IN_GOOD_STANDING] + } + }, 'consentContinuationOut': { 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index 2fc9d7ba13..af390c931f 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -604,6 +604,10 @@ def test_get_could_file(session, client, jwt): "displayName": "Director Change", "name": "changeOfDirectors" }, + { + "displayName": "6-Month Consent to Amalgamate Out", + "name": "consentAmalgamationOut" + }, { "displayName": "6-Month Consent to Continue Out", "name": "consentContinuationOut" diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 4118af1372..6a1f85f6ad 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ 
b/legal-api/tests/unit/services/test_authorization.py @@ -33,6 +33,7 @@ ALTERATION_FILING_TEMPLATE, ANNUAL_REPORT, CHANGE_OF_REGISTRATION_TEMPLATE, + CONSENT_AMALGAMATION_OUT, CONSENT_CONTINUATION_OUT, CONTINUATION_IN, CONTINUATION_OUT, @@ -144,6 +145,7 @@ class FilingKey(str, Enum): AGM_EXTENSION = 'AGM_EXTENSION' AGM_LOCATION_CHANGE = 'AGM_LOCATION_CHANGE' ALTERATION = 'ALTERATION' + CONSENT_AMALGAMATION_OUT = 'CONSENT_AMALGAMATION_OUT' CONSENT_CONTINUATION_OUT = 'CONSENT_CONTINUATION_OUT' CONTINUATION_OUT = 'CONTINUATION_OUT' TRANSITION = 'TRANSITION' @@ -195,6 +197,8 @@ class FilingKey(str, Enum): FilingKey.AGM_EXTENSION: {'displayName': 'Request for AGM Extension', 'feeCode': 'AGMDT', 'name': 'agmExtension'}, FilingKey.AGM_LOCATION_CHANGE: {'displayName': 'AGM Location Change', 'feeCode': 'AGMLC', 'name': 'agmLocationChange'}, FilingKey.ALTERATION: {'displayName': 'Alteration', 'feeCode': 'ALTER', 'name': 'alteration'}, + FilingKey.CONSENT_AMALGAMATION_OUT: {'displayName': '6-Month Consent to Amalgamate Out', 'feeCode': 'IAMGO', + 'name': 'consentAmalgamationOut'}, FilingKey.CONSENT_CONTINUATION_OUT: {'displayName': '6-Month Consent to Continue Out', 'feeCode': 'CONTO', 'name': 'consentContinuationOut'}, FilingKey.CONTINUATION_OUT: {'displayName': 'Continuation Out', 'feeCode': 'COUTI', 'name': 'continuationOut'}, @@ -276,6 +280,8 @@ class FilingKey(str, Enum): FilingKey.AGM_EXTENSION: {'displayName': 'Request for AGM Extension', 'feeCode': 'AGMDT', 'name': 'agmExtension'}, FilingKey.AGM_LOCATION_CHANGE: {'displayName': 'AGM Location Change', 'feeCode': 'AGMLC', 'name': 'agmLocationChange'}, FilingKey.ALTERATION: {'displayName': 'Alteration', 'feeCode': 'ALTER', 'name': 'alteration'}, + FilingKey.CONSENT_AMALGAMATION_OUT: {'displayName': '6-Month Consent to Amalgamate Out', 'feeCode': 'IAMGO', + 'name': 'consentAmalgamationOut'}, FilingKey.CONSENT_CONTINUATION_OUT: {'displayName': '6-Month Consent to Continue Out', 'feeCode': 'CONTO', 'name': 'consentContinuationOut'}, FilingKey.CONTINUATION_OUT: {'displayName': 'Continuation Out', 'feeCode': 'COUTI', 'name': 'continuationOut'}, @@ -366,6 +372,9 @@ class FilingKey(str, Enum): CONTINUATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) CONTINUATION_OUT_TEMPLATE['filing']['continuationOut'] = CONTINUATION_OUT +CONSENT_AMALGAMATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +CONSENT_AMALGAMATION_OUT_TEMPLATE['filing']['consentAmalgamationOut'] = CONSENT_AMALGAMATION_OUT + CONSENT_CONTINUATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) CONSENT_CONTINUATION_OUT_TEMPLATE['filing']['consentContinuationOut'] = CONSENT_CONTINUATION_OUT @@ -383,6 +392,7 @@ class FilingKey(str, Enum): 'putBackOn': PUT_BACK_ON_FILING_TEMPLATE, 'continuationIn': CONTINUATION_IN_TEMPLATE, 'continuationOut': CONTINUATION_OUT_TEMPLATE, + 'consentAmalgamationOut': CONSENT_AMALGAMATION_OUT_TEMPLATE, 'consentContinuationOut': CONSENT_CONTINUATION_OUT_TEMPLATE } @@ -563,14 +573,14 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', - 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', + 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentAmalgamationOut', 'consentContinuationOut', 'continuationOut', 
'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', - 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', + 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentAmalgamationOut', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), @@ -665,6 +675,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active', Business.State.ACTIVE, 'changeOfDirectors', None, ['LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'consentAmalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'consentAmalgamationOut', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'consentContinuationOut', None, ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), ('staff_active', Business.State.ACTIVE, 'consentContinuationOut', None, @@ -875,6 +890,9 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_historical', Business.State.HISTORICAL, 'changeOfRegistration', None, ['SP', 'GP'], 'staff', [STAFF_ROLE], False), + ('staff_historical', Business.State.HISTORICAL, 'consentAmalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_historical', Business.State.HISTORICAL, 'consentContinuationOut', None, ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), @@ -978,6 +996,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -1001,6 +1020,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -1282,6 +1302,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -1305,6 +1326,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2097,6 +2119,7 @@ def mock_auth(one, two): # pylint: 
disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2119,6 +2142,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2267,6 +2291,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2293,6 +2318,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2318,6 +2344,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2341,6 +2368,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, @@ -2653,6 +2681,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CONTINUATION_OUT, FilingKey.CORRCTN, @@ -2694,6 +2723,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, From 5ac39b2cf87e863157b612cccab8f7a69df1b7ea Mon Sep 17 00:00:00 2001 From: Argus Chiu Date: Thu, 13 Mar 2025 09:15:03 -0700 Subject: [PATCH 114/133] 26472 Add App-Name as new allowable header and add error handling logging that includes new header when available (#3302) --- legal-api/src/legal_api/errorhandlers.py | 14 ++++++++++++-- legal-api/src/legal_api/resources/endpoints.py | 2 +- legal-api/src/legal_api/utils/util.py | 2 +- legal-api/tests/unit/test_error_handlers.py | 4 ++-- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/legal-api/src/legal_api/errorhandlers.py b/legal-api/src/legal_api/errorhandlers.py index f3335c4ccd..3ea6a9b0cf 100644 --- a/legal-api/src/legal_api/errorhandlers.py +++ b/legal-api/src/legal_api/errorhandlers.py @@ -20,9 +20,10 @@ """ import logging +import re import sys -from flask import jsonify +from flask import jsonify, request from werkzeug.exceptions import HTTPException from werkzeug.routing import RoutingException @@ -47,6 +48,11 @@ def handle_http_error(error): if isinstance(error, RoutingException): return error + app_name = request.headers.get('App-Name', 'unknown') + if not re.match(r'^[a-zA-Z0-9_-]+$', app_name): + app_name = 'invalid app name' + logger.error('HTTP error from app: %s', app_name, exc_info=sys.exc_info()) + response = 
jsonify({'message': error.description}) response.status_code = error.code return response @@ -58,7 +64,11 @@ def handle_uncaught_error(error: Exception): # pylint: disable=unused-argument Since the handler suppresses the actual exception, log it explicitly to ensure it's logged and recorded in Sentry. """ - logger.error('Uncaught exception', exc_info=sys.exc_info()) + app_name = request.headers.get('App-Name', 'unknown') + if not re.match(r'^[a-zA-Z0-9_-]+$', app_name): + app_name = 'invalid app name' + logger.error('Uncaught exception from app: %s', app_name, exc_info=sys.exc_info()) + response = jsonify({'message': 'Internal server error'}) response.status_code = 500 return response diff --git a/legal-api/src/legal_api/resources/endpoints.py b/legal-api/src/legal_api/resources/endpoints.py index 4f3963a71a..2a0521e72c 100644 --- a/legal-api/src/legal_api/resources/endpoints.py +++ b/legal-api/src/legal_api/resources/endpoints.py @@ -107,7 +107,7 @@ def _redirect(self, path, code=302): def _set_access_control_header(self, response): # pylint: disable=unused-variable response.headers['Access-Control-Allow-Origin'] = '*' - response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type' + response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type, App-Name' def _mount_endpoints(self): """Mount the endpoints of the system.""" diff --git a/legal-api/src/legal_api/utils/util.py b/legal-api/src/legal_api/utils/util.py index 6f430ef3ac..a98d953b15 100644 --- a/legal-api/src/legal_api/utils/util.py +++ b/legal-api/src/legal_api/utils/util.py @@ -26,7 +26,7 @@ def options(self, *args, **kwargs): # pylint: disable=unused-argument return {'Allow': 'GET'}, 200, \ {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': methods, - 'Access-Control-Allow-Headers': 'Authorization, Content-Type'} + 'Access-Control-Allow-Headers': 'Authorization, Content-Type, App-Name'} setattr(func, 'options', options) return func diff --git a/legal-api/tests/unit/test_error_handlers.py b/legal-api/tests/unit/test_error_handlers.py index 3e93799c25..5c20d9e40a 100644 --- a/legal-api/tests/unit/test_error_handlers.py +++ b/legal-api/tests/unit/test_error_handlers.py @@ -32,7 +32,7 @@ def test_handle_http_error_pass_through_routing_exception(): # pylint: disable= def test_handle_http_error_pass(app): """Assert that the RoutingException is passed through the handler.""" - with app.app_context(): + with app.test_request_context(): err = HTTPException(description='description') err.code = 200 response = errorhandlers.handle_http_error(err) @@ -47,7 +47,7 @@ def test_handle_uncaught_error(app, caplog): and log an ERROR of an uncaught exception. Unhandled exceptions should get ticketed and managed. 
""" - with app.app_context(): + with app.test_request_context(): # logger = errorhandlers.logger caplog.set_level(errorhandlers.logging.ERROR, logger=errorhandlers.logger.name) resp = errorhandlers.handle_uncaught_error(Exception()) From 0b9a5039425c715ddd4e828d77f06796ff2d30a4 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Thu, 13 Mar 2025 12:20:37 -0700 Subject: [PATCH 115/133] 26409 fix (#3303) --- data-tool/.corps.env.sample | 3 +++ data-tool/flows/tombstone/tombstone_utils.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/data-tool/.corps.env.sample b/data-tool/.corps.env.sample index fa83a184cd..58a436fa39 100644 --- a/data-tool/.corps.env.sample +++ b/data-tool/.corps.env.sample @@ -44,6 +44,9 @@ UPDATE_ENTITY=False AFFILIATE_ENTITY=False AFFILIATE_ENTITY_ACCOUNT_ID= +USE_CUSTOM_CONTACT_EMAIL=False +CUSTOM_CONTACT_EMAIL= + AUTH_SVC_URL= ACCOUNT_SVC_AUTH_URL= diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index fea459709c..b517043ad3 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -365,7 +365,7 @@ def format_filings_data(data: dict) -> dict: effective_date = x['ce_effective_dt_str'] or x['f_effective_dt_str'] or x['e_event_dt_str'] if filing_type == 'annualReport': - effective_date = data['f_period_end_dt_str'] + effective_date = x['f_period_end_dt_str'] filing_date = x['ce_effective_dt_str'] or x['e_event_dt_str'] trigger_date = x['e_trigger_dt_str'] From 9eff953c376d66766b51799f587459e6660ce4ea Mon Sep 17 00:00:00 2001 From: Kial Date: Fri, 14 Mar 2025 08:28:02 -0400 Subject: [PATCH 116/133] API - add founding date to slim business resp (#3299) * API - add founding date to slim business resp Signed-off-by: Kial Jinnah * update test for slim response Signed-off-by: Kial Jinnah --------- Signed-off-by: Kial Jinnah --- legal-api/src/legal_api/models/business.py | 2 +- legal-api/tests/unit/models/test_business.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 169e260362..79e82fb26e 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -594,7 +594,6 @@ def json(self, slim=False): **slim_json, 'arMinDate': ar_min_date.isoformat() if ar_min_date else '', 'arMaxDate': ar_max_date.isoformat() if ar_max_date else '', - 'foundingDate': self.founding_date.isoformat() if self.founding_date else '', 'hasRestrictions': self.restriction_ind, 'complianceWarnings': self.compliance_warnings, 'warnings': self.warnings, @@ -622,6 +621,7 @@ def _slim_json(self): """Return a smaller/faster version of the business json.""" d = { 'adminFreeze': self.admin_freeze or False, + 'foundingDate': self.founding_date.isoformat() if self.founding_date else '', 'goodStanding': self.good_standing, 'identifier': self.identifier, 'inDissolution': self.in_dissolution, diff --git a/legal-api/tests/unit/models/test_business.py b/legal-api/tests/unit/models/test_business.py index 00f157b84b..b061f66123 100644 --- a/legal-api/tests/unit/models/test_business.py +++ b/legal-api/tests/unit/models/test_business.py @@ -324,6 +324,7 @@ def test_business_json(session): # slim json d_slim = { 'adminFreeze': False, + 'foundingDate': '1970-01-01T00:00:00+00:00', 'goodStanding': False, # good standing will be false because the epoch is 1970 'identifier': 'CP1234567', 'inDissolution': False, From 
9fe52de4a973834ae162aae0ecc1040b5945e03a Mon Sep 17 00:00:00 2001 From: Arwen Qin <122495122+ArwenQin@users.noreply.github.com> Date: Fri, 14 Mar 2025 12:44:33 -0700 Subject: [PATCH 117/133] 26487 - Support save amalgamation out draft (#3305) * support save the amalg out draft Signed-off-by: Qin * add unit tests Signed-off-by: Qin --------- Signed-off-by: Qin --- legal-api/src/legal_api/core/filing.py | 1 + legal-api/src/legal_api/core/meta/filing.py | 15 +++++ legal-api/src/legal_api/models/filing.py | 14 +++++ legal-api/src/legal_api/services/authz.py | 7 +++ .../v1/test_business_filings/test_filings.py | 7 +++ .../tests/unit/resources/v2/test_business.py | 4 ++ .../v2/test_business_filings/test_filings.py | 7 +++ .../tests/unit/services/test_authorization.py | 59 ++++++++++++++++++- 8 files changed, 112 insertions(+), 2 deletions(-) diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index f38a06a4a2..b818fb7020 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -68,6 +68,7 @@ class FilingTypes(str, Enum): AGMLOCATIONCHANGE = 'agmLocationChange' ALTERATION = 'alteration' AMALGAMATIONAPPLICATION = 'amalgamationApplication' + AMALGAMATIONOUT = 'amalgamationOut' AMENDEDAGM = 'amendedAGM' AMENDEDANNUALREPORT = 'amendedAnnualReport' AMENDEDCHANGEOFDIRECTORS = 'amendedChangeOfDirectors' diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 8e9c60aa48..5437743cc1 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -189,6 +189,21 @@ class FilingTitles(str, Enum): } } }, + 'amalgamationOut': { + 'name': 'amalgamationOut', + 'title': 'Amalgamation Out', + 'displayName': 'Amalgamation Out', + 'codes': { + 'BC': 'AMALO', + 'BEN': 'AMALO', + 'ULC': 'AMALO', + 'CC': 'AMALO', + 'C': 'AMALO', + 'CBEN': 'AMALO', + 'CUL': 'AMALO', + 'CCC': 'AMALO' + } + }, 'annualReport': { 'name': 'annualReport', 'title': 'Annual Report Filing', diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index 858db218ac..8996ac1099 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -156,6 +156,20 @@ class Source(Enum): }, } }, + 'amalgamationOut': { + 'name': 'amalgamationOut', + 'title': 'Amalgamation Out', + 'codes': { + 'BC': 'AMALO', + 'BEN': 'AMALO', + 'ULC': 'AMALO', + 'CC': 'AMALO', + 'C': 'AMALO', + 'CBEN': 'AMALO', + 'CUL': 'AMALO', + 'CCC': 'AMALO' + } + }, 'annualReport': { 'name': 'annualReport', 'title': 'Annual Report Filing', diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index a5b6955835..5639ef238e 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -201,6 +201,13 @@ def get_allowable_filings_dict(): } } }, + 'amalgamationOut': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.NOT_IN_GOOD_STANDING], + 'completedFilings': ['consentAmalgamationOut'] + } + }, 'annualReport': { 'legalTypes': ['CP', 'BEN', 'BC', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { diff --git a/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py index a53f72c5d5..89d0dfce38 100644 --- a/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py +++ 
b/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py @@ -1031,6 +1031,9 @@ def test_calc_annual_report_date(session, client, jwt): SPECIAL_RESOLUTION_NO_CON_FILING = copy.deepcopy(CP_SPECIAL_RESOLUTION_TEMPLATE) del SPECIAL_RESOLUTION_NO_CON_FILING['filing']['changeOfName'] +AMALGAMATION_OUT_FILING = copy.deepcopy(FILING_HEADER) +AMALGAMATION_OUT_FILING['filing']['amalgamationOut'] = {} + CONTINUATION_OUT_FILING = copy.deepcopy(FILING_HEADER) CONTINUATION_OUT_FILING['filing']['continuationOut'] = {} @@ -1098,6 +1101,10 @@ def _fee_code_asserts(business, filing_json: dict, multiple_fee_codes, expected_ False, []), ('CP1234567', CP_SPECIAL_RESOLUTION_TEMPLATE, 'specialResolution', Business.LegalTypes.COOP.value, False, ['SPRLN', 'OTCON']), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.COMP.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BCOMP.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_CCC.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.COMP.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BCOMP.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, []), diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index af390c931f..c374f570a0 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -600,6 +600,10 @@ def test_get_could_file(session, client, jwt): "displayName": "Address Change", "name": "changeOfAddress" }, + { + "displayName": "Amalgamation Out", + "name": "amalgamationOut" + }, { "displayName": "Director Change", "name": "changeOfDirectors" diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 4cc19f2072..9867c22f39 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -1242,6 +1242,9 @@ def test_calc_annual_report_date(session, client, jwt): RESTORATION_LIMITED_TO_FULL_FILING = copy.deepcopy(RESTORATION_FILING) RESTORATION_LIMITED_TO_FULL_FILING['filing']['restoration']['type'] = 'limitedRestorationToFull' +AMALGAMATION_OUT_FILING = copy.deepcopy(FILING_HEADER) +AMALGAMATION_OUT_FILING['filing']['amalgamationOut'] = {} + CONTINUATION_OUT_FILING = copy.deepcopy(FILING_HEADER) CONTINUATION_OUT_FILING['filing']['continuationOut'] = {} @@ -1320,6 +1323,10 @@ def _get_expected_fee_code(free, filing_name, filing_json: dict, legal_type): ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.COMP.value, False, [], False), ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.BC_ULC_COMPANY.value, False, [], False), ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.BC_CCC.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BCOMP.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_ULC_COMPANY.value, 
False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.COMP.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_CCC.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BCOMP.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.COMP.value, False, [], False), diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 6a1f85f6ad..eb3b2545ec 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -31,6 +31,7 @@ AGM_EXTENSION, AGM_LOCATION_CHANGE, ALTERATION_FILING_TEMPLATE, + AMALGAMATION_OUT, ANNUAL_REPORT, CHANGE_OF_REGISTRATION_TEMPLATE, CONSENT_AMALGAMATION_OUT, @@ -139,6 +140,7 @@ class FilingKey(str, Enum): ADM_DISS = 'ADM_DISS' VOL_DISS_FIRMS = 'VOL_DISS_FIRMS' ADM_DISS_FIRMS = 'ADM_DISS_FIRMS' + AMALGAMATION_OUT = 'AMALGAMATION_OUT' REGISTRARS_NOTATION = 'REGISTRARS_NOTATION' REGISTRARS_ORDER = 'REGISTRARS_ORDER' SPECIAL_RESOLUTION = 'SPECIAL_RESOLUTION' @@ -190,6 +192,7 @@ class FilingKey(str, Enum): 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS_FIRMS: {'displayName': 'Statement of Dissolution', 'feeCode': 'DIS_ADM', 'name': 'dissolution', 'type': 'administrative'}, + FilingKey.AMALGAMATION_OUT: {'displayName': 'Amalgamation Out', 'feeCode': 'AMALO', 'name': 'amalgamationOut'}, FilingKey.REGISTRARS_NOTATION: {'displayName': "Registrar's Notation", 'feeCode': 'NOFEE', 'name': 'registrarsNotation'}, FilingKey.REGISTRARS_ORDER: {'displayName': "Registrar's Order", 'feeCode': 'NOFEE', 'name': 'registrarsOrder'}, @@ -280,6 +283,7 @@ class FilingKey(str, Enum): FilingKey.AGM_EXTENSION: {'displayName': 'Request for AGM Extension', 'feeCode': 'AGMDT', 'name': 'agmExtension'}, FilingKey.AGM_LOCATION_CHANGE: {'displayName': 'AGM Location Change', 'feeCode': 'AGMLC', 'name': 'agmLocationChange'}, FilingKey.ALTERATION: {'displayName': 'Alteration', 'feeCode': 'ALTER', 'name': 'alteration'}, + FilingKey.AMALGAMATION_OUT: {'displayName': 'Amalgamation Out', 'feeCode': 'AMALO', 'name': 'amalgamationOut'}, FilingKey.CONSENT_AMALGAMATION_OUT: {'displayName': '6-Month Consent to Amalgamate Out', 'feeCode': 'IAMGO', 'name': 'consentAmalgamationOut'}, FilingKey.CONSENT_CONTINUATION_OUT: {'displayName': '6-Month Consent to Continue Out', 'feeCode': 'CONTO', @@ -353,6 +357,9 @@ class FilingKey(str, Enum): AGM_LOCATION_CHANGE_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) AGM_LOCATION_CHANGE_FILING_TEMPLATE['filing']['agmLocationChange'] = AGM_LOCATION_CHANGE +AMALGAMATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +AMALGAMATION_OUT_TEMPLATE['filing']['amalgamationOut'] = AMALGAMATION_OUT + RESTORATION_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) RESTORATION_FILING_TEMPLATE['filing']['restoration'] = RESTORATION @@ -382,6 +389,7 @@ class FilingKey(str, Enum): 'agmExtension': AGM_EXTENSION_FILING_TEMPLATE, 'agmLocationChange': AGM_LOCATION_CHANGE_FILING_TEMPLATE, 'alteration': ALTERATION_FILING_TEMPLATE, + 'amalgamationOut': AMALGAMATION_OUT_TEMPLATE, 'correction': CORRECTION_AR, 'changeOfRegistration': CHANGE_OF_REGISTRATION_TEMPLATE, 'restoration.limitedRestoration': RESTORATION_FILING_TEMPLATE, @@ -572,14 +580,14 @@ def 
mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 'registrarsNotation', 'registrarsOrder', 'specialResolution']), ('staff_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'amalgamationOut','annualReport', 'appointReceiver', 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentAmalgamationOut', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'appointReceiver', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']},'amalgamationOut', 'annualReport', 'appointReceiver', 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentAmalgamationOut', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', @@ -660,6 +668,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_allowed', Business.State.ACTIVE, 'amalgamationApplication', None, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'amalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_active', Business.State.ACTIVE, 'amalgamationOut', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'annualReport', None, ['CP', 'BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), ('staff_active', Business.State.ACTIVE, 'annualReport', None, @@ -2701,6 +2714,40 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_success', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + [STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [True, True], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.AGM_EXTENSION, + FilingKey.AGM_LOCATION_CHANGE, + FilingKey.ALTERATION, + FilingKey.AMALGAMATION_REGULAR, + FilingKey.AMALGAMATION_VERTICAL, + FilingKey.AMALGAMATION_HORIZONTAL, + FilingKey.AMALGAMATION_OUT, + FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, + FilingKey.COA_CORPS, + FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, + FilingKey.CONSENT_CONTINUATION_OUT, + FilingKey.CORRCTN, + FilingKey.COURT_ORDER, + FilingKey.VOL_DISS, + FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_success', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + 
[STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [True, False], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.AMALGAMATION_OUT, + FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), ('staff_active_corps_completed_filing_fail', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['consentContinuationOut', 'consentContinuationOut'], [None, None], [False, False], expected_lookup([FilingKey.ADMN_FRZE, @@ -2709,6 +2756,14 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_fail', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + [STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [False, False], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), ('staff_active_corps_completed_filing_fail', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], [None, None], [None, None], [False, False], expected_lookup([FilingKey.ADMN_FRZE, From 8cec034ee689713d570706ade371a38922b5eb1f Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Fri, 14 Mar 2025 15:29:54 -0700 Subject: [PATCH 118/133] 26511 amalgamation out model changes (#3308) --- .../versions/fe158a53151f_amalgamation_out.py | 38 +++++++++++++++++++ legal-api/src/legal_api/models/business.py | 34 +++++++++-------- .../models/consent_continuation_out.py | 20 ++++++++-- .../models/test_consent_continuation_out.py | 2 + .../test_consent_continuation_out.py | 1 + .../validations/test_continuation_out.py | 1 + 6 files changed, 77 insertions(+), 19 deletions(-) create mode 100644 legal-api/migrations/versions/fe158a53151f_amalgamation_out.py diff --git a/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py b/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py new file mode 100644 index 0000000000..76b04425be --- /dev/null +++ b/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py @@ -0,0 +1,38 @@ +"""amalgamation_out + +Revision ID: fe158a53151f +Revises: 24b59f535ec3 +Create Date: 2025-03-14 11:34:01.606149 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'fe158a53151f' +down_revision = '24b59f535ec3' +branch_labels = None +depends_on = None + +consent_out_types_enum = postgresql.ENUM('continuation_out', 'amalgamation_out', name='consent_out_types') + + +def upgrade(): + # add enum values + consent_out_types_enum.create(op.get_bind(), checkfirst=True) + + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('businesses', sa.Column('amalgamation_out_date', sa.DateTime(timezone=True), nullable=True)) + op.add_column('businesses_version', sa.Column('amalgamation_out_date', sa.DateTime(timezone=True), autoincrement=False, nullable=True)) + op.add_column('consent_continuation_outs', sa.Column('consent_type', consent_out_types_enum, nullable=False, server_default='continuation_out')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('consent_continuation_outs', 'consent_type') + consent_out_types_enum.drop(op.get_bind(), checkfirst=True) + op.drop_column('businesses_version', 'amalgamation_out_date') + op.drop_column('businesses', 'amalgamation_out_date') + # ### end Alembic commands ### diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 79e82fb26e..be96c25552 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -189,11 +189,16 @@ class AssociationTypes(Enum): 'include_properties': [ 'id', 'admin_freeze', + 'amalgamation_out_date', 'association_type', + 'continuation_out_date', 'dissolution_date', 'fiscal_year_end_date', + 'foreign_jurisdiction_region', + 'foreign_legal_name', 'founding_date', 'identifier', + 'jurisdiction', 'last_agm_date', 'last_ar_date', 'last_ar_year', @@ -207,22 +212,18 @@ class AssociationTypes(Enum): 'last_tr_year', 'legal_name', 'legal_type', + 'naics_code', + 'naics_description', + 'naics_key', + 'no_dissolution', + 'restoration_expiry_date', 'restriction_ind', + 'send_ar_ind', 'state', + 'start_date', 'state_filing_id', 'submitter_userid', 'tax_id', - 'naics_key', - 'naics_code', - 'naics_description', - 'no_dissolution', - 'start_date', - 'jurisdiction', - 'foreign_jurisdiction_region', - 'foreign_legal_name', - 'send_ar_ind', - 'restoration_expiry_date', - 'continuation_out_date' ] } @@ -242,6 +243,7 @@ class AssociationTypes(Enum): restoration_expiry_date = db.Column('restoration_expiry_date', db.DateTime(timezone=True)) dissolution_date = db.Column('dissolution_date', db.DateTime(timezone=True), default=None) continuation_out_date = db.Column('continuation_out_date', db.DateTime(timezone=True)) + amalgamation_out_date = db.Column('amalgamation_out_date', db.DateTime(timezone=True)) _identifier = db.Column('identifier', db.String(10), index=True) tax_id = db.Column('tax_id', db.String(15), index=True) fiscal_year_end_date = db.Column('fiscal_year_end_date', db.DateTime(timezone=True), default=datetime.utcnow) @@ -525,10 +527,10 @@ def _has_no_transition_filed_after_restoration(self) -> bool: exists().where( and_( transition_filing.business_id == self.id, - transition_filing._filing_type == \ - CoreFiling.FilingTypes.TRANSITION.value, # pylint: disable=protected-access - transition_filing._status == \ - Filing.Status.COMPLETED.value, # pylint: disable=protected-access + (transition_filing._filing_type == # pylint: disable=protected-access + CoreFiling.FilingTypes.TRANSITION.value), + (transition_filing._status == # pylint: disable=protected-access + Filing.Status.COMPLETED.value), transition_filing.effective_date.between( restoration_filing.effective_date, restoration_filing_effective_cutoff @@ -663,6 +665,8 @@ def _extend_json(self, d): d['restorationExpiryDate'] = LegislationDatetime.format_as_legislation_date(self.restoration_expiry_date) if self.continuation_out_date: d['continuationOutDate'] = LegislationDatetime.format_as_legislation_date(self.continuation_out_date) + if self.amalgamation_out_date: + d['amalgamationOutDate'] = LegislationDatetime.format_as_legislation_date(self.amalgamation_out_date) if self.jurisdiction: d['jurisdiction'] = self.jurisdiction diff --git a/legal-api/src/legal_api/models/consent_continuation_out.py b/legal-api/src/legal_api/models/consent_continuation_out.py index 4247601dc4..fab346af64 100644 --- a/legal-api/src/legal_api/models/consent_continuation_out.py +++ b/legal-api/src/legal_api/models/consent_continuation_out.py 
@@ -14,19 +14,29 @@ """This model holds data for consent continuation out.""" from __future__ import annotations +from enum import auto from typing import Optional from sqlalchemy.orm import backref +from ..utils.base import BaseEnum from .db import db class ConsentContinuationOut(db.Model): # pylint: disable=too-few-public-methods """This class manages the consent continuation out for businesses.""" + # pylint: disable=invalid-name + class ConsentTypes(BaseEnum): + """Enum for the consent type.""" + + continuation_out = auto() + amalgamation_out = auto() + __tablename__ = 'consent_continuation_outs' id = db.Column('id', db.Integer, unique=True, primary_key=True) + consent_type = db.Column('consent_type', db.Enum(ConsentTypes), nullable=False) foreign_jurisdiction = db.Column('foreign_jurisdiction', db.String(10)) foreign_jurisdiction_region = db.Column('foreign_jurisdiction_region', db.String(10)) expiry_date = db.Column('expiry_date', db.DateTime(timezone=True)) @@ -46,11 +56,13 @@ def save(self): def get_active_cco(business_id, expiry_date, foreign_jurisdiction=None, - foreign_jurisdiction_region=None) -> list[ConsentContinuationOut]: + foreign_jurisdiction_region=None, + consent_type=ConsentTypes.continuation_out) -> list[ConsentContinuationOut]: """Get a list of active consent_continuation_outs linked to the given business_id.""" - query = db.session.query(ConsentContinuationOut). \ - filter(ConsentContinuationOut.business_id == business_id). \ - filter(ConsentContinuationOut.expiry_date >= expiry_date) + query = (db.session.query(ConsentContinuationOut). + filter(ConsentContinuationOut.business_id == business_id). + filter(ConsentContinuationOut.consent_type == consent_type). + filter(ConsentContinuationOut.expiry_date >= expiry_date)) if foreign_jurisdiction: query = query.filter(ConsentContinuationOut.foreign_jurisdiction == foreign_jurisdiction.upper()) diff --git a/legal-api/tests/unit/models/test_consent_continuation_out.py b/legal-api/tests/unit/models/test_consent_continuation_out.py index e5e98484f0..7c1cd0b208 100644 --- a/legal-api/tests/unit/models/test_consent_continuation_out.py +++ b/legal-api/tests/unit/models/test_consent_continuation_out.py @@ -45,6 +45,7 @@ def test_consent_continuation_out_save(session): expiry_date = get_cco_expiry_date(filing.effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = 'CA' consent_continuation_out.foreign_jurisdiction_region = 'AB' consent_continuation_out.expiry_date = expiry_date @@ -65,6 +66,7 @@ def test_get_active_cco(session): expiry_date = get_cco_expiry_date(filing.effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = 'CA' consent_continuation_out.foreign_jurisdiction_region = 'AB' consent_continuation_out.expiry_date = expiry_date diff --git a/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py b/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py index bea64b1a32..e1272af49f 100644 --- a/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py +++ b/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py @@ -161,6 +161,7 @@ def test_validate_existing_cco(session, test_name, expected_code, message): 
foreign_jurisdiction = filing['filing']['consentContinuationOut']['foreignJurisdiction'] consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = foreign_jurisdiction.get('country') consent_continuation_out.foreign_jurisdiction_region = foreign_jurisdiction.get('region').upper() consent_continuation_out.expiry_date = get_cco_expiry_date(effective_date) diff --git a/legal-api/tests/unit/services/filings/validations/test_continuation_out.py b/legal-api/tests/unit/services/filings/validations/test_continuation_out.py index 62e86c0127..1591b6fa4b 100644 --- a/legal-api/tests/unit/services/filings/validations/test_continuation_out.py +++ b/legal-api/tests/unit/services/filings/validations/test_continuation_out.py @@ -39,6 +39,7 @@ def _create_consent_continuation_out(business, foreign_jurisdiction, effective_d filing = factory_completed_filing(business, filing_dict, filing_date=effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = foreign_jurisdiction.get('country') region = foreign_jurisdiction.get('region') From e816a83e4d3fcf0f999c0e023fb6276e16aba353 Mon Sep 17 00:00:00 2001 From: Vysakh Menon Date: Fri, 14 Mar 2025 15:43:26 -0700 Subject: [PATCH 119/133] 26511 add consent_type to cco (#3309) --- .../filing_processors/consent_continuation_out.py | 12 ++++++------ .../test_worker/test_consent_continuation_out.py | 5 +++-- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py index 8165b465c1..529cad4b04 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py @@ -33,7 +33,7 @@ def process(business: Business, cco_filing: Filing, filing: Dict, filing_meta: F foreign_jurisdiction = filing['consentContinuationOut']['foreignJurisdiction'] consent_continuation_out = ConsentContinuationOut() - + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out country = foreign_jurisdiction.get('country').upper() consent_continuation_out.foreign_jurisdiction = country @@ -48,11 +48,11 @@ def process(business: Business, cco_filing: Filing, filing: Dict, filing_meta: F consent_continuation_out.business_id = business.id business.consent_continuation_outs.append(consent_continuation_out) - filing_meta.consent_continuation_out = {} - filing_meta.consent_continuation_out = {**filing_meta.consent_continuation_out, - **{'country': country, - 'region': region, - 'expiry': expiry_date.isoformat()}} + filing_meta.consent_continuation_out = { + 'country': country, + 'region': region, + 'expiry': expiry_date.isoformat() + } def get_expiry_date(filing: Filing): diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py b/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py index fdccf471cb..836eec1e58 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py @@ -42,8 +42,8 
@@ async def test_worker_consent_continuation_out(app, session, mocker, test_name, identifier = 'BC1234567' business = create_business(identifier, legal_type='BC') business.save() - business_id=business.id - + business_id = business.id + filing_json = copy.deepcopy(FILING_TEMPLATE) filing_json['filing']['business']['identifier'] = identifier filing_json['filing']['header']['name'] = 'consentContinuationOut' @@ -79,6 +79,7 @@ async def test_worker_consent_continuation_out(app, session, mocker, test_name, cco = ConsentContinuationOut.get_active_cco(business_id, expiry_date_utc) assert cco + assert cco[0].consent_type == ConsentContinuationOut.ConsentTypes.continuation_out assert cco[0].foreign_jurisdiction == \ filing_json['filing']['consentContinuationOut']['foreignJurisdiction']['country'] assert cco[0].foreign_jurisdiction_region == \ From 403a9d6664c163a12fad447abaeed77c39c15d38 Mon Sep 17 00:00:00 2001 From: Arwen Qin <122495122+ArwenQin@users.noreply.github.com> Date: Mon, 17 Mar 2025 13:29:22 -0700 Subject: [PATCH 120/133] Fix 26487 - fix unit test (#3311) Signed-off-by: Qin --- legal-api/tests/unit/resources/v2/test_business.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index c374f570a0..f1fb04d860 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -584,6 +584,10 @@ def test_get_could_file(session, client, jwt): "name": "amalgamationApplication", "type": "horizontal" }, + { + "displayName": "Amalgamation Out", + "name": "amalgamationOut" + }, { "displayName": "Annual Report", "name": "annualReport" @@ -600,10 +604,6 @@ def test_get_could_file(session, client, jwt): "displayName": "Address Change", "name": "changeOfAddress" }, - { - "displayName": "Amalgamation Out", - "name": "amalgamationOut" - }, { "displayName": "Director Change", "name": "changeOfDirectors" From 2f52fbfc6ffbb5aa9c52f59c7287b02d2641950a Mon Sep 17 00:00:00 2001 From: Hongjing Chen Date: Mon, 17 Mar 2025 14:25:58 -0700 Subject: [PATCH 121/133] 26403 - create verify script Signed-off-by: Hongjing Chen --- data-tool/.corps.env.sample | 5 ++ data-tool/Makefile | 4 + data-tool/flows/config.py | 5 ++ data-tool/flows/corps_verify_flow.py | 105 +++++++++++++++++++++++++++ 4 files changed, 119 insertions(+) create mode 100644 data-tool/flows/corps_verify_flow.py diff --git a/data-tool/.corps.env.sample b/data-tool/.corps.env.sample index 58a436fa39..87df82910b 100644 --- a/data-tool/.corps.env.sample +++ b/data-tool/.corps.env.sample @@ -72,6 +72,11 @@ TOMBSTONE_BATCH_SIZE=300 DELETE_BATCHES=1 DELETE_BATCH_SIZE=300 +VERIFY_BATCH_SIZE=300 + ## delete corps record in auth db, corp_processing of colin extract DELETE_AUTH_RECORDS=False DELETE_CORP_PROCESSING_RECORDS=True + +# verify script +VERIFY_SUMMARY_PATH=results.csv diff --git a/data-tool/Makefile b/data-tool/Makefile index a19866731c..3c50a2e04a 100644 --- a/data-tool/Makefile +++ b/data-tool/Makefile @@ -105,6 +105,10 @@ run-tombstone-migration: ## Run corp tombstone migration flow . $(VENV_DIR)/bin/activate && \ python flows/corps_tombstone_flow.py +run-tombstone-verify: ## Run corp tombstone verify flow + . 
$(VENV_DIR)/bin/activate && \
+	python flows/corps_verify_flow.py
+
 
 #################################################################################
 # Self Documenting Commands                                                     #
diff --git a/data-tool/flows/config.py b/data-tool/flows/config.py
index 59cc3c579a..fa13f0e4e0 100644
--- a/data-tool/flows/config.py
+++ b/data-tool/flows/config.py
@@ -142,6 +142,11 @@ class _Config(): # pylint: disable=too-few-public-methods
     TOMBSTONE_BATCH_SIZE = os.getenv('TOMBSTONE_BATCH_SIZE')
     TOMBSTONE_BATCH_SIZE = int(TOMBSTONE_BATCH_SIZE) if TOMBSTONE_BATCH_SIZE.isnumeric() else 0
 
+    # verify flow
+    VERIFY_BATCH_SIZE = os.getenv('VERIFY_BATCH_SIZE')
+    VERIFY_BATCH_SIZE = int(VERIFY_BATCH_SIZE) if VERIFY_BATCH_SIZE.isnumeric() else 0
+    VERIFY_SUMMARY_PATH = os.getenv('VERIFY_SUMMARY_PATH')
+
     TESTING = False
     DEBUG = False
diff --git a/data-tool/flows/corps_verify_flow.py b/data-tool/flows/corps_verify_flow.py
new file mode 100644
index 0000000000..4048a34922
--- /dev/null
+++ b/data-tool/flows/corps_verify_flow.py
@@ -0,0 +1,105 @@
+import math
+
+import pandas as pd
+from common.init_utils import colin_init, get_config, lear_init
+from prefect import flow, task
+from sqlalchemy import Engine, text
+
+
+# TODO: adjust clause in different phases
+where_clause = """
+1 = 1
+"""
+
+colin_cnt_query = f"""
+    SELECT COUNT(*) FROM corporation c WHERE {where_clause}
+    """
+
+colin_query = f"""
+    SELECT corp_num FROM corporation c WHERE {where_clause} ORDER BY corp_num LIMIT :limit OFFSET :offset
+"""
+
+lear_query = f"""
+    SELECT colin_corps.identifier FROM UNNEST(ARRAY[:identifiers]) AS colin_corps(identifier)
+    LEFT JOIN businesses b on colin_corps.identifier = b.identifier
+    WHERE b.identifier IS NULL
+"""
+
+
+@task(name='1-Count')
+def get_verify_count(colin_engine: Engine) -> int:
+    with colin_engine.connect() as colin_conn:
+        rs = colin_conn.execute(text(colin_cnt_query))
+        total = rs.scalar()
+        return total
+
+
+@task(name='2-Verify')
+def verify(colin_engine: Engine, lear_engine: Engine, limit: int, offset: int) -> list:
+
+    identifiers = None
+
+    with colin_engine.connect() as colin_conn:
+        rs = colin_conn.execute(text(colin_query), {'limit': limit, 'offset': offset})
+        colin_results = rs.fetchall()
+        identifiers = [row[0] for row in colin_results]
+
+    if identifiers:
+        with lear_engine.connect() as lear_conn:
+            rs = lear_conn.execute(text(lear_query), {'identifiers': identifiers})
+            lear_results = rs.fetchall()
+            missing = [row[0] for row in lear_results]
+            return missing
+
+    return []
+
+
+@flow(
+    name='Corps-Tombstone-Verify-Flow',
+    log_prints=True,
+    persist_result=False,
+)
+def verify_flow():
+    try:
+        config = get_config()
+        colin_engine = colin_init(config)
+        lear_engine = lear_init(config)
+
+        total = get_verify_count(colin_engine)
+
+        if config.VERIFY_BATCH_SIZE <= 0:
+            raise ValueError('VERIFY_BATCH_SIZE must be explicitly set to a positive integer')
+        batch_size = config.VERIFY_BATCH_SIZE
+        batches = math.ceil(total/batch_size)
+
+        print(f'🚀 Verifying {total} businesses...')
+
+        cnt = 0
+        offset = 0
+        results = []
+        futures = []
+        while cnt < batches:
+            print(f'🚀 Running {cnt} round...')
+            futures.append(verify.submit(colin_engine, lear_engine, batch_size, offset))
+            offset += batch_size
+            cnt += 1
+
+        for f in futures:
+            r = f.result()
+            results.extend(r)
+
+        print(f'🌟 Complete round {cnt}')
+
+        if summary_path:=config.VERIFY_SUMMARY_PATH:
+            df = pd.DataFrame(results, columns=['identifier'])
+            df.to_csv(summary_path, index=False)
+            print(f"🌰 Save {len(results)} corps which meet the selection criteria but don't exist in LEAR to {summary_path}")
+        else:
+            print(f"🌰 {len(results)} corps which meet the selection criteria don't exist in LEAR: {results}")
+
+    except Exception as e:
+        raise e
+
+
+if __name__ == '__main__':
+    verify_flow()

From a708d8aa4211303084b9dbab29cb6366f0fc33a2 Mon Sep 17 00:00:00 2001
From: Vysakh Menon
Date: Tue, 18 Mar 2025 16:11:41 -0700
Subject: [PATCH 122/133] 26511 Tombstone pipeline - consent, amalgamation out (#3313)

---
 data-tool/flows/corps_tombstone_flow.py      |  5 ++
 .../flows/tombstone/tombstone_mappings.py    | 19 ++---
 .../flows/tombstone/tombstone_queries.py     | 13 +--
 data-tool/flows/tombstone/tombstone_utils.py | 83 ++++++++++++++-----
 4 files changed, 83 insertions(+), 37 deletions(-)

diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py
index 527ce163f6..5ec57c67c9 100644
--- a/data-tool/flows/corps_tombstone_flow.py
+++ b/data-tool/flows/corps_tombstone_flow.py
@@ -262,6 +262,11 @@ def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id
             comment['staff_id'] = staff_id
             load_data(conn, 'comments', comment)
 
+        if cco_data := data['consent_continuation_out']:
+            cco_data['business_id'] = business_id
+            cco_data['filing_id'] = filing_id
+            load_data(conn, 'consent_continuation_outs', cco_data)
+
     # load epoch filing
     epoch_filing_data = build_epoch_filing(business_id)
     load_data(conn, 'filings', epoch_filing_data)
diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py
index a752ce0dbd..0b63893f00 100644
--- a/data-tool/flows/tombstone/tombstone_mappings.py
+++ b/data-tool/flows/tombstone/tombstone_mappings.py
@@ -19,8 +19,9 @@ class EventFilings(str, Enum):
     FILE_AM_SS = 'FILE_AM_SS'
     # TODO: FILE_AM_AR = 'FILE_AM_AR'
 
-    # TODO: Amalgamation Out Consent - unsupported
-    # TODO: Amalgamation Out - unsupported
+
+    FILE_IAMGO = 'FILE_IAMGO'
+    FILE_AMALO = 'FILE_AMALO'
 
     # Amalgamation Appliation
     FILE_AMALH = 'FILE_AMALH'
@@ -183,8 +184,8 @@ def has_value(cls, value):
     EventFilings.FILE_AM_RM: 'alteration',
     EventFilings.FILE_AM_SS: 'alteration',
 
-    # TODO: Amalgamation Out Consent - unsupported
-    # TODO: Amalgamation Out - unsupported
+    EventFilings.FILE_IAMGO: 'consentAmalgamationOut',
+    EventFilings.FILE_AMALO: 'amalgamationOut',
 
     EventFilings.FILE_AMALH: ['amalgamationApplication', 'horizontal'],
     EventFilings.FILE_AMALR: ['amalgamationApplication', 'regular'],
@@ -218,7 +219,7 @@ def has_value(cls, value):
     EventFilings.CONVAMAL_NULL: ['conversion', ('amalgamationApplication', 'unknown')],
 
     EventFilings.CONVCIN_NULL: ['conversion', 'continuationIn'],
-    EventFilings.CONVCOUT_NULL: ['conversion', 'continuationOut'], # TODO: continuation out
+    EventFilings.CONVCOUT_NULL: ['conversion', 'continuationOut'],
     EventFilings.CONVDS_NULL: ['conversion', ('dissolution', 'voluntary')],
     EventFilings.CONVDSF_NULL: ['conversion', ('dissolution', 'involuntary')],
     EventFilings.CONVDSL_NULL: 'conversion', # TODO: liquidation
@@ -308,11 +309,8 @@ def has_value(cls, value):
     EventFilings.FILE_AM_RM: 'Amendment - Receiver or Receiver Manager',
     EventFilings.FILE_AM_SS: 'Amendment - Share Structure',
 
-    # TODO: Amalgamation Out Consent - unsupported
-    # IAMGO: 'Application For Authorization For Amalgamation (into a Foreign Corporation) with 6 months consent granted'
-    # TODO: Amalgamation Out - unsupported
-    # AMALO: 'Record of Amalgamation'
-
+    EventFilings.FILE_IAMGO: 'Application For Authorization For Amalgamation (into a Foreign Corporation) with 6 months consent
granted', + EventFilings.FILE_AMALO: 'Record of Amalgamation', EventFilings.FILE_AMALH: 'Amalgamation Application Short Form (Horizontal)', EventFilings.FILE_AMALR: 'Amalgamation Application (Regular)', @@ -482,6 +480,7 @@ def has_value(cls, value): 'putBackOff', 'putBackOn', 'continuationOut', + 'amalgamationOut', # TODO: other state filings that lear doesn't support for now e.g. liquidation # ingore the following since we won't map to them diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 3b6b2cae56..420097abc4 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -97,7 +97,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): and cp.flow_name = '{flow_name}' and cp.environment = '{environment}' where 1 = 1 - {where_clause} + {where_clause} -- and c.corp_type_cd like 'BC%' -- some are 'Q%' -- and c.corp_num = 'BC0000621' -- state changes a lot -- and c.corp_num = 'BC0883637' -- one pary with multiple roles, but werid address_ids, same filing submitter but diff email @@ -616,10 +616,10 @@ def get_filings_query(corp_num): cn_new.corp_name as new_corp_name, -- continuation out - co.can_jur_typ_cd as cont_out_can_jur_typ_cd, + co.can_jur_typ_cd as out_can_jur_typ_cd, to_char(co.cont_out_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cont_out_dt, - co.othr_juri_desc as cont_out_othr_juri_desc, - co.home_company_nme as cont_out_home_company_nme + co.othr_juri_desc as out_othr_juri_desc, + co.home_company_nme as out_home_company_nme from event e left outer join filing f on e.event_id = f.event_id left outer join filing_user u on u.event_id = e.event_id @@ -764,9 +764,10 @@ def get_offices_held_query(corp_num): return query -def get_cont_out_query(corp_num): +def get_out_data_query(corp_num): query = f""" select + cs.state_type_cd, co.can_jur_typ_cd, to_char(co.cont_out_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cont_out_dt, co.othr_juri_desc, @@ -795,7 +796,7 @@ def get_corp_snapshot_filings_queries(config, corp_num): 'business_comments': get_business_comments_query(corp_num), 'filing_comments': get_filing_comments_query(corp_num), 'in_dissolution': get_in_dissolution_query(corp_num), - 'cont_out': get_cont_out_query(corp_num), + 'out_data': get_out_data_query(corp_num), # continuation/amalgamation out } return queries diff --git a/data-tool/flows/tombstone/tombstone_utils.py b/data-tool/flows/tombstone/tombstone_utils.py index b517043ad3..b5951b8443 100644 --- a/data-tool/flows/tombstone/tombstone_utils.py +++ b/data-tool/flows/tombstone/tombstone_utils.py @@ -1,8 +1,9 @@ import copy +import datedelta import json from datetime import datetime, timezone from decimal import Decimal -from typing import Optional +from typing import Final, Optional import pandas as pd import pytz @@ -22,7 +23,7 @@ EventFilings) all_unsupported_types = set() - +date_format_with_tz: Final = '%Y-%m-%d %H:%M:%S%z' def format_business_data(data: dict) -> dict: business_data = data['businesses'][0] @@ -376,6 +377,7 @@ def format_filings_data(data: dict) -> dict: filing_body = copy.deepcopy(FILING['filings']) jurisdiction = None amalgamation = None + consent_continuation_out = None user_id = get_username(x) @@ -425,6 +427,8 @@ def format_filings_data(data: dict) -> dict: elif filing_type == 'noticeOfWithdrawal': filing_body['withdrawn_filing_id'] = withdrawn_filing_idx # will be updated to real filing_id when loading data 
withdrawn_filing_idx = -1 + elif filing_type in ('consentContinuationOut', 'consentAmalgamationOut'): + consent_continuation_out = format_consent_continuation_out(filing_type, effective_date) comments = format_filing_comments_data(data, x['e_event_id']) @@ -434,7 +438,8 @@ def format_filings_data(data: dict) -> dict: 'jurisdiction': jurisdiction, 'amalgamations': amalgamation, 'comments': comments, - 'colin_event_ids': colin_event_ids + 'colin_event_ids': colin_event_ids, + 'consent_continuation_out': consent_continuation_out } formatted_filings.append(filing) @@ -457,6 +462,35 @@ def format_filings_data(data: dict) -> dict: 'unsupported_types': current_unsupported_types, } +def format_consent_continuation_out(filing_type: str, effective_date_str: str): + expiry_date = get_expiry_date(effective_date_str) + consent_continuation_out = { + 'consent_type': 'continuation_out' if filing_type == 'consentContinuationOut' else 'amalgamation_out', + 'expiry_date': expiry_date.isoformat(), + 'foreign_jurisdiction': '', + 'foreign_jurisdiction_region': '', + } + + return consent_continuation_out + + +def get_expiry_date(effective_date_str: str) -> datetime: + pst = pytz.timezone('America/Vancouver') + effective_date = datetime.strptime(effective_date_str, date_format_with_tz) + effective_date = effective_date.astimezone(pst) + _date = effective_date.replace(hour=23, minute=59, second=0, microsecond=0) + _date += datedelta.datedelta(months=6) + + # Setting legislation timezone again after adding 6 months to recalculate the UTC offset and DST info + _date = _date.astimezone(pst) + + # Adjust day light savings. Handle DST +-1 hour changes + dst_offset_diff = effective_date.dst() - _date.dst() + _date += dst_offset_diff + + return _date.astimezone(pytz.timezone('GMT')) + + def format_amalgamations_data(data: dict, event_id: Decimal, amalgamation_date: str, amalgamation_type: str) -> dict: amalgamations_data = data['amalgamations'] @@ -654,22 +688,27 @@ def format_users_data(users_data: list) -> list: return formatted_users -def format_cont_out_data(data: dict) -> dict: - cont_data = data.get('cont_out', []) - if not cont_data: +def format_out_data_data(data: dict) -> dict: + out_data = data.get('out_data') + if not out_data: return {} - cont_data = cont_data[0] - country, region = map_country_region(cont_data['can_jur_typ_cd']) + out_data = out_data[0] + country, region = map_country_region(out_data['can_jur_typ_cd']) + + date_field = { + 'HCO': 'continuation_out_date', + 'HAO': 'amalgamation_out_date' + }.get(out_data['state_type_cd']) - formatted_cont_out = { + formatted_out_data = { 'foreign_jurisdiction': country, 'foreign_jurisdiction_region': region, - 'foreign_legal_name': cont_data['home_company_nme'], - 'continuation_out_date': cont_data['cont_out_dt'], + 'foreign_legal_name': out_data['home_company_nme'], + date_field: out_data['cont_out_dt'], } - return formatted_cont_out + return formatted_out_data def map_country_region(can_jur_typ_cd): @@ -696,7 +735,7 @@ def formatted_data_cleanup(data: dict) -> dict: data['admin_email'] = data['businesses']['admin_email'] del data['businesses']['admin_email'] - data['businesses'].update(data['cont_out']) + data['businesses'].update(data['out_data']) return data @@ -712,7 +751,7 @@ def get_data_formatters() -> dict: 'filings': format_filings_data, 'comments': format_business_comments_data, # only for business level, filing level will be formatted ith filings 'in_dissolution': format_in_dissolution_data, - 'cont_out': format_cont_out_data, + 'out_data': 
format_out_data_data, # continuation/amalgamation out } return ret @@ -847,17 +886,19 @@ def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_s 'reason': 'Limited Restoration Expired', 'expiryDate': effective_date[:10] } - elif filing_type == 'continuationOut': - country, region = map_country_region(data['cont_out_can_jur_typ_cd']) - meta_data['continuationOut'] = { + elif filing_type in ('amalgamationOut', 'continuationOut'): + country, region = map_country_region(data['out_can_jur_typ_cd']) + meta_data[filing_type] = { 'country': country, 'region': region, - 'legalName': data['cont_out_home_company_nme'], - 'continuationOutDate': data['cont_out_dt'][:10] + 'legalName': data['out_home_company_nme'], + f'{filing_type}Date': data['cont_out_dt'][:10] } + if data['out_othr_juri_desc']: + meta_data[filing_type]['otherJurisdictionDesc'] = data['out_othr_juri_desc'] if withdrawn_ts_str := data['f_withdrawn_event_ts_str']: - withdrawn_ts = datetime.strptime(withdrawn_ts_str, '%Y-%m-%d %H:%M:%S%z') + withdrawn_ts = datetime.strptime(withdrawn_ts_str, date_format_with_tz) meta_data = { **meta_data, 'withdrawnDate': withdrawn_ts.isoformat() @@ -875,7 +916,7 @@ def get_colin_display_name(data: dict) -> str: # Annual Report if event_file_type == EventFilings.FILE_ANNBC.value: ar_dt_str = data['f_period_end_dt_str'] - ar_dt = datetime.strptime(ar_dt_str, '%Y-%m-%d %H:%M:%S%z') + ar_dt = datetime.strptime(ar_dt_str, date_format_with_tz) suffix = ar_dt.strftime('%b %d, %Y').upper() name = f'{name} - {suffix}' From 5722bf123e2f5881e85b8def81686c86db48a64c Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 19 Dec 2024 14:15:27 -0800 Subject: [PATCH 123/133] Replaced Minio with Document Record Service --- .../src/legal_api/resources/v2/document.py | 17 +++ legal-api/src/legal_api/services/__init__.py | 1 + .../src/legal_api/services/document_record.py | 126 ++++++++++++++++++ .../filings/validations/continuation_in.py | 6 +- 4 files changed, 145 insertions(+), 5 deletions(-) create mode 100644 legal-api/src/legal_api/services/document_record.py diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index 8d08f9af91..96ff6cda82 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -20,6 +20,7 @@ from legal_api.models import Document, Filing from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService from legal_api.utils.auth import jwt @@ -77,3 +78,19 @@ def get_minio_document(document_key: str): return jsonify( message=f'Error getting file {document_key}.' 
), HTTPStatus.INTERNAL_SERVER_ERROR + +@bp.route('//', methods=['POST', 'OPTIONS']) +@cross_origin(origin='*') +@jwt.requires_auth +def upload_document(document_class: str, document_type: str): + """Upload document file to Document Record Service.""" + + return DocumentRecordService.upload_document(document_class, document_type), HTTPStatus.OK + +@bp.route('/drs/', methods=['DELETE']) +@cross_origin(origin='*') +@jwt.requires_auth +def delete_document(document_service_id: str): + """Delete document file from Document Record Service.""" + + return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK \ No newline at end of file diff --git a/legal-api/src/legal_api/services/__init__.py b/legal-api/src/legal_api/services/__init__.py index c2a44e2dc0..7f895c28a1 100644 --- a/legal-api/src/legal_api/services/__init__.py +++ b/legal-api/src/legal_api/services/__init__.py @@ -29,6 +29,7 @@ from .furnishing_documents_service import FurnishingDocumentsService from .involuntary_dissolution import InvoluntaryDissolutionService from .minio import MinioService +from .document_record import DocumentRecordService from .mras_service import MrasService from .naics import NaicsService from .namex import NameXService diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py new file mode 100644 index 0000000000..25a7d30b32 --- /dev/null +++ b/legal-api/src/legal_api/services/document_record.py @@ -0,0 +1,126 @@ +# Copyright © 2021 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This module is a wrapper for Document Record Service.""" + +import base64 +from typing import Optional +import requests +from flask import current_app, request +from flask_babel import _ + +import PyPDF2 + + +BASE_URL = current_app.config.get('DRS_BASE_URL') +BASE_HEADERS = { + 'x-apikey': current_app.config.get('DRS_X_API_KEY'), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID'), +} + +class DocumentRecordService: + """Document Storage class.""" + + + @staticmethod + def upload_document(document_class: str, document_type: str) -> dict: + """Upload document to Docuemtn Record Service.""" + query_params = request.args.to_dict() + file = request.files.get('file') + # Ensure file exists + if not file: + current_app.logger.debug('No file found in request.') + return {'data': 'File not provided'} + current_app.logger.debug(f'Upload file to document record service {file.filename}') + url = f'{BASE_URL}documents/{document_class}/{document_type}' + + # Validate file size and encryption status before submitting to DRS. 
+ validation_error = DocumentRecordService.validate_pdf(file, request.content_length) + if validation_error: + return { + 'error': validation_error + } + + file_content = file.read() + + try: + # Read and encode the file content as base64 + file_content = file.read() + file_base64 = base64.b64encode(file_content).decode('utf-8') + + response_body = requests.post( + url, + params=query_params, + json={ + 'filename': file.filename, + 'content': file_base64, + 'content_type': file.content_type, + }, + headers={ + **BASE_HEADERS, + 'Content-Type': 'application/pdf' + } + ).json() + + current_app.logger.debug(f'Upload file to document record service {response_body}') + return { + 'documentServiceId': response_body['documentServiceId'], + 'consumerDocumentId': response_body['consumerDocumentId'], + 'consumerFilename': response_body['consumerFilename'] + } + except Exception as e: + current_app.logger.debug(f"Error on uploading document {e}") + return {} + + @staticmethod + def delete_document(document_service_id: str) -> dict: + """Delete document from Document Record Service.""" + url = f'{BASE_URL}documents/{document_service_id}' + + try: + response = requests.patch( + url, json={ 'removed': True }, + headers=BASE_HEADERS + ).json() + current_app.logger.debug(f'Delete document from document record service {response}') + return response + except Exception as e: + current_app.logger.debug(f'Error on deleting document {e}') + return {} + + @staticmethod + def validate_pdf(file, content_length) -> Optional[list]: + """Validate the PDF file.""" + msg = [] + try: + pdf_reader = PyPDF2.PdfFileReader(file) + + # Check that all pages in the pdf are letter size and able to be processed. + if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): + msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), + 'path': file.filename}) + + if content_length > 30000000: + msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) + + if pdf_reader.isEncrypted: + msg.append({'error': _('File must be unencrypted.'), 'path': file.filename}) + + except Exception as e: + msg.append({'error': _('Invalid file.'), 'path': file.filename}) + current_app.logger.debug(e) + + if msg: + return msg + + return None diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 08ec8e0286..68925ede5d 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -51,7 +51,6 @@ def validate(filing_json: dict) -> Optional[Error]: # pylint: disable=too-many- return msg # Cannot continue validation without legal_type msg.extend(validate_business_in_colin(filing_json, filing_type)) - msg.extend(validate_continuation_in_authorization(filing_json, filing_type)) msg.extend(_validate_foreign_jurisdiction(filing_json, filing_type, legal_type)) msg.extend(validate_name_request(filing_json, legal_type, filing_type)) @@ -126,10 +125,7 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty foreign_jurisdiction['country'] == 'CA' and ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' - if file_key := foreign_jurisdiction.get('affidavitFileKey'): - if err := validate_pdf(file_key, affidavit_file_key_path, False): - msg.extend(err) - 
else: + if not foreign_jurisdiction.get('affidavitFileKey'): msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) try: # Check the incorporation date is in valid format From 9b958f866d7102a7f76cec90e321ed1928c6718e Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 19 Dec 2024 14:40:11 -0800 Subject: [PATCH 124/133] fixed issue on getting env variables --- legal-api/src/legal_api/config.py | 4 ++++ .../src/legal_api/services/document_record.py | 21 +++++++++---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/legal-api/src/legal_api/config.py b/legal-api/src/legal_api/config.py index 38f2b746d5..006ba49d0e 100644 --- a/legal-api/src/legal_api/config.py +++ b/legal-api/src/legal_api/config.py @@ -184,6 +184,10 @@ class _Config(): # pylint: disable=too-few-public-methods # Transparency Register TR_START_DATE = os.getenv('TR_START_DATE', '').strip() # i.e. '2025-02-01' + # Document Record Service Settings + DRS_BASE_URL = os.getenv('DRS_BASE_URL', '') + DRS_ACCOUNT_ID = os.getenv('DRS_ACCOUNT_ID', '') + DRS_X_API_KEY = os.getenv('DRS_X_API_KEY', '') TESTING = False DEBUG = False diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 25a7d30b32..7c3872b694 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -21,13 +21,6 @@ import PyPDF2 - -BASE_URL = current_app.config.get('DRS_BASE_URL') -BASE_HEADERS = { - 'x-apikey': current_app.config.get('DRS_X_API_KEY'), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID'), -} - class DocumentRecordService: """Document Storage class.""" @@ -42,7 +35,8 @@ def upload_document(document_class: str, document_type: str) -> dict: current_app.logger.debug('No file found in request.') return {'data': 'File not provided'} current_app.logger.debug(f'Upload file to document record service {file.filename}') - url = f'{BASE_URL}documents/{document_class}/{document_type}' + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. 
validation_error = DocumentRecordService.validate_pdf(file, request.content_length) @@ -67,7 +61,8 @@ def upload_document(document_class: str, document_type: str) -> dict: 'content_type': file.content_type, }, headers={ - **BASE_HEADERS, + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), 'Content-Type': 'application/pdf' } ).json() @@ -85,12 +80,16 @@ def upload_document(document_class: str, document_type: str) -> dict: @staticmethod def delete_document(document_service_id: str) -> dict: """Delete document from Document Record Service.""" - url = f'{BASE_URL}documents/{document_service_id}' + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}documents/{document_service_id}' try: response = requests.patch( url, json={ 'removed': True }, - headers=BASE_HEADERS + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } ).json() current_app.logger.debug(f'Delete document from document record service {response}') return response From 36c22a254174127244da3c91de3646daf2092f6d Mon Sep 17 00:00:00 2001 From: flutistar Date: Sun, 5 Jan 2025 09:06:40 -0800 Subject: [PATCH 125/133] added drs download function --- .../src/legal_api/resources/v2/document.py | 10 +++++++- .../src/legal_api/services/document_record.py | 23 +++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index 96ff6cda82..ea21ccd9ef 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -93,4 +93,12 @@ def upload_document(document_class: str, document_type: str): def delete_document(document_service_id: str): """Delete document file from Document Record Service.""" - return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK \ No newline at end of file + return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK + +@bp.route('/drs//', methods=['GET']) +@cross_origin(origins='*') +@jwt.requires_auth +def get_document(document_class: str, document_service_id: str): + """Get document file from Document Record Service.""" + + return DocumentRecordService.get_document(document_class, document_service_id), HTTPStatus.OK \ No newline at end of file diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 7c3872b694..9f6e33c86b 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -36,7 +36,7 @@ def upload_document(document_class: str, document_type: str) -> dict: return {'data': 'File not provided'} current_app.logger.debug(f'Upload file to document record service {file.filename}') DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}documents/{document_class}/{document_type}' + url = f'{DRS_BASE_URL}/documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. 
validation_error = DocumentRecordService.validate_pdf(file, request.content_length) @@ -81,7 +81,7 @@ def upload_document(document_class: str, document_type: str) -> dict: def delete_document(document_service_id: str) -> dict: """Delete document from Document Record Service.""" DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}documents/{document_service_id}' + url = f'{DRS_BASE_URL}/documents/{document_service_id}' try: response = requests.patch( @@ -97,6 +97,25 @@ def delete_document(document_service_id: str) -> dict: current_app.logger.debug(f'Error on deleting document {e}') return {} + @staticmethod + def get_document(document_class: str, document_service_id: str) -> dict: + + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}/searches/{document_class}?documentServiceId={document_service_id}' + try: + response = requests.get( + url, + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } + ).json() + current_app.logger.debug(f'Get document from document record service {response}') + return response[0] + except Exception as e: + current_app.logger.debug(f'Error on downloading document {e}') + return {} + @staticmethod def validate_pdf(file, content_length) -> Optional[list]: """Validate the PDF file.""" From 31ff9843a25e75c75e220a6879870e99e78c4fa1 Mon Sep 17 00:00:00 2001 From: flutistar Date: Fri, 10 Jan 2025 07:35:55 -0800 Subject: [PATCH 126/133] update entity id upon approval --- .../business_filings/business_documents.py | 9 +++- .../src/legal_api/services/document_record.py | 45 ++++++++++++++----- 2 files changed, 42 insertions(+), 12 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index 5bc0e81c72..a5663e8ce5 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -26,7 +26,7 @@ from legal_api.exceptions import ErrorCode, get_error_message from legal_api.models import Business, Document, Filing as FilingModel # noqa: I001 from legal_api.reports import get_pdf -from legal_api.services import MinioService, authorized +from legal_api.services import MinioService, authorized, DocumentRecordService from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime from legal_api.utils.util import cors_preflight @@ -92,6 +92,13 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None return get_pdf(filing.storage, legal_filing_name) elif file_key and (document := Document.find_by_file_key(file_key)): if document.filing_id == filing.id: # make sure the file belongs to this filing + if document.file_key.startswith('DS'): # docID from DRS + response = DocumentRecordService.download_document('CORP', document.file_key) + return current_app.response_class( + response=response, + status=HTTPStatus.OK, + mimetype='application/pdf' + ) response = MinioService.get_file(document.file_key) return current_app.response_class( response=response.data, diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 9f6e33c86b..955fb57817 100644 --- 
a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -29,7 +29,7 @@ class DocumentRecordService: def upload_document(document_class: str, document_type: str) -> dict: """Upload document to Docuemtn Record Service.""" query_params = request.args.to_dict() - file = request.files.get('file') + file = request.data.get('file') # Ensure file exists if not file: current_app.logger.debug('No file found in request.') @@ -45,8 +45,6 @@ def upload_document(document_class: str, document_type: str) -> dict: 'error': validation_error } - file_content = file.read() - try: # Read and encode the file content as base64 file_content = file.read() @@ -63,7 +61,7 @@ def upload_document(document_class: str, document_type: str) -> dict: headers={ 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), - 'Content-Type': 'application/pdf' + 'Content-Type': file.content_type } ).json() @@ -99,7 +97,7 @@ def delete_document(document_service_id: str) -> dict: @staticmethod def get_document(document_class: str, document_service_id: str) -> dict: - + """Get document record from Document Record Service.""" DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name url = f'{DRS_BASE_URL}/searches/{document_class}?documentServiceId={document_service_id}' try: @@ -113,7 +111,37 @@ def get_document(document_class: str, document_service_id: str) -> dict: current_app.logger.debug(f'Get document from document record service {response}') return response[0] except Exception as e: - current_app.logger.debug(f'Error on downloading document {e}') + current_app.logger.debug(f'Error on getting a document object {e}') + return {} + + @staticmethod + def download_document(document_class: str, document_service_id: str) -> dict: + """Download document from Document Record Service.""" + doc_object = DocumentRecordService.get_document(document_class, document_service_id) + + response = requests.get(doc_object['documentURL']) # Download file from storage + response.raise_for_status() # Raise an HTTPError for bad responses (4xx and 5xx) + + return response + + @staticmethod + def update_business_identifier(business_identifier: str, document_service_id: str): + """Update business identifier up on approval.""" + DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name + url = f'{DRS_BASE_URL}/documents/{document_service_id}' + + try: + response = requests.patch( + url, json={ 'consumerIdentifer': business_identifier }, + headers={ + 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), + 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + } + ).json() + current_app.logger.debug(f'Update business identifier - {business_identifier}') + return response + except Exception as e: + current_app.logger.debug(f'Error on deleting document {e}') return {} @staticmethod @@ -123,11 +151,6 @@ def validate_pdf(file, content_length) -> Optional[list]: try: pdf_reader = PyPDF2.PdfFileReader(file) - # Check that all pages in the pdf are letter size and able to be processed. 
- if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): - msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), - 'path': file.filename}) - if content_length > 30000000: msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) From d0121c475e9700fd1e3d392b9ad671b85f63e3d2 Mon Sep 17 00:00:00 2001 From: flutistar Date: Thu, 23 Jan 2025 08:35:36 -0800 Subject: [PATCH 127/133] updated validation --- legal-api/src/legal_api/constants.py | 10 ++++++++++ .../src/legal_api/services/document_record.py | 18 ++++++++++++++---- .../filings/validations/common_validations.py | 11 ++++++++++- .../filings/validations/continuation_in.py | 11 ++++++++--- .../filing_processors/continuation_in.py | 9 +++++++++ 5 files changed, 51 insertions(+), 8 deletions(-) diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 92e7394c79..403cc32e13 100644 --- a/legal-api/src/legal_api/constants.py +++ b/legal-api/src/legal_api/constants.py @@ -13,4 +13,14 @@ # limitations under the License. """Constants for legal api.""" +from enum import Enum + + BOB_DATE = '2019-03-08' + +class DocumentClassEnum(Enum): + CORP = 'CORP' + +class DocumentTypeEnum(Enum): + CNTO = 'CNTO', + DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT' \ No newline at end of file diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index 955fb57817..b373181e37 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -18,9 +18,10 @@ import requests from flask import current_app, request from flask_babel import _ - import PyPDF2 +from legal_api.constants import DocumentTypeEnum + class DocumentRecordService: """Document Storage class.""" @@ -39,7 +40,7 @@ def upload_document(document_class: str, document_type: str) -> dict: url = f'{DRS_BASE_URL}/documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. - validation_error = DocumentRecordService.validate_pdf(file, request.content_length) + validation_error = DocumentRecordService.validate_pdf(file, request.content_length, document_type) if validation_error: return { 'error': validation_error @@ -145,12 +146,21 @@ def update_business_identifier(business_identifier: str, document_service_id: st return {} @staticmethod - def validate_pdf(file, content_length) -> Optional[list]: + def validate_pdf(file, content_length, document_type) -> Optional[list]: """Validate the PDF file.""" msg = [] + verify_paper_size = document_type in [ + DocumentTypeEnum.CNTO, + DocumentTypeEnum.DIRECTOR_AFFIDAVIT + ] + try: pdf_reader = PyPDF2.PdfFileReader(file) - + if verify_paper_size: + # Check that all pages in the pdf are letter size and able to be processed. 
+ if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): + msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), + 'path': file.filename}) if content_length > 30000000: msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) diff --git a/legal-api/src/legal_api/services/filings/validations/common_validations.py b/legal-api/src/legal_api/services/filings/validations/common_validations.py index a624e296a6..f58ad3f2e8 100644 --- a/legal-api/src/legal_api/services/filings/validations/common_validations.py +++ b/legal-api/src/legal_api/services/filings/validations/common_validations.py @@ -22,7 +22,7 @@ from legal_api.errors import Error from legal_api.models import Business -from legal_api.services import MinioService, flags, namex +from legal_api.services import MinioService, flags, namex, DocumentRecordService from legal_api.services.utils import get_str from legal_api.utils.datetime import datetime as dt @@ -329,3 +329,12 @@ def validate_foreign_jurisdiction(foreign_jurisdiction: dict, msg.append({'error': 'Invalid region.', 'path': f'{foreign_jurisdiction_path}/region'}) return msg + +def validate_file_on_drs(document_class: str, document_service_id: str, path) -> bool: + """Validate file existence on DRS""" + msg = [] + doc = DocumentRecordService.get_document(document_class, document_service_id) + if not bool(doc.get("documentURL")): + msg.append({'error': 'File does not exist on Document Record Service', 'path': path}) + + return msg \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 68925ede5d..40b43f01cd 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -25,8 +25,8 @@ validate_foreign_jurisdiction, validate_name_request, validate_parties_names, - validate_pdf, validate_share_structure, + validate_file_on_drs ) from legal_api.services.filings.validations.incorporation_application import ( validate_incorporation_effective_date, @@ -35,6 +35,7 @@ ) from legal_api.services.utils import get_bool, get_str from legal_api.utils.datetime import datetime as dt +from legal_api.constants import DocumentClassEnum def validate(filing_json: dict) -> Optional[Error]: # pylint: disable=too-many-branches; @@ -51,6 +52,7 @@ def validate(filing_json: dict) -> Optional[Error]: # pylint: disable=too-many- return msg # Cannot continue validation without legal_type msg.extend(validate_business_in_colin(filing_json, filing_type)) + msg.extend(validate_continuation_in_authorization(filing_json, filing_type)) msg.extend(_validate_foreign_jurisdiction(filing_json, filing_type, legal_type)) msg.extend(validate_name_request(filing_json, legal_type, filing_type)) @@ -125,7 +127,10 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty foreign_jurisdiction['country'] == 'CA' and ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' - if not foreign_jurisdiction.get('affidavitFileKey'): + if file_key := foreign_jurisdiction.get('affidavitFileKey'): + if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, affidavit_file_key_path): + msg.extend(err) + else: msg.append({'error': 'Affidavit from the directors is required.', 'path': 
affidavit_file_key_path}) try: # Check the incorporation date is in valid format @@ -153,7 +158,7 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_pdf(file_key, file_key_path, False): + if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, file_key_path): msg.extend(err) return msg diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py index 393c30db69..c803536d1a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py @@ -17,6 +17,7 @@ from entity_queue_common.service_utils import QueueException from legal_api.models import Business, Document, DocumentType, Filing, Jurisdiction +from legal_api.services import DocumentRecordService from legal_api.utils.legislation_datetime import LegislationDatetime from entity_filer.filing_meta import FilingMeta @@ -160,4 +161,12 @@ def process(business: Business, # pylint: disable=too-many-branches,too-many-lo filing_json['filing']['business']['legalType'] = business.legal_type filing_json['filing']['business']['foundingDate'] = business.founding_date.isoformat() filing_rec._filing_json = filing_json # pylint: disable=protected-access; bypass to update filing data + # Get a file key from continuation in object. + files = continuation_in.get('authorization', {}).get('files', []) + if not len(files): + raise QueueException( + f'continuationIn {filing_rec.id}, Unable to update business identifier on Document Record Service.' + ) + # Update business identifier on Document Record Service + DocumentRecordService.update_business_identifier(business.identifier, files[0].get('fileKey')) return business, filing_rec, filing_meta From 2430a31082274049202b7d32cd9d4c5f6f787c51 Mon Sep 17 00:00:00 2001 From: flutistar Date: Fri, 31 Jan 2025 14:19:46 -0800 Subject: [PATCH 128/133] updated get document api --- .../src/legal_api/resources/v2/document.py | 24 +++++++++++++++---- .../filings/validations/continuation_in.py | 4 ++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index ea21ccd9ef..38115fd5da 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -13,6 +13,7 @@ # limitations under the License. 
"""Module for handling Minio document operations.""" +import re from http import HTTPStatus from flask import Blueprint, current_app, jsonify @@ -95,10 +96,25 @@ def delete_document(document_service_id: str): return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK -@bp.route('/drs//', methods=['GET']) +@bp.route('/drs//', methods=['GET']) @cross_origin(origins='*') @jwt.requires_auth -def get_document(document_class: str, document_service_id: str): - """Get document file from Document Record Service.""" +def get_document(document_class: str, document_key: str): + """Get document file from Minio or Document Record Service.""" + drs_id_pattern = r"^DS\d{10}$" - return DocumentRecordService.get_document(document_class, document_service_id), HTTPStatus.OK \ No newline at end of file + try: + if re.match(drs_id_pattern, document_key): + return DocumentRecordService.get_document(document_class, document_key), HTTPStatus.OK + else: + response = MinioService.get_file(document_key) + return current_app.response_class( + response=response.data, + status=response.status, + mimetype='application/pdf' + ) + except Exception as e: + current_app.logger.error(f'Error getting file {document_key}: {e}') + return jsonify( + message=f'Error getting file {document_key}.' + ), HTTPStatus.INTERNAL_SERVER_ERROR \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 40b43f01cd..245278b3e7 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -128,7 +128,7 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' if file_key := foreign_jurisdiction.get('affidavitFileKey'): - if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, affidavit_file_key_path): + if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, affidavit_file_key_path): msg.extend(err) else: msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) @@ -158,7 +158,7 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_file_on_drs(DocumentClassEnum.CORP, file_key, file_key_path): + if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, file_key_path): msg.extend(err) return msg From 02193c92f076fc40f79f97f01c4eddc07cd4f002 Mon Sep 17 00:00:00 2001 From: flutistar Date: Mon, 10 Feb 2025 10:02:50 -0800 Subject: [PATCH 129/133] update validation --- legal-api/src/legal_api/constants.py | 8 ++++++-- .../legal_api/services/filings/validations/dissolution.py | 6 ++++-- .../filings/validations/incorporation_application.py | 7 ++++--- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 403cc32e13..25c41b17e2 100644 --- a/legal-api/src/legal_api/constants.py +++ b/legal-api/src/legal_api/constants.py @@ -19,8 +19,12 @@ BOB_DATE = '2019-03-08' class DocumentClassEnum(Enum): - CORP = 'CORP' + CORP = 'CORP' + COOP = 'COOP' 
 class DocumentTypeEnum(Enum):
     CNTO = 'CNTO',
-    DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT'
\ No newline at end of file
+    DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT'
+    CORP_AFFIDAVIT = 'CORP_AFFIDAVIT'
+    COOP_MEMORANDUM = 'COOP_MEMORANDUM'
+    COOP_RULES = 'COOP_RULES'
\ No newline at end of file
diff --git a/legal-api/src/legal_api/services/filings/validations/dissolution.py b/legal-api/src/legal_api/services/filings/validations/dissolution.py
index 21bc9b2575..f762766756 100644
--- a/legal-api/src/legal_api/services/filings/validations/dissolution.py
+++ b/legal-api/src/legal_api/services/filings/validations/dissolution.py
@@ -22,7 +22,9 @@
 from legal_api.errors import Error
 from legal_api.models import Address, Business, PartyRole

-from .common_validations import validate_court_order, validate_pdf
+from .common_validations import validate_court_order, validate_file_on_drs
+from legal_api.constants import DocumentClassEnum
+
 from ...utils import get_str  # noqa: I003; needed as the linter gets confused from the babel override above.

@@ -243,7 +245,7 @@ def validate_affidavit(filing_json, legal_type, dissolution_type) -> Optional[li
         return [{'error': _('A valid affidavit key is required.'),
                  'path': affidavit_file_key_path}]

-    return validate_pdf(affidavit_file_key, affidavit_file_key_path)
+    return validate_file_on_drs(DocumentClassEnum.CORP.value, affidavit_file_key, affidavit_file_key_path)

     return None

diff --git a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py
index dc33ed88eb..d540a68347 100644
--- a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py
+++ b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py
@@ -24,12 +24,13 @@
 from legal_api.models import Business
 from legal_api.services.utils import get_str
 from legal_api.utils.datetime import datetime as dt
+from legal_api.constants import DocumentClassEnum

 from .common_validations import (  # noqa: I001
     validate_court_order,
     validate_name_request,
     validate_parties_names,
-    validate_pdf,
+    validate_file_on_drs,
     validate_share_structure,
 )

@@ -294,13 +295,13 @@ def validate_cooperative_documents(incorporation_json: dict):
     rules_file_key = cooperative['rulesFileKey']
     rules_file_key_path = '/filing/incorporationApplication/cooperative/rulesFileKey'
-    rules_err = validate_pdf(rules_file_key, rules_file_key_path)
+    rules_err = validate_file_on_drs(DocumentClassEnum.COOP.value, rules_file_key, rules_file_key_path)
     if rules_err:
         msg.extend(rules_err)

     memorandum_file_key = cooperative['memorandumFileKey']
     memorandum_file_key_path = '/filing/incorporationApplication/cooperative/memorandumFileKey'
-    memorandum_err = validate_pdf(memorandum_file_key, memorandum_file_key_path)
+    memorandum_err = validate_file_on_drs(DocumentClassEnum.COOP.value, memorandum_file_key, memorandum_file_key_path)
     if memorandum_err:
         msg.extend(memorandum_err)

From 9d9f3995a5428c5278841cd51185a49f13c971f4 Mon Sep 17 00:00:00 2001
From: flutistar
Date: Wed, 19 Mar 2025 08:59:51 -0700
Subject: [PATCH 130/133] added feature flag for doc service

---
 legal-api/src/legal_api/config.py                   |  6 +--
 legal-api/src/legal_api/reports/report.py           | 25 ++++++++-
 .../resources/v1/business/business_filings.py       | 37 ++++++++++++-
 .../business_filings/business_filings.py            | 53 +++++++++++++++++--
 .../src/legal_api/services/document_record.py       | 53 +++++++++++++------
 .../filings/validations/alteration.py               | 13 ++++-
 6 files
changed, 159 insertions(+), 28 deletions(-) diff --git a/legal-api/src/legal_api/config.py b/legal-api/src/legal_api/config.py index 006ba49d0e..0c3a0b30c0 100644 --- a/legal-api/src/legal_api/config.py +++ b/legal-api/src/legal_api/config.py @@ -185,9 +185,9 @@ class _Config(): # pylint: disable=too-few-public-methods # Transparency Register TR_START_DATE = os.getenv('TR_START_DATE', '').strip() # i.e. '2025-02-01' # Document Record Service Settings - DRS_BASE_URL = os.getenv('DRS_BASE_URL', '') - DRS_ACCOUNT_ID = os.getenv('DRS_ACCOUNT_ID', '') - DRS_X_API_KEY = os.getenv('DRS_X_API_KEY', '') + DOC_API_URL = os.getenv('DOC_API_URL', '') + DOC_API_ACCOUNT_ID = os.getenv('DOC_API_ACCOUNT_ID', '') + DOC_API_KEY = os.getenv('DOC_API_KEY', '') TESTING = False DEBUG = False diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index 8e86695367..929fadc814 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -38,10 +38,16 @@ ) from legal_api.models.business import ASSOCIATION_TYPE_DESC from legal_api.reports.registrar_meta import RegistrarInfo -from legal_api.services import MinioService, VersionedBusinessDetailsService, flags +from legal_api.services import ( + MinioService, + VersionedBusinessDetailsService, + DocumentRecordService, + flags +) from legal_api.utils.auth import jwt from legal_api.utils.formatting import float_to_str from legal_api.utils.legislation_datetime import LegislationDatetime +from legal_api.constants import DocumentClassEnum OUTPUT_DATE_FORMAT: Final = '%B %-d, %Y' @@ -66,9 +72,19 @@ def get_pdf(self, report_type=None): return self._get_report() def _get_static_report(self): + document_type = ReportMeta.static_reports[self._report_key]['documentType'] + document_class = ReportMeta.static_reports[self._report_key]['documentType'] + + print(document_type) document: Document = self._filing.documents.filter(Document.type == document_type).first() - response = MinioService.get_file(document.file_key) + if(flags.is_on('enable-document-records')): + response = DocumentRecordService.download_document( + document_class, + document.file_key + ) + else: + response = MinioService.get_file(document.file_key) return current_app.response_class( response=response.data, status=response.status, @@ -76,6 +92,7 @@ def _get_static_report(self): ) def _get_report(self): + current_app.logger.debug("Came to _get_report") if self._filing.business_id: self._business = Business.find_by_internal_id(self._filing.business_id) Report._populate_business_info_to_filing(self._filing, self._business) @@ -1501,15 +1518,19 @@ class ReportMeta: # pylint: disable=too-few-public-methods static_reports = { 'certifiedRules': { + 'documentClass': DocumentClassEnum.COOP.value, 'documentType': 'coop_rules' }, 'certifiedMemorandum': { + 'documentClass': DocumentClassEnum.COOP.value, 'documentType': 'coop_memorandum' }, 'affidavit': { + 'documentClass': DocumentClassEnum.CORP.value, 'documentType': 'affidavit' }, 'uploadedCourtOrder': { + 'documentClass': DocumentClassEnum.CORP.value, 'documentType': 'court_order' } } diff --git a/legal-api/src/legal_api/resources/v1/business/business_filings.py b/legal-api/src/legal_api/resources/v1/business/business_filings.py index 5057b89cde..768916db10 100644 --- a/legal-api/src/legal_api/resources/v1/business/business_filings.py +++ b/legal-api/src/legal_api/resources/v1/business/business_filings.py @@ -40,9 +40,11 @@ DocumentMetaService, MinioService, 
RegistrationBootstrapService, + DocumentRecordService, authorized, namex, queue, + flags ) from legal_api.services.authz import is_allowed from legal_api.services.filings import validate @@ -244,7 +246,10 @@ def delete(identifier, filing_id=None): # pylint: disable=too-many-branches return ListFilingResource._create_deletion_locked_response(identifier, filing) try: - ListFilingResource._delete_from_minio(filing) + if flags.is_on('enable-document-records'): + ListFilingResource._delete_from_drs(filing) + else: + ListFilingResource._delete_from_minio(filing) filing.delete() except BusinessException as err: return jsonify({'errors': [{'error': err.error}, ]}), err.status_code @@ -287,6 +292,36 @@ def _delete_from_minio(filing): .get('fileKey', None)): MinioService.delete_file(file_key) + @staticmethod + def _delete_from_drs(filing): + document_service_id = '' + if (filing.filing_type == Filing.FILINGS['incorporationApplication'].get('name') + and (cooperative := filing.filing_json + .get('filing', {}) + .get('incorporationApplication', {}) + .get('cooperative', None))) or \ + (filing.filing_type == Filing.FILINGS['alteration'].get('name') + and (cooperative := filing.filing_json + .get('filing', {}) + .get('alteration', {}))): + if rules_file_key := cooperative.get('rulesFileKey', None): + document_service_id = rules_file_key + if memorandum_file_key := cooperative.get('memorandumFileKey', None): + document_service_id = memorandum_file_key + elif filing.filing_type == Filing.FILINGS['dissolution'].get('name') \ + and (affidavit_file_key := filing.filing_json + .get('filing', {}) + .get('dissolution', {}) + .get('affidavitFileKey', None)): + document_service_id = affidavit_file_key + elif filing.filing_type == Filing.FILINGS['courtOrder'].get('name') \ + and (file_key := filing.filing_json + .get('filing', {}) + .get('courtOrder', {}) + .get('fileKey', None)): + document_service_id = file_key + DocumentRecordService.delete_document(document_service_id) + @staticmethod def _create_deletion_locked_response(identifier, filing): business = Business.find_by_identifier(identifier) diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 7a7507e2d9..23ac93ead2 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -58,7 +58,9 @@ SYSTEM_ROLE, MinioService, RegistrationBootstrapService, + DocumentRecordService, authorized, + flags, namex, queue, ) @@ -207,7 +209,10 @@ def delete_filings(identifier, filing_id=None): filing.delete() with suppress(Exception): - ListFilingResource.delete_from_minio(filing_type, filing_json) + if flags.is_on('enable-document-records'): + ListFilingResource.delete_from_drs(filing_type, filing_json) + else: + ListFilingResource.delete_from_minio(filing_type, filing_json) if identifier.startswith('T') and filing.filing_type != Filing.FILINGS['noticeOfWithdrawal']['name']: bootstrap = RegistrationBootstrap.find_by_identifier(identifier) @@ -635,7 +640,10 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s if not filing: filing = Filing() filing.business_id = business.id - + current_app.logger.error("------------------------------------------------------") + current_app.logger.error(filing.business_id) + current_app.logger.error("------------------------------------------------------") + 
try: filing.submitter_id = user.id filing.filing_json = ListFilingResource.sanitize_html_fields(json_input) @@ -1044,6 +1052,37 @@ def delete_from_minio(filing_type: str, filing_json: dict): elif filing_type == Filing.FILINGS['continuationIn'].get('name'): ListFilingResource.delete_continuation_in_files(filing_json) + @staticmethod + def delete_from_drs(filing_type: str, filing_json: dict): + """Delete file from Document Record Service.""" + if (filing_type == Filing.FILINGS['incorporationApplication'].get('name') + and (cooperative := filing_json + .get('filing', {}) + .get('incorporationApplication', {}) + .get('cooperative', None))) or \ + (filing_type == Filing.FILINGS['alteration'].get('name') + and (cooperative := filing_json + .get('filing', {}) + .get('alteration', {}))): + if rules_file_key := cooperative.get('rulesFileKey', None): + DocumentRecordService.delete_document(rules_file_key) + if memorandum_file_key := cooperative.get('memorandumFileKey', None): + DocumentRecordService.delete_document(memorandum_file_key) + elif filing_type == Filing.FILINGS['dissolution'].get('name') \ + and (affidavit_file_key := filing_json + .get('filing', {}) + .get('dissolution', {}) + .get('affidavitFileKey', None)): + DocumentRecordService.delete_document(affidavit_file_key) + elif filing_type == Filing.FILINGS['courtOrder'].get('name') \ + and (file_key := filing_json + .get('filing', {}) + .get('courtOrder', {}) + .get('fileKey', None)): + DocumentRecordService.delete_document(file_key) + elif filing_type == Filing.FILINGS['continuationIn'].get('name'): + ListFilingResource.delete_continuation_in_files(filing_json) + @staticmethod def delete_continuation_in_files(filing_json: dict): """Delete continuation in files from minio.""" @@ -1051,13 +1090,19 @@ def delete_continuation_in_files(filing_json: dict): # Delete affidavit file if affidavit_file_key := continuation_in.get('foreignJurisdiction', {}).get('affidavitFileKey', None): - MinioService.delete_file(affidavit_file_key) + if flags.is_on('enable-document-records'): + DocumentRecordService.delete_document(affidavit_file_key) + else: + MinioService.delete_file(affidavit_file_key) # Delete authorization file(s) authorization_files = continuation_in.get('authorization', {}).get('files', []) for file in authorization_files: if auth_file_key := file.get('fileKey', None): - MinioService.delete_file(auth_file_key) + if flags.is_on('enable-document-records'): + DocumentRecordService.delete_document(auth_file_key) + else: + MinioService.delete_file(auth_file_key) @staticmethod def details_for_invoice(business_identifier: str, corp_type: str): diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index b373181e37..e659880e7c 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -36,8 +36,8 @@ def upload_document(document_class: str, document_type: str) -> dict: current_app.logger.debug('No file found in request.') return {'data': 'File not provided'} current_app.logger.debug(f'Upload file to document record service {file.filename}') - DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}/documents/{document_class}/{document_type}' + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = f'{DOC_API_URL}/documents/{document_class}/{document_type}' # Validate file size and encryption status before submitting to DRS. 
validation_error = DocumentRecordService.validate_pdf(file, request.content_length, document_type) @@ -60,8 +60,8 @@ def upload_document(document_class: str, document_type: str) -> dict: 'content_type': file.content_type, }, headers={ - 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), 'Content-Type': file.content_type } ).json() @@ -76,18 +76,39 @@ def upload_document(document_class: str, document_type: str) -> dict: current_app.logger.debug(f"Error on uploading document {e}") return {} + @staticmethod + def update_document(document: bytes, document_service_id: str, document_name: str) -> dict: + """Update a document on Document Record Service (DRS).""" + + DOC_API_URL = current_app.config.get('DOC_API_URL', '') + url = f"{DOC_API_URL}/documents/{document_service_id}?consumerFilename={document_name}" + + headers = { + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), + 'Content-Type': 'application/pdf' + } + + try: + response = requests.put(url, data=document, headers=headers) + response.raise_for_status() + return response.json() + except requests.exceptions.RequestException as error: + current_app.logger.error(f"Error updating document on DRS: {error}") + return {"error": str(error), "response": error.response.json() if error.response else None} + @staticmethod def delete_document(document_service_id: str) -> dict: """Delete document from Document Record Service.""" - DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}/documents/{document_service_id}' + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = f'{DOC_API_URL}/documents/{document_service_id}' try: response = requests.patch( url, json={ 'removed': True }, headers={ - 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), } ).json() current_app.logger.debug(f'Delete document from document record service {response}') @@ -99,14 +120,14 @@ def delete_document(document_service_id: str) -> dict: @staticmethod def get_document(document_class: str, document_service_id: str) -> dict: """Get document record from Document Record Service.""" - DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}/searches/{document_class}?documentServiceId={document_service_id}' + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = f'{DOC_API_URL}/searches/{document_class}?documentServiceId={document_service_id}' try: response = requests.get( url, headers={ - 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), } ).json() current_app.logger.debug(f'Get document from document record service {response}') @@ -128,15 +149,15 @@ def download_document(document_class: str, document_service_id: str) -> dict: @staticmethod def update_business_identifier(business_identifier: str, document_service_id: str): """Update business identifier 
up on approval.""" - DRS_BASE_URL = current_app.config.get('DRS_BASE_URL', '') # pylint: disable=invalid-name - url = f'{DRS_BASE_URL}/documents/{document_service_id}' + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = f'{DOC_API_URL}/documents/{document_service_id}' try: response = requests.patch( url, json={ 'consumerIdentifer': business_identifier }, headers={ - 'x-apikey': current_app.config.get('DRS_X_API_KEY', ''), - 'Account-Id': current_app.config.get('DRS_ACCOUNT_ID', ''), + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), } ).json() current_app.logger.debug(f'Update business identifier - {business_identifier}') diff --git a/legal-api/src/legal_api/services/filings/validations/alteration.py b/legal-api/src/legal_api/services/filings/validations/alteration.py index 38ce04d0aa..3b3da063f7 100644 --- a/legal-api/src/legal_api/services/filings/validations/alteration.py +++ b/legal-api/src/legal_api/services/filings/validations/alteration.py @@ -21,6 +21,7 @@ from legal_api.errors import Error from legal_api.models import Business from legal_api.services.utils import get_bool, get_str +from legal_api.constants import DocumentClassEnum from .common_validations import ( validate_court_order, @@ -28,6 +29,7 @@ validate_pdf, validate_resolution_date_in_share_structure, validate_share_structure, + validate_file_on_drs ) @@ -179,7 +181,11 @@ def rules_change_validation(filing): return msg if rules_file_key: - rules_err = validate_pdf(rules_file_key, rules_file_key_path) + rules_err = validate_file_on_drs( + DocumentClassEnum.COOP.value, + rules_file_key, + rules_file_key_path) + if rules_err: msg.extend(rules_err) return msg @@ -203,7 +209,10 @@ def memorandum_change_validation(filing): return msg if memorandum_file_key: - memorandum_err = validate_pdf(memorandum_file_key, memorandum_file_key_path) + memorandum_err = validate_file_on_drs( + DocumentClassEnum.COOP.value, + memorandum_file_key, + memorandum_file_key_path) if memorandum_err: msg.extend(memorandum_err) From 60c8fa00b06dc2f5572a65ea70723964875fa80a Mon Sep 17 00:00:00 2001 From: flutistar Date: Wed, 19 Mar 2025 09:02:25 -0700 Subject: [PATCH 131/133] added feature flag for doc service --- .../filing_processors/dissolution.py | 20 +++++++++- .../filing_components/rules_and_memorandum.py | 37 ++++++++++++++++--- .../filing_processors/incorporation_filing.py | 34 +++++++++++++++-- .../entity-filer/src/entity_filer/utils.py | 12 ++++-- .../entity-filer/src/entity_filer/worker.py | 15 ++++++-- 5 files changed, 99 insertions(+), 19 deletions(-) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py index 0ff7e58eeb..aa3500675a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py @@ -21,16 +21,20 @@ from legal_api.models import BatchProcessing, Business, Document, Filing, db from legal_api.models.document import DocumentType from legal_api.services.filings.validations.dissolution import DissolutionTypes +from legal_api.services import Flags +from legal_api.services.document_record import DocumentRecordService from legal_api.services.minio import MinioService from legal_api.services.pdf_service import RegistrarStampData from legal_api.utils.datetime import datetime from 
legal_api.utils.legislation_datetime import LegislationDatetime
+from legal_api.constants import DocumentClassEnum
 
 from entity_filer.filing_meta import FilingMeta
 from entity_filer.filing_processors.filing_components import create_office, filings
 from entity_filer.filing_processors.filing_components.parties import update_parties
 from entity_filer.utils import replace_file_with_certified_copy
 
+flags = Flags()  # pylint: disable=invalid-name
 
 # pylint: disable=too-many-locals
 def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta, flag_on: bool = False):
@@ -117,9 +121,21 @@ def _update_cooperative(dissolution_filing: Dict, business: Business, filing: Fi
     # create certified copy for affidavit document
     affidavit_file_key = dissolution_filing.get('affidavitFileKey')
-    affidavit_file = MinioService.get_file(affidavit_file_key)
+    if flags.is_on('enable-document-records'):
+        affidavit_file = DocumentRecordService.download_document(
+            DocumentClassEnum.COOP.value,
+            affidavit_file_key
+        )
+    else:
+        affidavit_file = MinioService.get_file(affidavit_file_key)
+
     registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier)
-    replace_file_with_certified_copy(affidavit_file.data, affidavit_file_key, registrar_stamp_data)
+    replace_file_with_certified_copy(
+        affidavit_file.data,
+        affidavit_file_key,
+        registrar_stamp_data,
+        affidavit_file.name
+    )
 
     document = Document()
     document.type = DocumentType.AFFIDAVIT.value
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
index 3ae0cc52c1..763d674fc8 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
@@ -19,11 +19,15 @@
 from legal_api.models import Business, Document, Filing
 from legal_api.models.document import DocumentType
+from legal_api.services import Flags
 from legal_api.services.minio import MinioService
+from legal_api.services.document_record import DocumentRecordService
+from legal_api.constants import DocumentClassEnum
 from legal_api.services.pdf_service import RegistrarStampData
 
 from entity_filer.utils import replace_file_with_certified_copy
 
+flags = Flags()  # pylint: disable=invalid-name
 
 def update_rules(
     business: Business,
@@ -40,9 +44,22 @@ def update_rules(
         return None
 
     is_correction = filing.filing_type == 'correction'
-    rules_file = MinioService.get_file(rules_file_key)
+
+    if flags.is_on('enable-document-records'):
+        rules_file = DocumentRecordService.download_document(
+            DocumentClassEnum.COOP.value,
+            rules_file_key
+        )
+    else:
+        rules_file = MinioService.get_file(rules_file_key)
+
     registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier, file_name, is_correction)
-    replace_file_with_certified_copy(rules_file.data, rules_file_key, registrar_stamp_data)
+    replace_file_with_certified_copy(
+        rules_file.data,
+        rules_file_key,
+        registrar_stamp_data,
+        rules_file.name
+    )
 
     document = Document()
     document.type = DocumentType.COOP_RULES.value
@@ -70,10 +87,20 @@ def update_memorandum(
     is_correction = filing.filing_type == 'correction'
 
     # create certified copy for memorandum document
-    memorandum_file = MinioService.get_file(memorandum_file_key)
+    if flags.is_on('enable-document-records'):
+        memorandum_file =
DocumentRecordService.download_document( + DocumentClassEnum.COOP.value, + memorandum_file_key + ) + else: + memorandum_file = MinioService.get_file(memorandum_file_key) registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier, file_name, is_correction) - replace_file_with_certified_copy(memorandum_file.data, memorandum_file_key, registrar_stamp_data) - + replace_file_with_certified_copy( + memorandum_file.data, + memorandum_file_key, + registrar_stamp_data, + memorandum_file.name + ) document = Document() document.type = DocumentType.COOP_MEMORANDUM.value document.file_key = memorandum_file_key diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py index 348ff0cadd..cd8f1143e7 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py @@ -19,7 +19,10 @@ from legal_api.models import Business, Document, Filing from legal_api.models.document import DocumentType from legal_api.services.minio import MinioService +from legal_api.services import Flags from legal_api.services.pdf_service import RegistrarStampData +from legal_api.services.document_record import DocumentRecordService +from legal_api.constants import DocumentClassEnum from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import aliases, business_info, filings, shares @@ -27,15 +30,27 @@ from entity_filer.filing_processors.filing_components.parties import update_parties from entity_filer.utils import replace_file_with_certified_copy +flags = Flags() # pylint: disable=invalid-name def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing): cooperative_obj = incorp_filing.get('cooperative', None) if cooperative_obj: # create certified copy for rules document rules_file_key = cooperative_obj.get('rulesFileKey') - rules_file = MinioService.get_file(rules_file_key) + if flags.is_on('enable-document-records'): + rules_file = DocumentRecordService.download_document( + DocumentClassEnum.COOP.value, + rules_file_key + ) + else: + rules_file = MinioService.get_file(rules_file_key) registrar_stamp_data = RegistrarStampData(business.founding_date, business.identifier) - replace_file_with_certified_copy(rules_file.data, rules_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + rules_file.data, + rules_file_key, + registrar_stamp_data, + rules_file.name + ) business.association_type = cooperative_obj.get('cooperativeAssociationType') document = Document() @@ -47,9 +62,20 @@ def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing) # create certified copy for memorandum document memorandum_file_key = cooperative_obj.get('memorandumFileKey') - memorandum_file = MinioService.get_file(memorandum_file_key) + if flags.is_on('enable-document-records'): + memorandum_file = DocumentRecordService.download_document( + DocumentClassEnum.COOP.value, + memorandum_file_key + ) + else: + memorandum_file = MinioService.get_file(memorandum_file_key) registrar_stamp_data = RegistrarStampData(business.founding_date, business.identifier) - replace_file_with_certified_copy(memorandum_file.data, memorandum_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + memorandum_file.data, + memorandum_file_key, + registrar_stamp_data, + memorandum_file.name + ) 
document = Document() document.type = DocumentType.COOP_MEMORANDUM.value diff --git a/queue_services/entity-filer/src/entity_filer/utils.py b/queue_services/entity-filer/src/entity_filer/utils.py index aae19f9c73..4413fa8d3a 100644 --- a/queue_services/entity-filer/src/entity_filer/utils.py +++ b/queue_services/entity-filer/src/entity_filer/utils.py @@ -19,12 +19,14 @@ import os import PyPDF2 -from legal_api.services import PdfService +from legal_api.services import PdfService, Flags from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService from legal_api.services.pdf_service import RegistrarStampData from entity_filer.version import __version__ +flags = Flags() # pylint: disable=invalid-name def _get_build_openshift_commit_hash(): return os.getenv('OPENSHIFT_BUILD_COMMIT', None) @@ -38,7 +40,7 @@ def get_run_version(): return __version__ -def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarStampData): +def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarStampData, file_name: str): """Create a certified copy and replace it into Minio server.""" open_pdf_file = io.BytesIO(_bytes) pdf_reader = PyPDF2.PdfFileReader(open_pdf_file) @@ -50,5 +52,7 @@ def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarSta pdf_service = PdfService() registrars_stamp = pdf_service.create_registrars_stamp(data) certified_copy = pdf_service.stamp_pdf(output_original_pdf, registrars_stamp, only_first_page=True) - - MinioService.put_file(key, certified_copy, certified_copy.getbuffer().nbytes) + if(flags.is_on('enable-document-records')): + DocumentRecordService.update_document(certified_copy, key, file_name) + else: + MinioService.put_file(key, certified_copy, certified_copy.getbuffer().nbytes) diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 7e45b15e63..1710f4b2d9 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -256,7 +256,7 @@ async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches, for item in sublist]) if is_correction: filing_meta.correction = {} - + for filing in legal_filings: if filing.get('alteration'): alteration.process(business, filing_submission, filing, filing_meta, is_correction) @@ -481,10 +481,11 @@ def is_system_filed_filing(filing_submission) -> bool: async def cb_subscription_handler(msg: nats.aio.client.Msg): +# async def cb_subscription_handler(msg): """Use Callback to process Queue Msg objects.""" try: - logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode()) - filing_msg = json.loads(msg.data.decode('utf-8')) + # logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode()) + filing_msg = msg logger.debug('Extracted filing msg: %s', filing_msg) await process_filing(filing_msg, FLASK_APP) except OperationalError as err: @@ -495,7 +496,13 @@ async def cb_subscription_handler(msg: nats.aio.client.Msg): '\n\nThis message has been put back on the queue for reprocessing.', json.dumps(filing_msg), exc_info=True) raise err # we don't want to handle the error, so that the message gets put back on the queue - except (QueueException, Exception): # pylint: disable=broad-except + except (QueueException, Exception) as err: # pylint: disable=broad-except # Catch Exception so that any error is still caught and the message is removed from 
the queue capture_message('Queue Error:' + json.dumps(filing_msg), level='error') logger.error('Queue Error: %s', json.dumps(filing_msg), exc_info=True) + +# import asyncio +# # //171009 +# if __name__ == '__main__': + +# asyncio.run(cb_subscription_handler({'filing': {'id': '172702'}})) \ No newline at end of file From 7526199971e09432ceb9c553ac26f8b1581e1315 Mon Sep 17 00:00:00 2001 From: flutistar Date: Wed, 16 Apr 2025 07:33:40 -0700 Subject: [PATCH 132/133] fixed error on unit test --- legal-api/src/legal_api/constants.py | 169 ++++++++++++++++-- legal-api/src/legal_api/reports/report.py | 12 +- .../business_filings/business_filings.py | 5 +- .../src/legal_api/services/document_record.py | 41 ++--- .../filings/validations/alteration.py | 23 +-- .../filings/validations/common_validations.py | 32 ++-- .../filings/validations/continuation_in.py | 17 +- .../filings/validations/dissolution.py | 9 +- .../validations/incorporation_application.py | 16 +- legal-api/tests/unit/invalid_size.pdf | Bin 0 -> 2353 bytes .../tests/unit/services/filings/test_utils.py | 30 +++- legal-api/tests/unit/valid_size.pdf | Bin 0 -> 2353 bytes 12 files changed, 260 insertions(+), 94 deletions(-) create mode 100644 legal-api/tests/unit/invalid_size.pdf create mode 100644 legal-api/tests/unit/valid_size.pdf diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 25c41b17e2..b56479b390 100644 --- a/legal-api/src/legal_api/constants.py +++ b/legal-api/src/legal_api/constants.py @@ -18,13 +18,162 @@ BOB_DATE = '2019-03-08' -class DocumentClassEnum(Enum): - CORP = 'CORP' - COOP = 'COOP' - -class DocumentTypeEnum(Enum): - CNTO = 'CNTO', - DIRECTOR_AFFIDAVIT = 'DIRECTOR_AFFIDAVIT' - CORP_AFFIDAVIT = 'CORP_AFFIDAVIT' - COOP_MEMORANDUM = 'COOP_MEMORANDUM' - COOP_RULES = 'COOP_RULES' \ No newline at end of file +class DocumentClasses(Enum): + """Render an Enum of the document service document classes.""" + + COOP = "COOP" + CORP = "CORP" + DELETED = "DELETED" + FIRM = "FIRM" + LP_LLP = "LP_LLP" + MHR = "MHR" + NR = "NR" + OTHER = "OTHER" + PPR = "PPR" + SOCIETY = "SOCIETY" + XP = "XP" + + +class DocumentTypes(Enum): + """Render an Enum of the document service document types.""" + + REG_101 = "REG_101" + REG_102 = "REG_102" + REG_103 = "REG_103" + ABAN = "ABAN" + ADDI = "ADDI" + AFFE = "AFFE" + ATTA = "ATTA" + BANK = "BANK" + BCLC = "BCLC" + CAU = "CAU" + CAUC = "CAUC" + CAUE = "CAUE" + COMP = "COMP" + COUR = "COUR" + DEAT = "DEAT" + DNCH = "DNCH" + EXMN = "EXMN" + EXNR = "EXNR" + EXRE = "EXRE" + EXRS = "EXRS" + FORE = "FORE" + FZE = "FZE" + GENT = "GENT" + LETA = "LETA" + MAID = "MAID" + MAIL = "MAIL" + MARR = "MARR" + NAMV = "NAMV" + NCAN = "NCAN" + NCON = "NCON" + NPUB = "NPUB" + NRED = "NRED" + PDEC = "PDEC" + PUBA = "PUBA" + REBU = "REBU" + REGC = "REGC" + REIV = "REIV" + REPV = "REPV" + REST = "REST" + STAT = "STAT" + SZL = "SZL" + TAXN = "TAXN" + TAXS = "TAXS" + THAW = "THAW" + TRAN = "TRAN" + VEST = "VEST" + WHAL = "WHAL" + WILL = "WILL" + XP_MISC = "XP_MISC" + COFI = "COFI" + DISS = "DISS" + DISD = "DISD" + ATTN = "ATTN" + FRMA = "FRMA" + AMLO = "AMLO" + CNTA = "CNTA" + CNTI = "CNTI" + CNTO = "CNTO" + COFF = "COFF" + COSD = "COSD" + AMLG = "AMLG" + AMAL = "AMAL" + RSRI = "RSRI" + ASNU = "ASNU" + LPRG = "LPRG" + FILE = "FILE" + CNVF = "CNVF" + COPN = "COPN" + MHSP = "MHSP" + FNCH = "FNCH" + CONS = "CONS" + PPRS = "PPRS" + PPRC = "PPRC" + ADDR = "ADDR" + ANNR = "ANNR" + CORR = "CORR" + DIRS = "DIRS" + CORC = "CORC" + SOCF = "SOCF" + CERT = "CERT" + LTR = "LTR" + CLW = "CLW" + 
BYLW = "BYLW" + CNST = "CNST" + CONT = "CONT" + SYSR = "SYSR" + ADMN = "ADMN" + RSLN = "RSLN" + AFDV = "AFDV" + SUPP = "SUPP" + MNOR = "MNOR" + FINM = "FINM" + APCO = "APCO" + RPTP = "RPTP" + DAT = "DAT" + BYLT = "BYLT" + CNVS = "CNVS" + CRTO = "CRTO" + MEM = "MEM" + PRE = "PRE" + REGO = "REGO" + PLNA = "PLNA" + REGN = "REGN" + FINC = "FINC" + BCGT = "BCGT" + CHNM = "CHNM" + OTP = "OTP" + PPR = "PPR" + LHS = "LHS" + RGS = "RGS" + HSR = "HSR" + RPL = "RPL" + FINS = "FINS" + DELETED = "DELETED" + COOP_RULES = "COOP_RULES" + COOP_MEMORANDUM = "COOP_MEMORANDUM" + CORP_AFFIDAVIT = "CORP_AFFIDAVIT" + DIRECTOR_AFFIDAVIT = "DIRECTOR_AFFIDAVIT" + PART = "PART" + REG_103E = "REG_103E" + AMEND_PERMIT = "AMEND_PERMIT" + CANCEL_PERMIT = "CANCEL_PERMIT" + REREGISTER_C = "REREGISTER_C" + MEAM = "MEAM" + COU = "COU" + CRT = "CRT" + INV = "INV" + NATB = "NATB" + NWP = "NWP" + +DOCUMENT_TYPES = { + 'coopMemorandum': { + 'class': DocumentClasses.COOP.value, + 'type': DocumentTypes.COOP_MEMORANDUM.value + }, + 'coopRules': { + 'class': DocumentClasses.COOP.value, + 'type': DocumentTypes.COOP_RULES.value + }, +} \ No newline at end of file diff --git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index 929fadc814..527135d8a0 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -47,7 +47,7 @@ from legal_api.utils.auth import jwt from legal_api.utils.formatting import float_to_str from legal_api.utils.legislation_datetime import LegislationDatetime -from legal_api.constants import DocumentClassEnum +from legal_api.constants import DocumentClasses OUTPUT_DATE_FORMAT: Final = '%B %-d, %Y' @@ -76,7 +76,6 @@ def _get_static_report(self): document_type = ReportMeta.static_reports[self._report_key]['documentType'] document_class = ReportMeta.static_reports[self._report_key]['documentType'] - print(document_type) document: Document = self._filing.documents.filter(Document.type == document_type).first() if(flags.is_on('enable-document-records')): response = DocumentRecordService.download_document( @@ -92,7 +91,6 @@ def _get_static_report(self): ) def _get_report(self): - current_app.logger.debug("Came to _get_report") if self._filing.business_id: self._business = Business.find_by_internal_id(self._filing.business_id) Report._populate_business_info_to_filing(self._filing, self._business) @@ -1518,19 +1516,19 @@ class ReportMeta: # pylint: disable=too-few-public-methods static_reports = { 'certifiedRules': { - 'documentClass': DocumentClassEnum.COOP.value, + 'documentClass': DocumentClasses.COOP.value, 'documentType': 'coop_rules' }, 'certifiedMemorandum': { - 'documentClass': DocumentClassEnum.COOP.value, + 'documentClass': DocumentClasses.COOP.value, 'documentType': 'coop_memorandum' }, 'affidavit': { - 'documentClass': DocumentClassEnum.CORP.value, + 'documentClass': DocumentClasses.CORP.value, 'documentType': 'affidavit' }, 'uploadedCourtOrder': { - 'documentClass': DocumentClassEnum.CORP.value, + 'documentClass': DocumentClasses.CORP.value, 'documentType': 'court_order' } } diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 23ac93ead2..808bdc7ca2 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -80,6 +80,7 @@ class QueryModel(BaseModel): draft: 
Optional[bool] only_validate: Optional[bool] + document_id: Optional[bool] FilingT = TypeVar('FilingT') @@ -640,9 +641,7 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s if not filing: filing = Filing() filing.business_id = business.id - current_app.logger.error("------------------------------------------------------") - current_app.logger.error(filing.business_id) - current_app.logger.error("------------------------------------------------------") + try: filing.submitter_id = user.id diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py index e659880e7c..6c46d02c03 100644 --- a/legal-api/src/legal_api/services/document_record.py +++ b/legal-api/src/legal_api/services/document_record.py @@ -13,56 +13,39 @@ # limitations under the License. """This module is a wrapper for Document Record Service.""" +import io import base64 from typing import Optional import requests -from flask import current_app, request +from flask import current_app, request, send_file from flask_babel import _ import PyPDF2 -from legal_api.constants import DocumentTypeEnum +from legal_api.constants import DocumentTypes class DocumentRecordService: """Document Storage class.""" @staticmethod - def upload_document(document_class: str, document_type: str) -> dict: + def upload_document(document_class: str, document_type: str, file) -> dict: """Upload document to Docuemtn Record Service.""" - query_params = request.args.to_dict() - file = request.data.get('file') # Ensure file exists if not file: current_app.logger.debug('No file found in request.') return {'data': 'File not provided'} - current_app.logger.debug(f'Upload file to document record service {file.filename}') + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name - url = f'{DOC_API_URL}/documents/{document_class}/{document_type}' - - # Validate file size and encryption status before submitting to DRS. 
- validation_error = DocumentRecordService.validate_pdf(file, request.content_length, document_type) - if validation_error: - return { - 'error': validation_error - } + url = f'https://bcregistry-dev.apigee.net/doc/api/v1/documents/{document_class}/{document_type}' try: - # Read and encode the file content as base64 - file_content = file.read() - file_base64 = base64.b64encode(file_content).decode('utf-8') - response_body = requests.post( url, - params=query_params, - json={ - 'filename': file.filename, - 'content': file_base64, - 'content_type': file.content_type, - }, + data=file, headers={ 'x-apikey': current_app.config.get('DOC_API_KEY', ''), 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), - 'Content-Type': file.content_type + 'Content-Type': 'application/pdf' } ).json() @@ -130,7 +113,7 @@ def get_document(document_class: str, document_service_id: str) -> dict: 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), } ).json() - current_app.logger.debug(f'Get document from document record service {response}') + current_app.logger.debug(f'Get document from document record service {document_service_id}') return response[0] except Exception as e: current_app.logger.debug(f'Error on getting a document object {e}') @@ -144,7 +127,7 @@ def download_document(document_class: str, document_service_id: str) -> dict: response = requests.get(doc_object['documentURL']) # Download file from storage response.raise_for_status() # Raise an HTTPError for bad responses (4xx and 5xx) - return response + return response.content @staticmethod def update_business_identifier(business_identifier: str, document_service_id: str): @@ -171,8 +154,8 @@ def validate_pdf(file, content_length, document_type) -> Optional[list]: """Validate the PDF file.""" msg = [] verify_paper_size = document_type in [ - DocumentTypeEnum.CNTO, - DocumentTypeEnum.DIRECTOR_AFFIDAVIT + DocumentTypes.CNTO.value, + DocumentTypes.DIRECTOR_AFFIDAVIT.value ] try: diff --git a/legal-api/src/legal_api/services/filings/validations/alteration.py b/legal-api/src/legal_api/services/filings/validations/alteration.py index 3b3da063f7..8998f126ce 100644 --- a/legal-api/src/legal_api/services/filings/validations/alteration.py +++ b/legal-api/src/legal_api/services/filings/validations/alteration.py @@ -21,15 +21,14 @@ from legal_api.errors import Error from legal_api.models import Business from legal_api.services.utils import get_bool, get_str -from legal_api.constants import DocumentClassEnum +from legal_api.constants import DocumentClasses from .common_validations import ( validate_court_order, validate_name_request, validate_pdf, validate_resolution_date_in_share_structure, - validate_share_structure, - validate_file_on_drs + validate_share_structure ) @@ -181,10 +180,11 @@ def rules_change_validation(filing): return msg if rules_file_key: - rules_err = validate_file_on_drs( - DocumentClassEnum.COOP.value, - rules_file_key, - rules_file_key_path) + rules_err = validate_pdf( + file_key=rules_file_key, + file_key_path=rules_file_key_path, + document_class=DocumentClasses.COOP.value + ) if rules_err: msg.extend(rules_err) @@ -209,10 +209,11 @@ def memorandum_change_validation(filing): return msg if memorandum_file_key: - memorandum_err = validate_file_on_drs( - DocumentClassEnum.COOP.value, - memorandum_file_key, - memorandum_file_key_path) + memorandum_err = validate_pdf( + file_key=memorandum_file_key, + file_key_path=memorandum_file_key_path, + document_class=DocumentClasses.COOP.value + ) if memorandum_err: 
msg.extend(memorandum_err) diff --git a/legal-api/src/legal_api/services/filings/validations/common_validations.py b/legal-api/src/legal_api/services/filings/validations/common_validations.py index f58ad3f2e8..9a6247df26 100644 --- a/legal-api/src/legal_api/services/filings/validations/common_validations.py +++ b/legal-api/src/legal_api/services/filings/validations/common_validations.py @@ -13,6 +13,7 @@ # limitations under the License. """Common validations share through the different filings.""" import io +import re from datetime import datetime from typing import Optional @@ -168,12 +169,23 @@ def validate_court_order(court_order_path, court_order): return None -def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = True) -> Optional[list]: +def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = True, document_class: str = None) -> Optional[list]: """Validate the PDF file.""" msg = [] + DRS_ID_PATTERN = r"^DS\d{10,}$" + file_size = 0 + try: - file = MinioService.get_file(file_key) - open_pdf_file = io.BytesIO(file.data) + file = None + if bool(re.match(DRS_ID_PATTERN, file_key)): # Check if file_key is matched with document service ID pattern + file = DocumentRecordService.download_document(document_class, file_key) + open_pdf_file = io.BytesIO(file) + file_size = len(file) + else: + file = MinioService.get_file(file_key) + open_pdf_file = io.BytesIO(file.data ) + file_info = MinioService.get_file_info(file_key) + file_size = file_info.size pdf_reader = PyPDF2.PdfFileReader(open_pdf_file) if verify_paper_size: @@ -182,14 +194,13 @@ def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = Tr msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), 'path': file_key_path}) - file_info = MinioService.get_file_info(file_key) - if file_info.size > 30000000: + if file_size > 30000000: msg.append({'error': _('File exceeds maximum size.'), 'path': file_key_path}) if pdf_reader.isEncrypted: msg.append({'error': _('File must be unencrypted.'), 'path': file_key_path}) - except Exception: + except Exception as e: msg.append({'error': _('Invalid file.'), 'path': file_key_path}) if msg: @@ -329,12 +340,3 @@ def validate_foreign_jurisdiction(foreign_jurisdiction: dict, msg.append({'error': 'Invalid region.', 'path': f'{foreign_jurisdiction_path}/region'}) return msg - -def validate_file_on_drs(document_class: str, document_service_id: str, path) -> bool: - """Validate file existence on DRS""" - msg = [] - doc = DocumentRecordService.get_document(document_class, document_service_id) - if not bool(doc.get("documentURL")): - msg.append({'error': 'File does not exist on Document Record Service', 'path': path}) - - return msg \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 245278b3e7..10eb14ff93 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -26,7 +26,7 @@ validate_name_request, validate_parties_names, validate_share_structure, - validate_file_on_drs + validate_pdf ) from legal_api.services.filings.validations.incorporation_application import ( validate_incorporation_effective_date, @@ -35,7 +35,7 @@ ) from legal_api.services.utils import get_bool, get_str from legal_api.utils.datetime import datetime as dt -from legal_api.constants import 
DocumentClassEnum +from legal_api.constants import DocumentClasses def validate(filing_json: dict) -> Optional[Error]: # pylint: disable=too-many-branches; @@ -128,7 +128,12 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty ((region := foreign_jurisdiction.get('region')) and region == 'AB')): affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey' if file_key := foreign_jurisdiction.get('affidavitFileKey'): - if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, affidavit_file_key_path): + if err := validate_pdf( + file_key=file_key, + file_key_path=affidavit_file_key_path, + verify_paper_size=False, + document_class=DocumentClasses.CORP.value + ): msg.extend(err) else: msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) @@ -158,7 +163,11 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_file_on_drs(DocumentClassEnum.CORP.value, file_key, file_key_path): + if err := validate_pdf( + file_key=file_key, + file_key_path=file_key_path, + document_class=DocumentClasses.CORP.value + ): msg.extend(err) return msg diff --git a/legal-api/src/legal_api/services/filings/validations/dissolution.py b/legal-api/src/legal_api/services/filings/validations/dissolution.py index f762766756..fba6e20d27 100644 --- a/legal-api/src/legal_api/services/filings/validations/dissolution.py +++ b/legal-api/src/legal_api/services/filings/validations/dissolution.py @@ -22,8 +22,8 @@ from legal_api.errors import Error from legal_api.models import Address, Business, PartyRole -from .common_validations import validate_court_order, validate_file_on_drs -from legal_api.constants import DocumentClassEnum +from .common_validations import validate_court_order, validate_pdf +from legal_api.constants import DocumentClasses from ...utils import get_str # noqa: I003; needed as the linter gets confused from the babel override above. 
@@ -245,7 +245,10 @@ def validate_affidavit(filing_json, legal_type, dissolution_type) -> Optional[li return [{'error': _('A valid affidavit key is required.'), 'path': affidavit_file_key_path}] - return validate_file_on_drs(DocumentClassEnum.CORP.value, affidavit_file_key, affidavit_file_key_path) + return validate_pdf( + file_key=affidavit_file_key, + file_key_path=affidavit_file_key_path, + document_class=DocumentClasses.CORP.value) return None diff --git a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py index d540a68347..6ec12c7905 100644 --- a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py +++ b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py @@ -24,13 +24,13 @@ from legal_api.models import Business from legal_api.services.utils import get_str from legal_api.utils.datetime import datetime as dt -from legal_api.constants import DocumentClassEnum +from legal_api.constants import DocumentClasses from .common_validations import ( # noqa: I001 validate_court_order, validate_name_request, validate_parties_names, - validate_file_on_drs, + validate_pdf, validate_share_structure, ) @@ -295,13 +295,21 @@ def validate_cooperative_documents(incorporation_json: dict): rules_file_key = cooperative['rulesFileKey'] rules_file_key_path = '/filing/incorporationApplication/cooperative/rulesFileKey' - rules_err = validate_file_on_drs(DocumentClassEnum.COOP.value, rules_file_key, rules_file_key_path) + rules_err = validate_pdf( + file_key=rules_file_key, + file_key_path=rules_file_key_path, + document_class=DocumentClasses.COOP.value + ) if rules_err: msg.extend(rules_err) memorandum_file_key = cooperative['memorandumFileKey'] memorandum_file_key_path = '/filing/incorporationApplication/cooperative/memorandumFileKey' - memorandum_err = validate_file_on_drs(DocumentClassEnum.COOP.value, memorandum_file_key, memorandum_file_key_path) + memorandum_err = validate_pdf( + file_key=memorandum_file_key, + file_key_path=memorandum_file_key_path, + document_class=DocumentClasses.COOP.value + ) if memorandum_err: msg.extend(memorandum_err) diff --git a/legal-api/tests/unit/invalid_size.pdf b/legal-api/tests/unit/invalid_size.pdf new file mode 100644 index 0000000000000000000000000000000000000000..053df816df2247a87bbf476addfc205e4815ef3c GIT binary patch literal 2353 zcmds3&2rl|5We#%Hfc0*rnUi+B1I;SQd_bT#g?N;a^2XmJ0J;3a3z2PLb2ULU!cA9 z-e>KCmTX3GC(~OS&Pc#67K{D)c7a;I-Pv-teW&*4AHV+v1D-2w4p{=Px#XG|o&icg zrqacnO9NA5^5=wHU0rSKQg+6Y?NrSj7bvLY(%IQ@NXG>a$$`^sI$UO!ZRyqW9xHf9 zp)&G@&pvS@QU+vSrYaM10^~%Jw{j3xJh18MsSJ^JT<{|$abj7;3M{uMY%#-TYEsE#qoQMX zVWvvM29rwO%+;cJ2Bf7qGeXI>MaM?_xktT#`d;9Mu222%Y4nuRrz<(T`Mx?W^bSR$ znZAK7_@Wwd(*eJ^QaUSO0|~VLYlk4Nvqj4P-6X+t)jwUM&{0ImcO6U{g<6hdi?Q? 
zKl+#-eHdN5KltU9Nt@mN=8Kc>j@=8-7t{WJ@8?&ollnn-C}P^X>~24PIej}%FB?gN zgkI8VJkG}h@!?59b(Fq1=RcgbPh!bW&kmB!Y2TmkH4Gn;A1{8|CHcEYkMiD=Q}J69 zd*;%VW%n$L|6BF#vzz1F%G#wn2M%7)%7E?oI3J2%xlLHv4wyD&3f(9Qomy@GUB~$wlS`ZS literal 0 HcmV?d00001 diff --git a/legal-api/tests/unit/services/filings/test_utils.py b/legal-api/tests/unit/services/filings/test_utils.py index 88fbb18790..f9e5ec991a 100644 --- a/legal-api/tests/unit/services/filings/test_utils.py +++ b/legal-api/tests/unit/services/filings/test_utils.py @@ -21,7 +21,7 @@ from reportlab.lib.pagesizes import letter from reportlab.pdfgen import canvas -from legal_api.services import MinioService +from legal_api.services import MinioService, DocumentRecordService, flags from legal_api.services.utils import get_date, get_str @@ -49,14 +49,28 @@ def test_get_str(f, p): assert isinstance(d, str) -def _upload_file(page_size, invalid): - signed_url = MinioService.create_signed_put_url('cooperative-test.pdf') - key = signed_url.get('key') - pre_signed_put = signed_url.get('preSignedUrl') +def _upload_file(page_size, invalid, document_class=None, document_type=None): + print("TYUIUYTYUYTYYUYYTYTYYTYTYYUYTYTYTYTYTYTYTYTYTYTYTYTYTYTYTYTYTYTYTYYYTYTY") + if flags.is_on('enable-document-records'): + file_path = "tests/unit/invalid_size.pdf" if invalid else "tests/unit/valid_size.pdf" + raw_data = None + with open(file_path, "rb") as data_file: + raw_data = data_file.read() + data_file.close() + response = DocumentRecordService.upload_document( + document_class, + document_type, + raw_data + ) + return response['documentServiceId'] + else: + signed_url = MinioService.create_signed_put_url('cooperative-test.pdf') + key = signed_url.get('key') + pre_signed_put = signed_url.get('preSignedUrl') - requests.put(pre_signed_put, data=_create_pdf_file(page_size, invalid).read(), - headers={'Content-Type': 'application/octet-stream'}) - return key + requests.put(pre_signed_put, data=_create_pdf_file(page_size, invalid).read(), + headers={'Content-Type': 'application/octet-stream'}) + return key def _create_pdf_file(page_size, invalid): diff --git a/legal-api/tests/unit/valid_size.pdf b/legal-api/tests/unit/valid_size.pdf new file mode 100644 index 0000000000000000000000000000000000000000..b9971a762ed6d20554e550fe0382da3c23bb6c2a GIT binary patch literal 2353 zcmds3&2rl|5We#%Hfc0*rnUi+B1I;SQd_bT#+IWKCmTX3GC(~oX1A$#E7W?z<0<~Vdv*m94PVMi%{`?2}JXhKrvJt%Il51vo1}FiU zN*8l34NQ&6pA&L*b+xTa*%=#cr)uuFKtYWzot+(rbX@R|>^se-!)0dKmR>FIv4VFL zDkE?B>=QR4WkB|2sxl!bKu(0*m4&!x9I-hEtCS2DBXg4@F~mR8cEAu^9dLsR zhz$%G@@sS?nX?JsTaK;en7}!j1*ESG1`@ZEp=PpFY55MU9O&WZiUV~NUB&9o0oU65 zt#}YT=)p3A-$V|=iU&45U6moyjthRIBu*@=Sb^mhg)L^-Oie0zY*cjYF3ePE*kDr0 zo4HyP&w#WvXGSR5w&>VsKli8?P~Qt&gr!S=q|sAKpRVNW=KJcj&^r_(&GZdy!57tt zn-2KRmC{)O8%Ut_UpoYGoh?%S|4y+G#+tn3sez4!EOMTTvEUgVITU7wUM-}tlaixZ zo?|~D9X2bfc(*lok9XVwKH4wP_A{JW)Z2>agGE)}*6MA*%N$g~ZA*NmrXywseZeB# z9kh}p3fdgoi|rLN4EB;hO@~}g%oJQNwu%aa8JRn;S^j=X_g{84>G8)m{_tad^kI1M z{@}M)CT%9Y%@-#>9J?2uFQ&cy?ys*}C-sA5AY$6ROtv4toW7l>myJ<_gx;vrc$|;> z;=_}I>L`72&VM>>pTv@%o*j%fr#*kZ*D!oQe!lo+m*nprJ<7XJPQ@Qh?3qhbmff=~ z{_oYd&u)(ID{t4Dj~$Bea_HR$VG#HMjNy*gK#nY2>CUh=VGMRc=ZS5f*1>G zP0n6&>%1t$E9G-J*A1}ot?@h$8`&DKUjI@q3@bdNnV4~1J_nLEIA=TYII2@WidmYu zQ9b0YPscR&>T%|?l#Zk3*Co!KTIGc@dH990wFQ~L?u;GjzaA^k9ET)wtiX2YzJnLE fGGIGC&IjUmZWC6v1Ex)xLN|&+r&im4*Kz&@hkcue literal 0 HcmV?d00001 From b8f111e83799353265e36f533b199dccc839f7da Mon Sep 17 00:00:00 2001 From: flutistar Date: Wed, 16 Apr 2025 07:35:11 -0700 Subject: [PATCH 133/133] fixed error on unit test --- 
.../src/entity_filer/filing_processors/dissolution.py | 4 ++--
 .../filing_components/rules_and_memorandum.py | 6 +++---
 .../entity_filer/filing_processors/incorporation_filing.py | 6 +++---
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py
index aa3500675a..82fa390739 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py
@@ -27,7 +27,7 @@
 from legal_api.services.pdf_service import RegistrarStampData
 from legal_api.utils.datetime import datetime
 from legal_api.utils.legislation_datetime import LegislationDatetime
-from legal_api.constants import DocumentClassEnum
+from legal_api.constants import DocumentClasses
 
 from entity_filer.filing_meta import FilingMeta
 from entity_filer.filing_processors.filing_components import create_office, filings
@@ -123,7 +123,7 @@ def _update_cooperative(dissolution_filing: Dict, business: Business, filing: Fi
     affidavit_file_key = dissolution_filing.get('affidavitFileKey')
     if flags.is_on('enable-document-records'):
         affidavit_file = DocumentRecordService.download_document(
-            DocumentClassEnum.COOP.value,
+            DocumentClasses.COOP.value,
             affidavit_file_key
         )
     else:
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
index 763d674fc8..6a0a3eeebb 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py
@@ -22,7 +22,7 @@
 from legal_api.services import Flags
 from legal_api.services.minio import MinioService
 from legal_api.services.document_record import DocumentRecordService
-from legal_api.constants import DocumentClassEnum
+from legal_api.constants import DocumentClasses
 from legal_api.services.pdf_service import RegistrarStampData
 
 from entity_filer.utils import replace_file_with_certified_copy
@@ -47,7 +47,7 @@ def update_rules(
     if flags.is_on('enable-document-records'):
         rules_file = DocumentRecordService.download_document(
-            DocumentClassEnum.COOP.value,
+            DocumentClasses.COOP.value,
             rules_file_key
         )
     else:
@@ -89,7 +89,7 @@ def update_memorandum(
     # create certified copy for memorandum document
     if flags.is_on('enable-document-records'):
         memorandum_file = DocumentRecordService.download_document(
-            DocumentClassEnum.COOP.value,
+            DocumentClasses.COOP.value,
             memorandum_file_key
         )
     else:
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py
index cd8f1143e7..ae17fade50 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py
@@ -22,7 +22,7 @@
 from legal_api.services import Flags
 from legal_api.services.pdf_service import RegistrarStampData
 from legal_api.services.document_record import DocumentRecordService
-from legal_api.constants import DocumentClassEnum
+from legal_api.constants import DocumentClasses
 
 from entity_filer.filing_meta import FilingMeta
 from
entity_filer.filing_processors.filing_components import aliases, business_info, filings, shares @@ -39,7 +39,7 @@ def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing) rules_file_key = cooperative_obj.get('rulesFileKey') if flags.is_on('enable-document-records'): rules_file = DocumentRecordService.download_document( - DocumentClassEnum.COOP.value, + DocumentClasses.COOP.value, rules_file_key ) else: @@ -64,7 +64,7 @@ def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing) memorandum_file_key = cooperative_obj.get('memorandumFileKey') if flags.is_on('enable-document-records'): memorandum_file = DocumentRecordService.download_document( - DocumentClassEnum.COOP.value, + DocumentClasses.COOP.value, memorandum_file_key ) else:
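
The three filing processors patched above all gate document retrieval the same way: the 'enable-document-records' feature flag selects the new Document Record Service, while the legacy Minio path is kept as the fallback. A minimal sketch of that shared pattern follows; the helper name fetch_coop_document and the normalisation of both branches to raw bytes are illustrative assumptions, not code from these patches.

    from legal_api.constants import DocumentClasses
    from legal_api.services import Flags
    from legal_api.services.document_record import DocumentRecordService
    from legal_api.services.minio import MinioService

    flags = Flags()  # pylint: disable=invalid-name


    def fetch_coop_document(file_key: str) -> bytes:
        """Fetch a COOP document from DRS when the flag is on, otherwise from Minio."""
        if flags.is_on('enable-document-records'):
            # DRS path: file_key is a document service id (e.g. 'DS0000000001').
            return DocumentRecordService.download_document(DocumentClasses.COOP.value, file_key)
        # Legacy path: file_key is a Minio object key.
        return MinioService.get_file(file_key).data

Keeping the Minio branch behind the flag, rather than deleting it outright, lets the rollout be reverted per environment without another deploy, which is why each processor repeats the same if/else instead of switching wholesale.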