diff --git a/.devops/dev-CD.yml b/.devops/dev-CD.yml deleted file mode 100644 index 907e8d0e..00000000 --- a/.devops/dev-CD.yml +++ /dev/null @@ -1,61 +0,0 @@ -# Node.js -# Build a general Node.js project with npm. -# Add steps that analyze code, save build artifacts, deploy, and more: -# https://docs.microsoft.com/azure/devops/pipelines/languages/javascript - -trigger: -- dev - -pool: - vmImage: ubuntu-latest - -steps: -- task: NodeTool@0 - inputs: - versionSpec: '18.x' - displayName: 'Install Node.js' - -- script: | - npm install - npm run build - displayName: 'npm install and build' - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/build' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: true - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/node_modules' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/config' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/package.json' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/package-lock.json' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false diff --git a/.devops/sit-CD.yml b/.devops/sit-CD.yml deleted file mode 100644 index 3f094952..00000000 --- a/.devops/sit-CD.yml +++ /dev/null @@ -1,61 +0,0 @@ -# Node.js -# Build a general Node.js project with npm. 
-# Add steps that analyze code, save build artifacts, deploy, and more: -# https://docs.microsoft.com/azure/devops/pipelines/languages/javascript - -trigger: -- stage - -pool: - vmImage: ubuntu-latest - -steps: -- task: NodeTool@0 - inputs: - versionSpec: '18.x' - displayName: 'Install Node.js' - -- script: | - npm install - npm run build - displayName: 'npm install and build' - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/build' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: true - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/node_modules' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/config' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/package.json' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false - -- task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/package-lock.json' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(Build.ArtifactStagingDirectory)/sit/api/$(Build.BuildId).zip' - replaceExistingArchive: false diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 53d2b5c8..84eb1153 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -5,14 +5,41 @@ on: types: [CD] jobs: + create_deployment: + name: Create Deployment + runs-on: ubuntu-latest + environment: ${{ github.event.client_payload.environment }} + + permissions: + deployments: write + + outputs: + deployment_id: ${{ steps.deployment.outputs.deployment_id }} + environment_url: ${{ steps.deployment.outputs.environment_url }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - uses: chrnorm/deployment-action@v2 + name: Create GitHub deployment + id: deployment + with: + initial-status: 'in_progress' + token: '${{ github.token }}' + environment-url: ${{ vars.DEPLOYMENT_URL }} + environment: ${{ github.event.client_payload.environment }} + auto-inactive: false + deploy: name: Deploy 🚀 runs-on: ubuntu-latest environment: ${{ github.event.client_payload.environment }} + needs: create_deployment steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.client_payload.ref }} - name: Setup SSH Keys and known_hosts @@ -25,3 +52,33 @@ jobs: SSH_PASS: ${{ secrets.SSH_PASS }} CONNECTION: ${{ secrets.CONNECTION }} REMOTE_PATH: ${{ secrets.REMOTE_PATH }} + + update_deployment_status: + name: Update Deployment Status ✅ + runs-on: ubuntu-latest + needs: + - create_deployment + - deploy + if: always() + + permissions: + deployments: write + + steps: + - name: Update deployment status (success) + if: ${{ needs.deploy.result == 'success' }} + uses: chrnorm/deployment-status@v2 + with: + token: '${{ github.token }}' + environment-url: ${{ needs.create_deployment.outputs.environment_url }} + deployment-id: ${{ needs.create_deployment.outputs.deployment_id }} + state: 'success' + + - 
name: Update deployment status (failure) + if: ${{ needs.deploy.result != 'success' }} + uses: chrnorm/deployment-status@v2 + with: + token: '${{ github.token }}' + environment-url: ${{ needs.create_deployment.outputs.environment_url }} + deployment-id: ${{ needs.create_deployment.outputs.deployment_id }} + state: 'failure' \ No newline at end of file diff --git a/.jira/config.yml b/.jira/config.yml new file mode 100644 index 00000000..655899cc --- /dev/null +++ b/.jira/config.yml @@ -0,0 +1,10 @@ +deployments: + environmentMapping: + development: + - "DEV-*" + testing: + - "TST-*" + staging: + - "STG-*" + production: + - "PROD-*" \ No newline at end of file diff --git a/CI/deploy.sh b/CI/deploy.sh index b7810861..73eba5c7 100755 --- a/CI/deploy.sh +++ b/CI/deploy.sh @@ -3,11 +3,11 @@ set -e echo -e "Stopping docker containers..." -CMD="cd $REMOTE_PATH && echo $SSH_PASS | sudo -S docker-compose pull api" +CMD="cd $REMOTE_PATH && echo $SSH_PASS | sudo -S docker compose pull api" ssh -oStrictHostKeyChecking=no -o PubkeyAuthentication=yes $CONNECTION "$CMD" echo -e "Stopping docker containers..." -CMD="cd $REMOTE_PATH && echo $SSH_PASS | sudo -S docker-compose up -d --no-deps api" +CMD="cd $REMOTE_PATH && echo $SSH_PASS | sudo -S docker compose up -d --no-deps api" ssh -oStrictHostKeyChecking=no -o PubkeyAuthentication=yes $CONNECTION "$CMD" echo -e "Deployed!" diff --git a/migrations/1725323084593-convert-resource-field-roles-to-objectid.ts b/migrations/1725323084593-convert-resource-field-roles-to-objectid.ts new file mode 100644 index 00000000..e84af0df --- /dev/null +++ b/migrations/1725323084593-convert-resource-field-roles-to-objectid.ts @@ -0,0 +1,105 @@ +import { Resource } from '@models'; +import { startDatabaseForMigration } from '../src/utils/migrations/database.helper'; +import mongoose from 'mongoose'; +import { logger } from '@services/logger.service'; + +/** + * Convert a value ( string or mongo object id ) to mongo object id. + * + * @param value value to transform + * @returns value as mongo object id + */ +const convertToObjectId = (value) => { + // Only convert if value is a valid ObjectId string + if (mongoose.Types.ObjectId.isValid(value) && typeof value === 'string') { + return new mongoose.Types.ObjectId(value); + } + return value; // Return original if it's not a valid string ObjectId +}; + +/** + * Sample function of up migration + * + * @returns just migrate data. 
+ */
+export const up = async () => {
+  await startDatabaseForMigration();
+  try {
+    // Find resources where fields.permissions.canSee or fields.permissions.canUpdate contain strings
+    const resources = await Resource.find({
+      $or: [
+        {
+          'fields.permissions.canSee': { $elemMatch: { $type: 'string' } },
+        },
+        {
+          'fields.permissions.canUpdate': { $elemMatch: { $type: 'string' } },
+        },
+      ],
+    });
+
+    // Loop through each resource and update the fields
+    for (const resource of resources) {
+      let updated = false;
+
+      // Iterate through the fields array
+      // eslint-disable-next-line @typescript-eslint/no-loop-func
+      resource.fields = resource.fields.map((field) => {
+        if (field.permissions) {
+          // Check and convert canSee strings to ObjectIds
+          if (field.permissions.canSee) {
+            field.permissions.canSee = field.permissions.canSee.map((perm) => {
+              const updatedPerm = convertToObjectId(perm);
+              if (updatedPerm !== perm) updated = true;
+              return updatedPerm;
+            });
+          }
+
+          // Check and convert canUpdate strings to ObjectIds
+          if (field.permissions.canUpdate) {
+            field.permissions.canUpdate = field.permissions.canUpdate.map(
+              (perm) => {
+                const updatedPerm = convertToObjectId(perm);
+                if (updatedPerm !== perm) updated = true;
+                return updatedPerm;
+              }
+            );
+          }
+        }
+        return field;
+      });
+
+      // Save the resource if any updates were made
+      if (updated) {
+        resource.markModified('fields');
+        await resource.save();
+        logger.info(
+          `Updated resource ${resource.name} with ID: ${resource._id}`
+        );
+      }
+    }
+
+    logger.info('Completed updating resources.');
+  } catch (error) {
+    logger.error('Error updating resources:', error);
+  }
+};
+
+/**
+ * Sample function of down migration
+ *
+ * @returns just migrate data.
+ */
+export const down = async () => {
+  /*
+    Write your downgrade script here!
+ */ +}; diff --git a/package-lock.json b/package-lock.json index aeda100f..67a98836 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "oort-backend", - "version": "2.6.2", + "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "oort-backend", - "version": "2.6.2", + "version": "1.0.0", "license": "ISC", "dependencies": { "@apollo/datasource-rest": "^6.1.0", diff --git a/package.json b/package.json index f62bdee1..98d231df 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "oort-backend", - "version": "2.6.2", + "version": "1.0.0", "description": "", "main": "index.js", "scripts": { diff --git a/src/models/application.model.ts b/src/models/application.model.ts index a7e32e85..e1b87cd0 100644 --- a/src/models/application.model.ts +++ b/src/models/application.model.ts @@ -25,6 +25,8 @@ export interface Application extends Document { modifiedAt: Date; description?: string; sideMenu?: boolean; + variant?: string; + logo?: string; hideMenu?: boolean; status?: any; createdBy?: mongoose.Types.ObjectId; @@ -75,6 +77,8 @@ const applicationSchema = new Schema( description: String, sideMenu: Boolean, hideMenu: Boolean, + variant: String, + logo: String, permissions: { canSee: [ { diff --git a/src/routes/download/index.ts b/src/routes/download/index.ts index 390fa024..a02de117 100644 --- a/src/routes/download/index.ts +++ b/src/routes/download/index.ts @@ -450,8 +450,8 @@ router.post('/records', async (req, res) => { await sendEmail({ message: { to: req.context.user.username, - subject: `${params.application} - Your data export is completed - ${params.fileName}`, // TODO : put in config for 1.3 - html: 'Dear colleague,\n\nPlease find attached to this e-mail the requested data export.\n\nFor any issues with the data export, please contact ems2@who.int\n\n Best regards,\nems2@who.int', // TODO : put in config for 1.3 + subject: `${params.application} - Your data export is completed - ${params.fileName}`, + html: 'Dear colleague,\n\nPlease find attached to this e-mail the requested data export.\n\nFor any issues with the data export, please contact your manager.', attachments, }, }); diff --git a/src/routes/upload/index.ts b/src/routes/upload/index.ts index 8ca46537..5f8a12d5 100644 --- a/src/routes/upload/index.ts +++ b/src/routes/upload/index.ts @@ -13,7 +13,12 @@ import { } from '@models'; import { AppAbility } from '@security/defineUserAbility'; import { Types } from 'mongoose'; -import { getUploadColumns, loadRow, uploadFile } from '@utils/files'; +import { + downloadFile, + getUploadColumns, + loadRow, + uploadFile, +} from '@utils/files'; import { getNextId, getOwnership } from '@utils/form'; import i18next from 'i18next'; import get from 'lodash/get'; @@ -23,10 +28,19 @@ import { insertRecords as insertRecordsPulljob } from '@server/pullJobScheduler' import jwtDecode from 'jwt-decode'; import { cloneDeep, has, isEqual } from 'lodash'; import { Context } from '@server/apollo/context'; +import sanitize from 'sanitize-filename'; +import { deleteFolder } from '@utils/files/deleteFolder'; +import * as fs from 'fs'; /** File size limit, in bytes */ const FILE_SIZE_LIMIT = 7 * 1024 * 1024; +/** File size limit for logo uploads (2MB) */ +const LOGO_SIZE_LIMIT = 2 * 1024 * 1024; + +/** Allowed file types for logos */ +const ALLOWED_FILE_TYPES = ['image/png', 'image/jpeg', 'image/jpg']; + /** Import data from user-uploaded files */ const router = express.Router(); @@ -666,4 +680,93 @@ router.post('/style/:application', 
async (req, res) => { } }); +/** Posts a logo to azure storage */ +router.post('/logo/:application', async (req: any, res) => { + try { + if (!req.files || Object.keys(req.files).length === 0) { + return res + .status(400) + .send(i18next.t('routes.upload.errors.missingFile')); + } + + const logo = req.files.file; + + // Check if logo is valid (size, type, etc.) + if ( + logo.size > LOGO_SIZE_LIMIT || + !ALLOWED_FILE_TYPES.includes(logo.mimetype) + ) { + return res.status(400).send(i18next.t('common.errors.invalidFile')); + } + + // Find the application by ID + const application = await Application.findById(req.params.application); + + if (!application) { + return res.status(404).send(i18next.t('common.errors.dataNotFound')); + } + + const path = await uploadFile('applications', req.params.application, logo); + await Application.updateOne( + { _id: req.params.application }, + { logo: path } + ); + + // Confirm update + return res.status(200).send({ path: logo.name }); + } catch (err) { + logger.error(err.message, { stack: err.stack }); + return res.status(500).send(i18next.t('common.errors.internalServerError')); + } +}); + +/** Gets a logo from azure storage */ +router.get('/logo/:application', async (req, res) => { + try { + const application = await Application.findById(req.params.application); + + if (!application || !application.logo) { + return res.status(404).send(i18next.t('common.errors.dataNotFound')); + } + + if (application.logo) { + const blobName = application.logo; + const path = `files/${sanitize(blobName)}`; + await downloadFile('applications', blobName, path); + res.download(path, () => { + fs.unlink(path, () => { + logger.info('file deleted'); + }); + }); + } + } catch (err) { + logger.error(err.message, { stack: err.stack }); + res.status(500).send(i18next.t('common.errors.internalServerError')); + } +}); + +/** Deletes a logo from azure storage */ +router.delete('/logo/:application', async (req, res) => { + try { + const application = await Application.findById(req.params.application); + + if (!application || !application.logo) { + return res.status(404).send(i18next.t('common.errors.dataNotFound')); + } + + const blobName = application.logo; + await deleteFolder('applications', blobName); + + await Application.updateOne( + { _id: req.params.application }, + { logo: null } + ); + + return res.status(200).send({ message: 'Logo deleted' }); + } catch (err) { + logger.error(err.message, { stack: err.stack }); + return res.status(500).send(i18next.t('common.errors.internalServerError')); + } +}); + export default router; diff --git a/src/schema/mutation/duplicateApplication.mutation.ts b/src/schema/mutation/duplicateApplication.mutation.ts index ec0ff4fd..61bacc12 100644 --- a/src/schema/mutation/duplicateApplication.mutation.ts +++ b/src/schema/mutation/duplicateApplication.mutation.ts @@ -87,6 +87,7 @@ export default { name: args.name, description: baseApplication.description, sideMenu: baseApplication.sideMenu, + variant: baseApplication.variant, hideMenu: baseApplication.hideMenu, status: status.pending, createdBy: user._id, diff --git a/src/schema/mutation/editApplication.mutation.ts b/src/schema/mutation/editApplication.mutation.ts index 3961a869..21b0a6a6 100644 --- a/src/schema/mutation/editApplication.mutation.ts +++ b/src/schema/mutation/editApplication.mutation.ts @@ -24,6 +24,8 @@ type EditApplicationArgs = { id: string | Types.ObjectId; description?: string; sideMenu?: boolean; + variant?: string; + logo?: string; hideMenu?: boolean; name?: string; status?: 
StatusType; @@ -42,6 +44,8 @@ export default { id: { type: new GraphQLNonNull(GraphQLID) }, description: { type: GraphQLString }, sideMenu: { type: GraphQLBoolean }, + variant: { type: GraphQLString }, + logo: { type: GraphQLString }, hideMenu: { type: GraphQLBoolean }, name: { type: GraphQLString }, status: { type: StatusEnumType }, @@ -90,6 +94,8 @@ export default { args.pages && { pages: args.pages }, args.settings && { settings: args.settings }, args.permissions && { permissions: args.permissions }, + args.variant && { variant: args.variant }, + args.logo && { logo: args.logo }, !isNil(args.sideMenu) && { sideMenu: args.sideMenu }, !isNil(args.hideMenu) && { hideMenu: args.hideMenu } ); diff --git a/src/schema/mutation/editPage.mutation.ts b/src/schema/mutation/editPage.mutation.ts index 5cf7b9fb..87b7b501 100644 --- a/src/schema/mutation/editPage.mutation.ts +++ b/src/schema/mutation/editPage.mutation.ts @@ -85,7 +85,7 @@ export default { // Create update const update = { ...(args.name && { name: args.name }), - ...(args.icon && { icon: args.icon }), + ...(!isNil(args.icon) && { icon: args.icon }), }; // Updating permissions diff --git a/src/schema/mutation/editStep.mutation.ts b/src/schema/mutation/editStep.mutation.ts index 6a05a070..f412474b 100644 --- a/src/schema/mutation/editStep.mutation.ts +++ b/src/schema/mutation/editStep.mutation.ts @@ -101,7 +101,7 @@ export default { // Create update const update = { ...(args.name && { name: args.name }), - ...(args.icon && { icon: args.icon }), + ...(!isNil(args.icon) && { icon: args.icon }), ...(args.type && { type: args.type }), ...(args.content && { content: args.content }), ...(!isNil(args.nextStepOnSave) && { diff --git a/src/schema/types/application.type.ts b/src/schema/types/application.type.ts index b3ea8723..e12b20a2 100644 --- a/src/schema/types/application.type.ts +++ b/src/schema/types/application.type.ts @@ -55,6 +55,8 @@ export const ApplicationType = new GraphQLObjectType({ createdAt: { type: GraphQLString }, modifiedAt: { type: GraphQLString }, description: { type: GraphQLString }, + variant: { type: GraphQLString }, + logo: { type: GraphQLString }, sideMenu: { type: GraphQLBoolean, resolve(parent) { diff --git a/src/server/pullJobScheduler.ts b/src/server/pullJobScheduler.ts index 992909a0..615a5106 100644 --- a/src/server/pullJobScheduler.ts +++ b/src/server/pullJobScheduler.ts @@ -148,66 +148,25 @@ const fetchRecordsServiceToService = ( token: string ): void => { const apiConfiguration: ApiConfiguration = pullJob.apiConfiguration; - // Hard coded for EIOS due to specific behavior - const EIOS_ORIGIN = 'https://portal.who.int/eios/'; - // === HARD CODED ENDPOINTS === const headers: any = { Authorization: 'Bearer ' + token, }; - // Hardcoded specific behavior for EIOS - if (apiConfiguration.endpoint.startsWith(EIOS_ORIGIN)) { - // === HARD CODED ENDPOINTS === - const boardsUrl = 'GetBoards?tags=signal+app'; - const articlesUrl = 'GetPinnedArticles'; - axios({ - url: apiConfiguration.endpoint + boardsUrl, - method: 'get', - headers, - }) - .then(({ data }) => { - if (data && data.result) { - const boardIds = data.result.map((x) => x.id); - axios({ - url: `${apiConfiguration.endpoint}${articlesUrl}?boardIds=${boardIds}`, - method: 'get', - headers, - }) - .then(({ data: data2 }) => { - if (data2 && data2.result) { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - insertRecords(data2.result, pullJob, true, false); - } - }) - .catch((err) => { - logger.error( - `Job ${pullJob.name} : Failed to get 
pinned articles : ${err}` - ); - }); - } - }) - .catch((err) => { - logger.error( - `Job ${pullJob.name} : Failed to get signal app boards : ${err}` - ); - }); - } else { - // Generic case - axios({ - url: apiConfiguration.endpoint + pullJob.url, - method: 'get', - headers, + // Generic case + axios({ + url: apiConfiguration.endpoint + pullJob.url, + method: 'get', + headers, + }) + .then(({ data }) => { + const records = pullJob.path ? get(data, pullJob.path) : data; + if (records) { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + insertRecords(records, pullJob, false, false); + } }) - .then(({ data }) => { - const records = pullJob.path ? get(data, pullJob.path) : data; - if (records) { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - insertRecords(records, pullJob, false, false); - } - }) - .catch((err) => { - logger.error(`Job ${pullJob.name} : Failed to fetch data : ${err}`); - }); - } + .catch((err) => { + logger.error(`Job ${pullJob.name} : Failed to fetch data : ${err}`); + }); }; /**