From 30919b322c7eb19b3c7c484bb9e7a31de6467749 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Tue, 30 Sep 2025 08:43:13 +0300 Subject: [PATCH 1/5] Add node: prefix to internal imports These were missed when we started enforcing the node prefix several months ago. --- karma.conf.js | 2 +- scripts/base-href.ts | 4 ++-- scripts/env-to-yaml.ts | 4 ++-- scripts/merge-i18n-files.ts | 2 +- scripts/sync-i18n-files.ts | 2 +- server.ts | 4 ++-- webpack/helpers.ts | 4 ++-- webpack/webpack.browser.ts | 2 +- webpack/webpack.common.ts | 2 +- webpack/webpack.mirador.config.ts | 4 ++-- 10 files changed, 15 insertions(+), 15 deletions(-) diff --git a/karma.conf.js b/karma.conf.js index f96558bfaff..217d930541e 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -21,7 +21,7 @@ module.exports = function (config) { } }, coverageIstanbulReporter: { - dir: require('path').join(__dirname, './coverage/dspace-angular'), + dir: require('node:path').join(__dirname, './coverage/dspace-angular'), reports: ['html', 'lcovonly', 'text-summary'], fixWebpackSourcePaths: true }, diff --git a/scripts/base-href.ts b/scripts/base-href.ts index 7212e1c5168..2e87e94fe33 100644 --- a/scripts/base-href.ts +++ b/scripts/base-href.ts @@ -1,5 +1,5 @@ -import { existsSync, writeFileSync } from 'fs'; -import { join } from 'path'; +import { existsSync, writeFileSync } from 'node:fs'; +import { join } from 'node:path'; import { AppConfig } from '../src/config/app-config.interface'; import { buildAppConfig } from '../src/config/config.server'; diff --git a/scripts/env-to-yaml.ts b/scripts/env-to-yaml.ts index 6e8153f4c11..2388174c2ab 100644 --- a/scripts/env-to-yaml.ts +++ b/scripts/env-to-yaml.ts @@ -1,6 +1,6 @@ -import { existsSync, writeFileSync } from 'fs'; +import { existsSync, writeFileSync } from 'node:fs'; import { dump } from 'js-yaml'; -import { join } from 'path'; +import { join } from 'node:path'; /** * Script to help convert previous version environment.*.ts to yaml. 
diff --git a/scripts/merge-i18n-files.ts b/scripts/merge-i18n-files.ts index 64442f57884..f54e2beab0a 100644 --- a/scripts/merge-i18n-files.ts +++ b/scripts/merge-i18n-files.ts @@ -1,6 +1,6 @@ import { projectRoot} from '../webpack/helpers'; const commander = require('commander'); -const fs = require('fs'); +const fs = require('node:fs'); const JSON5 = require('json5'); const _cliProgress = require('cli-progress'); const _ = require('lodash'); diff --git a/scripts/sync-i18n-files.ts b/scripts/sync-i18n-files.ts index 170266b6a28..d67180ecf9f 100644 --- a/scripts/sync-i18n-files.ts +++ b/scripts/sync-i18n-files.ts @@ -1,7 +1,7 @@ import { projectRoot } from '../webpack/helpers'; const commander = require('commander'); -const fs = require('fs'); +const fs = require('node:fs'); const JSON5 = require('json5'); const _cliProgress = require('cli-progress'); const _ = require('lodash'); diff --git a/server.ts b/server.ts index 2f66c78d86c..6059b6e53ef 100644 --- a/server.ts +++ b/server.ts @@ -32,8 +32,8 @@ import { createServer } from 'https'; import { json } from 'body-parser'; import { createHttpTerminator } from 'http-terminator'; -import { readFileSync } from 'fs'; -import { join } from 'path'; +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; import { enableProdMode } from '@angular/core'; diff --git a/webpack/helpers.ts b/webpack/helpers.ts index f0b42a8a690..b90576afea3 100644 --- a/webpack/helpers.ts +++ b/webpack/helpers.ts @@ -1,5 +1,5 @@ -import { readFileSync, readdirSync, statSync, Stats } from 'fs'; -import { join, resolve } from 'path'; +import { readFileSync, readdirSync, statSync, Stats } from 'node:fs'; +import { join, resolve } from 'node:path'; const md5 = require('md5'); diff --git a/webpack/webpack.browser.ts b/webpack/webpack.browser.ts index 168185ef242..3528775831c 100644 --- a/webpack/webpack.browser.ts +++ b/webpack/webpack.browser.ts @@ -1,4 +1,4 @@ -import { join } from 'path'; +import { join } from 'node:path'; import { buildAppConfig } from '../src/config/config.server'; import { commonExports } from './webpack.common'; diff --git a/webpack/webpack.common.ts b/webpack/webpack.common.ts index b1c42df8ad2..42993511fe5 100644 --- a/webpack/webpack.common.ts +++ b/webpack/webpack.common.ts @@ -2,7 +2,7 @@ import { globalCSSImports, projectRoot, getFileHashes, calculateFileHash } from import { EnvironmentPlugin } from 'webpack'; const CopyWebpackPlugin = require('copy-webpack-plugin'); -const path = require('path'); +const path = require('node:path'); const sass = require('sass'); const JSON5 = require('json5'); diff --git a/webpack/webpack.mirador.config.ts b/webpack/webpack.mirador.config.ts index 7699cf1bdc2..13d24c90e38 100644 --- a/webpack/webpack.mirador.config.ts +++ b/webpack/webpack.mirador.config.ts @@ -1,7 +1,7 @@ const CopyWebpackPlugin = require('copy-webpack-plugin'); -const path = require('path'); +const path = require('node:path'); // @ts-ignore -const fs = require('fs'); +const fs = require('node:fs'); module.exports = { mode: 'production', From 1686812da88ff6627bd912226c94b382af3132a8 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 1 Oct 2025 07:42:04 +0300 Subject: [PATCH 2/5] angular.json: update lint paths We need to lint the scripts and webpack helpers as well. 
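For context, the style these newly linted directories are expected to follow is the explicit node: specifier for Node.js builtins introduced in the previous patch. A minimal sketch of that style (illustrative only; the file checked below is made up and not part of any diff in this series):

// Builtins are imported with the node: scheme, making it obvious they are core
// modules rather than npm dependencies. Both module syntaxes support it:
import { join } from 'node:path';   // ES-style import of a builtin
const fs = require('node:fs');      // CommonJS require works the same way

// hypothetical usage, just to show the specifiers resolve like the bare ones did
console.log(fs.existsSync(join(__dirname, 'package.json')));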
--- angular.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/angular.json b/angular.json index 4b95be55122..80ee44af421 100644 --- a/angular.json +++ b/angular.json @@ -271,7 +271,9 @@ "cypress/**/*.ts", "lint/**/*.ts", "src/**/*.html", - "src/**/*.json5" + "src/**/*.json5", + "scripts/*.ts", + "webpack/*.ts" ] } } From 33f0f56b7dcd023c2f79f969561942f9877cba1e Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 1 Oct 2025 08:01:27 +0300 Subject: [PATCH 3/5] Run npm lint-fix Automatically fix most lint errors. --- scripts/base-href.ts | 9 ++- scripts/env-to-yaml.ts | 10 +++- scripts/merge-i18n-files.ts | 3 +- scripts/serve.ts | 4 +- scripts/sync-i18n-files.ts | 64 ++++++++++----------- scripts/test-rest.ts | 94 +++++++++++++++---------------- webpack/helpers.ts | 20 +++++-- webpack/webpack.browser.ts | 8 +-- webpack/webpack.common.ts | 42 ++++++++------ webpack/webpack.mirador.config.ts | 16 +++--- 10 files changed, 146 insertions(+), 124 deletions(-) diff --git a/scripts/base-href.ts b/scripts/base-href.ts index 2e87e94fe33..08d47868b26 100644 --- a/scripts/base-href.ts +++ b/scripts/base-href.ts @@ -1,4 +1,7 @@ -import { existsSync, writeFileSync } from 'node:fs'; +import { + existsSync, + writeFileSync, +} from 'node:fs'; import { join } from 'node:path'; import { AppConfig } from '../src/config/app-config.interface'; @@ -6,9 +9,9 @@ import { buildAppConfig } from '../src/config/config.server'; /** * Script to set baseHref as `ui.nameSpace` for development mode. Adds `baseHref` to angular.json build options. - * + * * Usage (see package.json): - * + * * yarn base-href */ diff --git a/scripts/env-to-yaml.ts b/scripts/env-to-yaml.ts index 2388174c2ab..9529effc062 100644 --- a/scripts/env-to-yaml.ts +++ b/scripts/env-to-yaml.ts @@ -1,12 +1,16 @@ -import { existsSync, writeFileSync } from 'node:fs'; -import { dump } from 'js-yaml'; +import { + existsSync, + writeFileSync, +} from 'node:fs'; import { join } from 'node:path'; +import { dump } from 'js-yaml'; + /** * Script to help convert previous version environment.*.ts to yaml. 
* * Usage (see package.json): - * + * * yarn env:yaml [relative path to environment.ts file] (optional relative path to write yaml file) * */ diff --git a/scripts/merge-i18n-files.ts b/scripts/merge-i18n-files.ts index f54e2beab0a..01d473a4238 100644 --- a/scripts/merge-i18n-files.ts +++ b/scripts/merge-i18n-files.ts @@ -1,4 +1,5 @@ -import { projectRoot} from '../webpack/helpers'; +import { projectRoot } from '../webpack/helpers'; + const commander = require('commander'); const fs = require('node:fs'); const JSON5 = require('json5'); diff --git a/scripts/serve.ts b/scripts/serve.ts index ee8570a45c1..5c887a8a7e4 100644 --- a/scripts/serve.ts +++ b/scripts/serve.ts @@ -1,4 +1,4 @@ -import { spawn } from 'child_process'; +import { spawn } from 'node:child_process'; import { AppConfig } from '../src/config/app-config.interface'; import { buildAppConfig } from '../src/config/config.server'; @@ -11,5 +11,5 @@ const appConfig: AppConfig = buildAppConfig(); */ spawn( `ng serve --host ${appConfig.ui.host} --port ${appConfig.ui.port} --serve-path ${appConfig.ui.nameSpace} --ssl ${appConfig.ui.ssl} ${process.argv.slice(2).join(' ')} --configuration development`, - { stdio: 'inherit', shell: true } + { stdio: 'inherit', shell: true }, ); diff --git a/scripts/sync-i18n-files.ts b/scripts/sync-i18n-files.ts index d67180ecf9f..1626b059989 100644 --- a/scripts/sync-i18n-files.ts +++ b/scripts/sync-i18n-files.ts @@ -41,13 +41,13 @@ function parseCliInput() { if (!program.targetFile) { fs.readdirSync(projectRoot(LANGUAGE_FILES_LOCATION)).forEach(file => { if (!program.sourceFile.toString().endsWith(file)) { - const targetFileLocation = projectRoot(LANGUAGE_FILES_LOCATION + "/" + file); + const targetFileLocation = projectRoot(LANGUAGE_FILES_LOCATION + '/' + file); console.log('Syncing file at: ' + targetFileLocation + ' with source file at: ' + program.sourceFile); if (program.outputDir) { if (!fs.existsSync(program.outputDir)) { fs.mkdirSync(program.outputDir); } - const outputFileLocation = program.outputDir + "/" + file; + const outputFileLocation = program.outputDir + '/' + file; console.log('Output location: ' + outputFileLocation); syncFileWithSource(targetFileLocation, outputFileLocation); } else { @@ -97,12 +97,12 @@ function syncFileWithSource(pathToTargetFile, pathToOutputFile) { const sourceLines = []; const targetLines = []; const existingTargetFile = readFileIfExists(pathToTargetFile); - existingTargetFile.toString().split("\n").forEach((function (line) { + existingTargetFile.toString().split('\n').forEach((function (line) { targetLines.push(line.trim()); })); progressBar.update(10); const sourceFile = readFileIfExists(program.sourceFile); - sourceFile.toString().split("\n").forEach((function (line) { + sourceFile.toString().split('\n').forEach((function (line) { sourceLines.push(line.trim()); })); progressBar.update(20); @@ -113,22 +113,22 @@ function syncFileWithSource(pathToTargetFile, pathToOutputFile) { const file = fs.createWriteStream(pathToOutputFile); file.on('error', function (err) { - console.error('Something went wrong writing to output file at: ' + pathToOutputFile + err) + console.error('Something went wrong writing to output file at: ' + pathToOutputFile + err); }); file.on('open', function() { - file.write("{\n"); + file.write('{\n'); outputChunks.forEach(function (chunk) { progressBar.increment(); - chunk.split("\n").forEach(function (line) { - file.write((line === '' ? '' : ` ${line}`) + "\n"); + chunk.split('\n').forEach(function (line) { + file.write((line === '' ? 
'' : ` ${line}`) + '\n'); }); }); - file.write("\n}"); + file.write('\n}'); file.end(); }); file.on('finish', function() { const osName = process.platform; - if (osName.startsWith("win")) { + if (osName.startsWith('win')) { replaceLineEndingsToCRLF(pathToOutputFile); } }); @@ -151,10 +151,10 @@ function compareChunksAndCreateOutput(sourceChunks, targetChunks, progressBar) { sourceChunks.map((sourceChunk) => { progressBar.increment(); if (sourceChunk.trim().length !== 0) { - let newChunk = []; - const sourceList = sourceChunk.split("\n"); + const newChunk = []; + const sourceList = sourceChunk.split('\n'); const keyValueSource = sourceList[sourceList.length - 1]; - const keySource = getSubStringBeforeLastString(keyValueSource, ":"); + const keySource = getSubStringBeforeLastString(keyValueSource, ':'); const commentSource = getSubStringBeforeLastString(sourceChunk, keyValueSource); const correspondingTargetChunk = targetChunks.find((targetChunk) => { @@ -163,7 +163,7 @@ function compareChunksAndCreateOutput(sourceChunks, targetChunks, progressBar) { // Create new chunk with: the source comments, the commented source key-value, the todos and either the old target key-value pair or if it's a new pair, the source key-value pair newChunk.push(removeWhiteLines(commentSource)); - newChunk.push("// " + keyValueSource); + newChunk.push('// ' + keyValueSource); if (correspondingTargetChunk === undefined) { newChunk.push(NEW_MESSAGE_TODO); newChunk.push(keyValueSource); @@ -171,7 +171,7 @@ function compareChunksAndCreateOutput(sourceChunks, targetChunks, progressBar) { createNewChunkComparingSourceAndTarget(correspondingTargetChunk, sourceChunk, commentSource, keyValueSource, newChunk); } - outputChunks.push(newChunk.filter(Boolean).join("\n")); + outputChunks.push(newChunk.filter(Boolean).join('\n')); } else { outputChunks.push(sourceChunk); } @@ -191,22 +191,22 @@ function createNewChunkComparingSourceAndTarget(correspondingTargetChunk, source let commentsOfSourceHaveChanged = false; let messageOfSourceHasChanged = false; - const targetList = correspondingTargetChunk.split("\n"); - const oldKeyValueInTargetComments = getSubStringWithRegex(correspondingTargetChunk, "\\s*\\/\\/\\s*\".*"); + const targetList = correspondingTargetChunk.split('\n'); + const oldKeyValueInTargetComments = getSubStringWithRegex(correspondingTargetChunk, '\\s*\\/\\/\\s*".*'); let keyValueTarget = targetList[targetList.length - 1]; - if (!keyValueTarget.endsWith(",")) { - keyValueTarget = keyValueTarget + ","; + if (!keyValueTarget.endsWith(',')) { + keyValueTarget = keyValueTarget + ','; } if (oldKeyValueInTargetComments != null) { - const oldKeyValueUncommented = getSubStringWithRegex(oldKeyValueInTargetComments[0], "\".*")[0]; + const oldKeyValueUncommented = getSubStringWithRegex(oldKeyValueInTargetComments[0], '".*')[0]; if (!(_.isEmpty(correspondingTargetChunk) && _.isEmpty(commentSource)) && !removeWhiteLines(correspondingTargetChunk).includes(removeWhiteLines(commentSource.trim()))) { commentsOfSourceHaveChanged = true; newChunk.push(COMMENTS_CHANGED_TODO); } - const parsedOldKey = JSON5.stringify("{" + oldKeyValueUncommented + "}"); - const parsedSourceKey = JSON5.stringify("{" + keyValueSource + "}"); + const parsedOldKey = JSON5.stringify('{' + oldKeyValueUncommented + '}'); + const parsedSourceKey = JSON5.stringify('{' + keyValueSource + '}'); if (!_.isEqual(parsedOldKey, parsedSourceKey)) { messageOfSourceHasChanged = true; newChunk.push(MESSAGE_CHANGED_TODO); @@ -219,7 +219,7 @@ function 
createNewChunkComparingSourceAndTarget(correspondingTargetChunk, source // Adds old todos found in target comments if they've not been added already function addOldTodosIfNeeded(targetList, newChunk, commentsOfSourceHaveChanged, messageOfSourceHasChanged) { targetList.map((targetLine) => { - const foundTODO = getSubStringWithRegex(targetLine, "\\s*//\\s*TODO.*"); + const foundTODO = getSubStringWithRegex(targetLine, '\\s*//\\s*TODO.*'); if (foundTODO != null) { const todo = foundTODO[0]; if (!((todo.includes(COMMENTS_CHANGED_TODO) && commentsOfSourceHaveChanged) @@ -262,7 +262,7 @@ function createChunks(lines, progressBar, creatingTarget) { nextChunk.push(line); const newMessageLineIfExists = nextChunk.find((lineInChunk) => lineInChunk.trim().startsWith(NEW_MESSAGE_TODO)); if (newMessageLineIfExists === undefined || !creatingTarget) { - chunks.push(nextChunk.join("\n")); + chunks.push(nextChunk.join('\n')); } nextChunk = []; } @@ -284,19 +284,19 @@ function readFileIfExists(pathToFile) { } function isOneLineCommentLine(line) { - return (line.startsWith("//")); + return (line.startsWith('//')); } function isStartOfMultiLineComment(line) { - return (line.startsWith("/*")); + return (line.startsWith('/*')); } function isEndOfMultiLineComment(line) { - return (line.endsWith("*/")); + return (line.endsWith('*/')); } function isKeyValuePair(line) { - return (line.startsWith("\"")); + return (line.startsWith('"')); } @@ -318,7 +318,7 @@ function getOutputFileLocationIfExistsElseTargetFileLocation(targetLocation) { } function checkIfPathToFileIsValid(pathToCheck) { - if (!pathToCheck.includes("/")) { + if (!pathToCheck.includes('/')) { return true; } return checkIfFileExists(getPathOfDirectory(pathToCheck)); @@ -329,11 +329,11 @@ function checkIfFileExists(pathToCheck) { } function getPathOfDirectory(pathToCheck) { - return getSubStringBeforeLastString(pathToCheck, "/"); + return getSubStringBeforeLastString(pathToCheck, '/'); } function removeWhiteLines(string) { - return string.replace(/^(?=\n)$|^\s*|\s*$|\n\n+/gm, "") + return string.replace(/^(?=\n)$|^\s*|\s*$|\n\n+/gm, ''); } /** @@ -342,6 +342,6 @@ function removeWhiteLines(string) { */ function replaceLineEndingsToCRLF(filePath) { const data = readFileIfExists(filePath); - const result = data.replace(/\n/g,"\r\n"); + const result = data.replace(/\n/g,'\r\n'); fs.writeFileSync(filePath, result, 'utf8'); } diff --git a/scripts/test-rest.ts b/scripts/test-rest.ts index 9066777c42a..5bfd4722aa7 100644 --- a/scripts/test-rest.ts +++ b/scripts/test-rest.ts @@ -1,5 +1,5 @@ -import { request } from 'http'; -import { request as https_request } from 'https'; +import { request } from 'node:http'; +import { request as https_request } from 'node:https'; import { AppConfig } from '../src/config/app-config.interface'; import { buildAppConfig } from '../src/config/config.server'; @@ -20,43 +20,43 @@ console.log(`...Testing connection to REST API at ${restUrl}...\n`); // If SSL enabled, test via HTTPS, else via HTTP if (appConfig.rest.ssl) { - const req = https_request(restUrl, (res) => { - console.log(`RESPONSE: ${res.statusCode} ${res.statusMessage} \n`); - // We will keep reading data until the 'end' event fires. - // This ensures we don't just read the first chunk. 
- let data = ''; - res.on('data', (chunk) => { - data += chunk; - }); - res.on('end', () => { - checkJSONResponse(data); - }); + const req = https_request(restUrl, (res) => { + console.log(`RESPONSE: ${res.statusCode} ${res.statusMessage} \n`); + // We will keep reading data until the 'end' event fires. + // This ensures we don't just read the first chunk. + let data = ''; + res.on('data', (chunk) => { + data += chunk; }); - - req.on('error', error => { - console.error('ERROR connecting to REST API\n' + error); + res.on('end', () => { + checkJSONResponse(data); }); + }); + + req.on('error', error => { + console.error('ERROR connecting to REST API\n' + error); + }); - req.end(); + req.end(); } else { - const req = request(restUrl, (res) => { - console.log(`RESPONSE: ${res.statusCode} ${res.statusMessage} \n`); - // We will keep reading data until the 'end' event fires. - // This ensures we don't just read the first chunk. - let data = ''; - res.on('data', (chunk) => { - data += chunk; - }); - res.on('end', () => { - checkJSONResponse(data); - }); + const req = request(restUrl, (res) => { + console.log(`RESPONSE: ${res.statusCode} ${res.statusMessage} \n`); + // We will keep reading data until the 'end' event fires. + // This ensures we don't just read the first chunk. + let data = ''; + res.on('data', (chunk) => { + data += chunk; }); - - req.on('error', error => { - console.error('ERROR connecting to REST API\n' + error); + res.on('end', () => { + checkJSONResponse(data); }); + }); + + req.on('error', error => { + console.error('ERROR connecting to REST API\n' + error); + }); - req.end(); + req.end(); } /** @@ -64,19 +64,19 @@ if (appConfig.rest.ssl) { * @param responseData response data */ function checkJSONResponse(responseData: any): any { - let parsedData; - try { - parsedData = JSON.parse(responseData); - console.log('Checking JSON returned for validity...'); - console.log(`\t"dspaceVersion" = ${parsedData.dspaceVersion}`); - console.log(`\t"dspaceUI" = ${parsedData.dspaceUI}`); - console.log(`\t"dspaceServer" = ${parsedData.dspaceServer}`); - console.log(`\t"dspaceServer" property matches UI's "rest" config? ${(parsedData.dspaceServer === appConfig.rest.baseUrl)}`); - // Check for "authn" and "sites" in "_links" section as they should always exist (even if no data)! - const linksFound: string[] = Object.keys(parsedData._links); - console.log(`\tDoes "/api" endpoint have HAL links ("_links" section)? ${linksFound.includes('authn') && linksFound.includes('sites')}`); - } catch (err) { - console.error('ERROR: INVALID DSPACE REST API! Response is not valid JSON!'); - console.error(`Response returned:\n${responseData}`); - } + let parsedData; + try { + parsedData = JSON.parse(responseData); + console.log('Checking JSON returned for validity...'); + console.log(`\t"dspaceVersion" = ${parsedData.dspaceVersion}`); + console.log(`\t"dspaceUI" = ${parsedData.dspaceUI}`); + console.log(`\t"dspaceServer" = ${parsedData.dspaceServer}`); + console.log(`\t"dspaceServer" property matches UI's "rest" config? ${(parsedData.dspaceServer === appConfig.rest.baseUrl)}`); + // Check for "authn" and "sites" in "_links" section as they should always exist (even if no data)! + const linksFound: string[] = Object.keys(parsedData._links); + console.log(`\tDoes "/api" endpoint have HAL links ("_links" section)? ${linksFound.includes('authn') && linksFound.includes('sites')}`); + } catch (err) { + console.error('ERROR: INVALID DSPACE REST API! 
Response is not valid JSON!'); + console.error(`Response returned:\n${responseData}`); + } } diff --git a/webpack/helpers.ts b/webpack/helpers.ts index b90576afea3..872c4e5c9c9 100644 --- a/webpack/helpers.ts +++ b/webpack/helpers.ts @@ -1,7 +1,15 @@ -import { readFileSync, readdirSync, statSync, Stats } from 'node:fs'; -import { join, resolve } from 'node:path'; - -const md5 = require('md5'); +import { + readdirSync, + readFileSync, + Stats, + statSync, +} from 'node:fs'; +import { + join, + resolve, +} from 'node:path'; + +const crypto = require('node:crypto'); export const projectRoot = (relativePath) => { return resolve(__dirname, '..', relativePath); @@ -21,7 +29,7 @@ export const globalCSSImports = () => { */ export function calculateFileHash(filePath: string): string { const fileContent: Buffer = readFileSync(filePath); - return md5(fileContent); + return crypto.createHash('md5').update(fileContent).digest('hex'); } /** @@ -32,7 +40,7 @@ export function calculateFileHash(filePath: string): string { */ export function getFileHashes(folderPath: string, regExp: RegExp): { [fileName: string]: string } { const files: string[] = readdirSync(folderPath); - let hashes: { [fileName: string]: string } = {}; + const hashes: { [fileName: string]: string } = {}; for (const file of files) { if (file.match(regExp)) { diff --git a/webpack/webpack.browser.ts b/webpack/webpack.browser.ts index 3528775831c..955f9d7c377 100644 --- a/webpack/webpack.browser.ts +++ b/webpack/webpack.browser.ts @@ -4,7 +4,7 @@ import { buildAppConfig } from '../src/config/config.server'; import { commonExports } from './webpack.common'; const CompressionPlugin = require('compression-webpack-plugin'); -const zlib = require('zlib'); +const zlib = require('node:zlib'); module.exports = Object.assign({}, commonExports, { target: 'web', @@ -34,13 +34,13 @@ module.exports = Object.assign({}, commonExports, { setupMiddlewares(middlewares, server) { buildAppConfig(join(process.cwd(), 'src/assets/config.json')); return middlewares; - } + }, }, watchOptions: { // Ignore directories that should not be watched for recompiling angular ignored: [ '**/node_modules', '**/_build', '**/.git', '**/docker', - '**/.angular', '**/.idea', '**/.vscode', '**/.history', '**/.vsix' - ] + '**/.angular', '**/.idea', '**/.vscode', '**/.history', '**/.vsix', + ], }, }); diff --git a/webpack/webpack.common.ts b/webpack/webpack.common.ts index 42993511fe5..55c1d5348a6 100644 --- a/webpack/webpack.common.ts +++ b/webpack/webpack.common.ts @@ -1,6 +1,12 @@ -import { globalCSSImports, projectRoot, getFileHashes, calculateFileHash } from './helpers'; import { EnvironmentPlugin } from 'webpack'; +import { + calculateFileHash, + getFileHashes, + globalCSSImports, + projectRoot, +} from './helpers'; + const CopyWebpackPlugin = require('copy-webpack-plugin'); const path = require('node:path'); const sass = require('sass'); @@ -11,7 +17,7 @@ export const copyWebpackOptions = { { from: path.join(__dirname, '..', 'node_modules', '@fortawesome', 'fontawesome-free', 'webfonts'), to: path.join('assets', 'fonts'), - force: undefined + force: undefined, }, { from: path.join(__dirname, '..', 'src', 'assets', '**', '*.json5').replace(/\\/g, '/'), @@ -26,7 +32,7 @@ export const copyWebpackOptions = { }, transform(content) { return JSON.stringify(JSON5.parse(content.toString())); - } + }, }, { from: path.join(__dirname, '..', 'src', 'assets'), @@ -50,26 +56,26 @@ export const copyWebpackOptions = { }, { from: path.join(__dirname, '..', 'src', 'robots.txt.ejs'), - to: 
'assets/robots.txt.ejs' - } - ] + to: 'assets/robots.txt.ejs', + }, + ], }; const SCSS_LOADERS = [ { loader: 'postcss-loader', options: { - sourceMap: true - } + sourceMap: true, + }, }, { loader: 'sass-loader', options: { sourceMap: true, sassOptions: { - includePaths: [projectRoot('./')] - } - } + includePaths: [projectRoot('./')], + }, + }, }, ]; @@ -84,34 +90,34 @@ export const commonExports = { rules: [ { test: /\.ts$/, - loader: '@ngtools/webpack' + loader: '@ngtools/webpack', }, { test: /\.scss$/, exclude: [ /node_modules/, - /(_exposed)?_variables.scss$|[\/|\\]src[\/|\\]themes[\/|\\].+?[\/|\\]styles[\/|\\].+\.scss$/ + /(_exposed)?_variables.scss$|[\/|\\]src[\/|\\]themes[\/|\\].+?[\/|\\]styles[\/|\\].+\.scss$/, ], use: [ ...SCSS_LOADERS, { loader: 'sass-resources-loader', options: { - resources: globalCSSImports() + resources: globalCSSImports(), }, - } - ] + }, + ], }, { test: /(_exposed)?_variables.scss$|[\/|\\]src[\/|\\]themes[\/|\\].+?[\/|\\]styles[\/|\\].+\.scss$/, exclude: [/node_modules/], use: [ ...SCSS_LOADERS, - ] + ], }, ], }, ignoreWarnings: [ /src\/themes\/[^/]+\/.*theme.module.ts is part of the TypeScript compilation but it's unused/, - ] + ], }; diff --git a/webpack/webpack.mirador.config.ts b/webpack/webpack.mirador.config.ts index 13d24c90e38..d7c12170b30 100644 --- a/webpack/webpack.mirador.config.ts +++ b/webpack/webpack.mirador.config.ts @@ -6,23 +6,23 @@ const fs = require('node:fs'); module.exports = { mode: 'production', entry: { - mirador: fs.existsSync('./src/mirador-viewer/config.local.js')? './src/mirador-viewer/config.local.js' : - './src/mirador-viewer/config.default.js' + mirador: fs.existsSync('./src/mirador-viewer/config.local.js') ? './src/mirador-viewer/config.local.js' : + './src/mirador-viewer/config.default.js', }, output: { path: path.resolve(__dirname, '..' , 'dist/iiif/mirador'), - filename: '[name].js' + filename: '[name].js', }, devServer: { contentBase: '../dist/iiif/mirador', }, resolve: { fallback: { - url: false - }}, + url: false, + } }, plugins: [new CopyWebpackPlugin({ patterns: [ - {from: './src/mirador-viewer/mirador.html', to: './index.html'} - ] - })] + { from: './src/mirador-viewer/mirador.html', to: './index.html' }, + ], + })], }; From 2299c5129b6b6084e8400e155d7f716f5aaf5ec0 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 1 Oct 2025 09:49:29 +0300 Subject: [PATCH 4/5] scripts/merge-i18n-files.ts: remove unused lodash import Now that we are properly linting the scripts directory there are a few new errors we need to fix. --- scripts/merge-i18n-files.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/merge-i18n-files.ts b/scripts/merge-i18n-files.ts index 01d473a4238..cd750313296 100644 --- a/scripts/merge-i18n-files.ts +++ b/scripts/merge-i18n-files.ts @@ -4,7 +4,6 @@ const commander = require('commander'); const fs = require('node:fs'); const JSON5 = require('json5'); const _cliProgress = require('cli-progress'); -const _ = require('lodash'); const program = new commander.Command(); program.version('1.0.0', '-v, --version'); From 254ea5bd53acded1d1cd58b5636f82ed7a8ac894 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 1 Oct 2025 09:55:48 +0300 Subject: [PATCH 5/5] scripts/sync-i18n-files.ts: scope import of lodash Now that we are properly linting the scripts directory there are a few issues we missed before. This imports the specific modules we need from lodash instead of the entire package. 
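A minimal sketch of the difference (illustrative only, not taken from the diff below):

// Importing the whole package loads all of lodash and leaves a broad `_` binding around:
//   const _ = require('lodash');
//   _.isEqual(a, b);
//
// Importing the per-method modules pulls in only what the script actually uses:
import isEmpty from 'lodash/isEmpty';
import isEqual from 'lodash/isEqual';

console.log(isEqual({ a: 1 }, { a: 1 })); // true
console.log(isEmpty([]));                 // true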
--- scripts/sync-i18n-files.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/sync-i18n-files.ts b/scripts/sync-i18n-files.ts index 1626b059989..b26fb696853 100644 --- a/scripts/sync-i18n-files.ts +++ b/scripts/sync-i18n-files.ts @@ -1,10 +1,12 @@ +import isEmpty from 'lodash/isEmpty'; +import isEqual from 'lodash/isEqual'; + import { projectRoot } from '../webpack/helpers'; const commander = require('commander'); const fs = require('node:fs'); const JSON5 = require('json5'); const _cliProgress = require('cli-progress'); -const _ = require('lodash'); const program = new commander.Command(); program.version('1.0.0', '-v, --version'); @@ -201,13 +203,13 @@ function createNewChunkComparingSourceAndTarget(correspondingTargetChunk, source if (oldKeyValueInTargetComments != null) { const oldKeyValueUncommented = getSubStringWithRegex(oldKeyValueInTargetComments[0], '".*')[0]; - if (!(_.isEmpty(correspondingTargetChunk) && _.isEmpty(commentSource)) && !removeWhiteLines(correspondingTargetChunk).includes(removeWhiteLines(commentSource.trim()))) { + if (!(isEmpty(correspondingTargetChunk) && isEmpty(commentSource)) && !removeWhiteLines(correspondingTargetChunk).includes(removeWhiteLines(commentSource.trim()))) { commentsOfSourceHaveChanged = true; newChunk.push(COMMENTS_CHANGED_TODO); } const parsedOldKey = JSON5.stringify('{' + oldKeyValueUncommented + '}'); const parsedSourceKey = JSON5.stringify('{' + keyValueSource + '}'); - if (!_.isEqual(parsedOldKey, parsedSourceKey)) { + if (!isEqual(parsedOldKey, parsedSourceKey)) { messageOfSourceHasChanged = true; newChunk.push(MESSAGE_CHANGED_TODO); }