diff --git a/src/handlers/get.js b/src/handlers/get.js
index 87916a3b..3f16c13a 100644
--- a/src/handlers/get.js
+++ b/src/handlers/get.js
@@ -11,6 +11,7 @@
  */
 import { getSource } from '../routes/source.js';
 import getList from '../routes/list.js';
+import getListPaginated from '../routes/list-paginated.js';
 import logout from '../routes/logout.js';
 import { getConfig } from '../routes/config.js';
 import { getVersionSource, getVersionList } from '../routes/version.js';
@@ -24,13 +25,14 @@ function getRobots() {
   return { body, status: 200 };
 }
 
-export default async function getHandler({ env, daCtx }) {
+export default async function getHandler({ req, env, daCtx }) {
   const { path } = daCtx;
 
   if (path.startsWith('/favicon.ico')) return get404();
   if (path.startsWith('/robots.txt')) return getRobots();
 
   if (path.startsWith('/source')) return getSource({ env, daCtx });
+  if (path.startsWith('/list-paginated')) return getListPaginated({ req, env, daCtx });
   if (path.startsWith('/list')) return getList({ env, daCtx });
   if (path.startsWith('/config')) return getConfig({ env, daCtx });
   if (path.startsWith('/versionlist')) return getVersionList({ env, daCtx });
diff --git a/src/index.js b/src/index.js
index ca62928d..47016bf3 100644
--- a/src/index.js
+++ b/src/index.js
@@ -36,7 +36,7 @@ export default {
         respObj = await headHandler({ env, daCtx });
         break;
       case 'GET':
-        respObj = await getHandler({ env, daCtx });
+        respObj = await getHandler({ req, env, daCtx });
         break;
       case 'PUT':
         respObj = await postHandler({ req, env, daCtx });
diff --git a/src/routes/list-paginated.js b/src/routes/list-paginated.js
new file mode 100644
index 00000000..ea45bfe8
--- /dev/null
+++ b/src/routes/list-paginated.js
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024 Adobe. All rights reserved.
+ * This file is licensed to you under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License. You may obtain a copy
+ * of the License at http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under
+ * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
+ * OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+import listBuckets from '../storage/bucket/list.js';
+import { listObjectsPaginated } from '../storage/object/list.js';
+import { getChildRules, hasPermission } from '../utils/auth.js';
+
+export default async function getListPaginated({ req, env, daCtx }) {
+  if (!daCtx.org) return listBuckets(env, daCtx);
+  if (!hasPermission(daCtx, daCtx.key, 'read')) return { status: 403 };
+
+  // Get the child rules of the current folder and store this in daCtx.aclCtx
+  getChildRules(daCtx);
+
+  const { searchParams } = new URL(req.url);
+  const limit = Number.parseInt(searchParams.get('limit'), 10);
+  const offset = Number.parseInt(searchParams.get('offset'), 10);
+
+  function numOrUndef(num) {
+    return Number.isNaN(num) ? undefined : num;
+  }
+
+  return /* await */ listObjectsPaginated(env, daCtx, numOrUndef(limit), numOrUndef(offset));
+}
diff --git a/src/storage/object/list.js b/src/storage/object/list.js
index 9132fef2..4414da4a 100644
--- a/src/storage/object/list.js
+++ b/src/storage/object/list.js
@@ -15,18 +15,77 @@ import {
 } from '@aws-sdk/client-s3';
 
 import getS3Config from '../utils/config.js';
-import formatList from '../utils/list.js';
+import formatList, { formatPaginatedList } from '../utils/list.js';
 
-function buildInput({ org, key, maxKeys }) {
+const LIST_LIMIT = 5000;
+
+function buildInput({
+  org, key, maxKeys, continuationToken,
+}) {
   const input = {
     Bucket: `${org}-content`,
     Prefix: key ? `${key}/` : null,
     Delimiter: '/',
   };
   if (maxKeys) input.MaxKeys = maxKeys;
+  if (continuationToken) input.ContinuationToken = continuationToken;
   return input;
 }
 
+async function scanFiles({
+  daCtx, env, offset, limit,
+}) {
+  const config = getS3Config(env);
+  const client = new S3Client(config);
+
+  let continuationToken = null;
+  let visibleFiles = [];
+  const fetchedItems = [];
+  const fetchedPrefixes = [];
+
+  while (visibleFiles.length < offset + limit) {
+    const remainingKeys = offset + limit - visibleFiles.length;
+    // fetch 25 extra to account for some hidden files (reduce likelihood of continuation token)
+    const numKeysToFetch = Math.min(1000, remainingKeys + 25);
+
+    const input = buildInput({ ...daCtx, maxKeys: numKeysToFetch, continuationToken });
+    const command = new ListObjectsV2Command(input);
+
+    const resp = await client.send(command);
+    continuationToken = resp.NextContinuationToken;
+
+    fetchedItems.push(...(resp.Contents ?? []));
+    fetchedPrefixes.push(...(resp.CommonPrefixes ?? []));
+    visibleFiles = formatPaginatedList(fetchedItems, fetchedPrefixes, daCtx);
+
+    if (!continuationToken) break;
+  }
+
+  return visibleFiles.slice(offset, offset + limit);
+}
+
+export async function listObjectsPaginated(env, daCtx, maxKeys = 1000, offset = 0) {
+  if (offset + maxKeys > LIST_LIMIT) {
+    return { status: 400 };
+  }
+
+  try {
+    const files = await scanFiles({
+      daCtx, env, limit: maxKeys, offset,
+    });
+    return {
+      body: JSON.stringify({
+        offset,
+        limit: maxKeys,
+        data: files,
+      }),
+      status: 200,
+    };
+  } catch (e) {
+    return { body: '', status: 404 };
+  }
+}
+
 export default async function listObjects(env, daCtx, maxKeys) {
   const config = getS3Config(env);
   const client = new S3Client(config);
@@ -35,7 +94,6 @@ export default async function listObjects(env, daCtx, maxKeys) {
   const command = new ListObjectsV2Command(input);
   try {
     const resp = await client.send(command);
-    // console.log(resp);
     const body = formatList(resp, daCtx);
     return {
       body: JSON.stringify(body),
diff --git a/src/storage/utils/list.js b/src/storage/utils/list.js
index ee8b962c..1749931d 100644
--- a/src/storage/utils/list.js
+++ b/src/storage/utils/list.js
@@ -13,63 +13,92 @@ import {
   ListObjectsV2Command,
 } from '@aws-sdk/client-s3';
 
-export default function formatList(resp, daCtx) {
+function mapPrefixes(CommonPrefixes, daCtx) {
+  return CommonPrefixes?.map((prefix) => {
+    const name = prefix.Prefix.slice(0, -1).split('/').pop();
+    const splitName = name.split('.');
+
+    // Do not add any extension folders
+    if (splitName.length > 1) return null;
+
+    const path = `/${daCtx.org}/${prefix.Prefix.slice(0, -1)}`;
+
+    return { path, name };
+  }).filter((x) => !!x) ?? [];
+}
+
+function mapContents(Contents, folders, daCtx) {
+  return Contents?.map((content) => {
+    let key = content.Key;
+    const itemName = key.split('/').pop();
+    const splitName = itemName.split('.');
+    // file.jpg.props should not be a part of the list
+    // hidden files (.props) should not be a part of this list
+    if (splitName.length !== 2) return null;
+
+    const [name, ext, props] = splitName;
+
+    // Do not show any props sidecar files
+    if (props) return null;
+
+    // See if the folder is already in the list
+    if (ext === 'props') {
+      if (folders.some((item) => item.name === name && !item.ext)) return null;
+
+      // Remove props from the key so it can look like a folder
+      key = key.replace('.props', '');
+    }
+
+    // Do not show any hidden files.
+    if (!name) return null;
+    const item = { path: `/${daCtx.org}/${key}`, name };
+    if (ext !== 'props') {
+      item.ext = ext;
+      item.lastModified = content.LastModified.getTime();
+    }
+
+    return item;
+  }).filter((x) => !!x) ?? [];
+}
+
+// Performs the same as formatList, but sorts so that the order follows the S3
+// client response rather than plain alphabetical order.
+// This prevents bugs when sorting across pages of the paginated API response.
+// However, the order differs slightly from the formatList return value for
+// names that share a prefix but have different lengths.
+export function formatPaginatedList(items, prefixes, daCtx) {
   function compare(a, b) {
-    if (a.name < b.name) return -1;
-    if (a.name > b.name) return 1;
+    const aN = a.name;
+    const bN = b.name;
+    if (aN.startsWith(bN) || bN.startsWith(aN)) return bN.length - aN.length;
+    if (aN < bN) return -1;
+    if (aN > bN) return 1;
     return undefined;
   }
 
-  const { CommonPrefixes, Contents } = resp;
+  const folders = mapPrefixes(prefixes, daCtx);
+  const files = mapContents(items, folders, daCtx);
 
   const combined = [];
+  combined.push(...files, ...folders);
 
-  if (CommonPrefixes) {
-    CommonPrefixes.forEach((prefix) => {
-      const name = prefix.Prefix.slice(0, -1).split('/').pop();
-      const splitName = name.split('.');
-
-      // Do not add any extension folders
-      if (splitName.length > 1) return;
+  return combined.sort(compare);
+}
 
-      const path = `/${daCtx.org}/${prefix.Prefix.slice(0, -1)}`;
-      combined.push({ path, name });
-    });
+export default function formatList(resp, daCtx) {
+  function compare(a, b) {
+    if (a.name < b.name) return -1;
+    if (a.name > b.name) return 1;
+    return undefined;
   }
 
-  if (Contents) {
-    Contents.forEach((content) => {
-      const itemName = content.Key.split('/').pop();
-      const splitName = itemName.split('.');
-      // file.jpg.props should not be a part of the list
-      // hidden files (.props) should not be a part of this list
-      if (splitName.length !== 2) return;
-
-      const [name, ext, props] = splitName;
-
-      // Do not show any props sidecar files
-      if (props) return;
-
-      if (ext === 'props') {
-        // Do not add if it already exists as a folder (does not have an extension)
-        if (combined.some((item) => item.name === name && !item.ext)) return;
-
-        // Remove props from the key so it can look like a folder
-        // eslint-disable-next-line no-param-reassign
-        content.Key = content.Key.replace('.props', '');
-      }
-
-      // Do not show any hidden files.
-      if (!name) return;
-      const item = { path: `/${daCtx.org}/${content.Key}`, name };
-      if (ext !== 'props') {
-        item.ext = ext;
-        item.lastModified = content.LastModified.getTime();
-      }
-
-      combined.push(item);
-    });
-  }
+  const { CommonPrefixes, Contents } = resp;
+
+  const folders = mapPrefixes(CommonPrefixes, daCtx);
+  const files = mapContents(Contents, folders, daCtx);
+
+  const combined = [];
+  combined.push(...files, ...folders);
 
   return combined.sort(compare);
 }
diff --git a/test/routes/list-paginated.test.js b/test/routes/list-paginated.test.js
new file mode 100644
index 00000000..b951e6b8
--- /dev/null
+++ b/test/routes/list-paginated.test.js
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2025 Adobe. All rights reserved.
+ * This file is licensed to you under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License. You may obtain a copy
+ * of the License at http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under
+ * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
+ * OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+import assert from 'assert';
+import esmock from 'esmock';
+
+describe('List Route', () => {
+  it('Test getListPaginated with permissions', async () => {
+    const loCalled = [];
+    const listObjectsPaginated = (e, c) => {
+      loCalled.push({ e, c });
+      return {};
+    }
+
+    const ctx = { org: 'foo', key: 'q/q/q' };
+    const hasPermission = (c, k, a) => {
+      if (k === 'q/q/q' && a === 'read') {
+        return false;
+      }
+      return true;
+    }
+
+    const getListPaginated = await esmock(
+      '../../src/routes/list-paginated.js', {
+        '../../src/storage/object/list.js': {
+          listObjectsPaginated
+        },
+        '../../src/utils/auth.js': {
+          hasPermission
+        }
+      }
+    );
+
+    const req = {
+      url: new URL('https://admin.da.live/list/foo/bar'),
+    }
+
+    const resp = await getListPaginated({ req, env: {}, daCtx: ctx, aclCtx: {} });
+    assert.strictEqual(403, resp.status);
+    assert.strictEqual(0, loCalled.length);
+
+    const aclCtx = { pathLookup: new Map() };
+    await getListPaginated({ req, env: {}, daCtx: { org: 'bar', key: 'q/q', users: [], aclCtx }});
+    assert.strictEqual(1, loCalled.length);
+    assert.strictEqual('q/q', loCalled[0].c.key);
+
+    const childRules = aclCtx.childRules;
+    assert.strictEqual(1, childRules.length);
+    assert(childRules[0].startsWith('/q/q/**='), 'Should have defined some child rule');
+  });
+
+  it('parses request params', async () => {
+    const loCalled = [];
+    const listObjectsPaginated = (e, c, limit, offset) => {
+      loCalled.push({ offset, limit });
+      return {};
+    }
+
+    const hasPermission = () => true;
+
+    const getListPaginated = await esmock(
+      '../../src/routes/list-paginated.js', {
+        '../../src/storage/object/list.js': {
+          listObjectsPaginated
+        },
+        '../../src/utils/auth.js': {
+          hasPermission,
+          getChildRules: () => {}
+        }
+      }
+    );
+
+    const ctx = { org: 'foo', };
+    const reqs = [
+      { url: 'https://admin.da.live/list/foo/bar?limit=12&offset=1' },
+      { url: 'https://admin.da.live/list/foo/bar?limit=asdf&offset=17' },
+      { url: 'https://admin.da.live/list/foo/bar?limit=12&offset=asdf' },
+    ];
+    await getListPaginated({ req: reqs[0], env: {}, daCtx: ctx, aclCtx: {} });
+    assert.deepStrictEqual(loCalled[0], { limit: 12, offset: 1 });
+    await getListPaginated({ req: reqs[1], env: {}, daCtx: ctx, aclCtx: {} });
+    assert.deepStrictEqual(loCalled[1], { limit: undefined, offset: 17 });
+    await getListPaginated({ req: reqs[2], env: {}, daCtx: ctx, aclCtx: {} });
+    assert.deepStrictEqual(loCalled[2], { limit: 12, offset: undefined });
+  });
+});
diff --git a/test/storage/object/list.test.js b/test/storage/object/list.test.js
index 032e11d7..529660ac 100644
--- a/test/storage/object/list.test.js
+++ b/test/storage/object/list.test.js
@@ -16,9 +16,11 @@ import { mockClient } from 'aws-sdk-client-mock';
 
 const s3Mock = mockClient(S3Client);
 
-import listObjects from '../../../src/storage/object/list.js';
+import listObjects, {listObjectsPaginated} from '../../../src/storage/object/list.js';
 
 const Contents = [
+  { Key: 'wknd/abc1234.html', LastModified: new Date() },
+  { Key: 'wknd/abc123.html', LastModified: new Date() },
   { Key: 'wknd/index.html', LastModified: new Date() },
   { Key: 'wknd/nav.html', LastModified: new Date() },
   { Key: 'wknd/footer.html', LastModified: new Date() },
@@ -40,7 +42,7 @@ describe('List Objects', () => {
     const daCtx = { org: 'adobe', key: 'wknd' };
     const resp = await listObjects({}, daCtx);
     const data = JSON.parse(resp.body);
-    assert.strictEqual(data.length, 3);
+    assert.strictEqual(data.length, Contents.length);
     assert(data.every((item) => item.ext && item.lastModified));
   });
 
@@ -57,4 +59,123 @@ describe('List Objects', () => {
     const data = JSON.parse(resp.body);
     assert.strictEqual(data.length, 2, 'Should only return 2 items');
   });
-})
+
+  it('sorts the results with shorter prefixes first', async () => {
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+    }).resolves({ $metadata: { httpStatusCode: 200 }, Contents });
+
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp = await listObjects({}, daCtx);
+    const data = JSON.parse(resp.body);
+
+    const firstIndex = data.findIndex((x) => x.name === 'abc123');
+    const secondIndex = data.findIndex((x) => x.name === 'abc1234');
+    assert.strictEqual(true, firstIndex < secondIndex);
+  });
+});
+
+describe('list paginated objects', async () => {
+  it('correctly handles continuation token', async () => {
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+    }).resolves({
+      $metadata: { httpStatusCode: 200 },
+      Contents: [Contents[0], Contents[1]],
+      NextContinuationToken: 'token'
+    });
+
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+      ContinuationToken: 'token'
+    }).resolves({ $metadata: { httpStatusCode: 200 }, Contents: [Contents[2], Contents[3]] });
+
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp = await listObjectsPaginated({}, daCtx);
+    const { data, limit, offset } = JSON.parse(resp.body);
+    assert.strictEqual(data.length, 4, 'Should return all items');
+    assert.strictEqual(limit, 1000, 'Should use default limit if no limit passed');
+    assert.strictEqual(offset, 0, 'Should use default offset if no offset passed');
+  });
+
+  it('correctly passes limit and offset', async () => {
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+      MaxKeys: 27,
+    }).resolves({
+      $metadata: { httpStatusCode: 200 },
+      Contents: Contents,
+      NextContinuationToken: 'token',
+    });
+
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp = await listObjectsPaginated({}, daCtx, 2, 1);
+    const { data, limit, offset } = JSON.parse(resp.body);
+    assert.strictEqual(data.length, 2, 'Should return 2 items');
+    assert.strictEqual(data[1].name, 'index', 'Should return correct items');
+    assert.strictEqual(limit, 2, 'Should return the passed limit');
+    assert.strictEqual(offset, 1, 'Should return the passed offset');
+  });
+
+  it('fetches more until enough files are present', async () => {
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+      MaxKeys: 29,
+    }).resolves({
+      $metadata: { httpStatusCode: 200 },
+      Contents: new Array(29).fill({ Key: '.ignored', LastModified: new Date() }),
+      NextContinuationToken: 'token',
+    });
+
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+      MaxKeys: 29,
+      ContinuationToken: 'token',
+    }).resolves({
+      $metadata: { httpStatusCode: 200 },
+      Contents: Contents,
+      NextContinuationToken: 'token',
+    });
+
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp = await listObjectsPaginated({}, daCtx, 4, 0);
+    const { data } = JSON.parse(resp.body);
+    assert.strictEqual(data.length, 4, 'Should return 4 items');
+  });
+
+  it('sorts the results with longer prefixes first', async () => {
+    s3Mock.on(ListObjectsV2Command, {
+      Bucket: 'adobe-content',
+      Prefix: 'wknd/',
+      Delimiter: '/',
+    }).resolves({ $metadata: { httpStatusCode: 200 }, Contents });
+
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp = await listObjectsPaginated({}, daCtx);
+    const data = JSON.parse(resp.body).data;
+
+    const firstIndex = data.findIndex((x) => x.name === 'abc1234');
+    const secondIndex = data.findIndex((x) => x.name === 'abc123');
+    assert.strictEqual(true, firstIndex < secondIndex);
+  });
+
+  it('enforces size limit', async () => {
+    const daCtx = { org: 'adobe', key: 'wknd' };
+    const resp1 = await listObjectsPaginated({}, daCtx, 5001, 0);
+    assert.strictEqual(resp1.status, 400);
+    const resp2 = await listObjectsPaginated({}, daCtx, 500, 4501);
+    assert.strictEqual(resp2.status, 400);
+  });
+});
diff --git a/test/storage/utils/list.test.js b/test/storage/utils/list.test.js
index 12da216b..3caa1939 100644
--- a/test/storage/utils/list.test.js
+++ b/test/storage/utils/list.test.js
@@ -3,59 +3,64 @@
 import assert from 'assert';
 import sinon from 'sinon';
 
 import getDaCtx from '../../../src/utils/daCtx.js';
-import formatList, { listCommand } from '../../../src/storage/utils/list.js';
-
-const MOCK = {
-  CommonPrefixes: [
-    { Prefix: 'da-aem-boilerplate/' },
-    { Prefix: 'blog/' },
-    { Prefix: 'da/' },
-    { Prefix: 'dac/' },
-    { Prefix: 'milo/' },
-    { Prefix: 'dark-alley.jpg/' },
-  ],
-  Contents: [
-    {
-      Key: 'blog.props',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'da.props',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'folder-only.props',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'test.html',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'dark-alley.jpg.props',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'dark-alley.jpg',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'empty-folder-with-sibling-file.props',
-      LastModified: new Date('2025-01-01'),
-    },
-    {
-      Key: 'empty-folder-with-sibling-file.html',
-      LastModified: new Date('2025-01-01'),
-    }
-  ],
-};
+import formatList, {formatPaginatedList, listCommand} from '../../../src/storage/utils/list.js';
+
+function getMock() {
+  const MOCK = {
+    CommonPrefixes: [
+      { Prefix: 'da-aem-boilerplate/' },
+      { Prefix: 'blog/' },
+      { Prefix: 'da/' },
+      { Prefix: 'dac/' },
+      { Prefix: 'milo/' },
+      { Prefix: 'dark-alley.jpg/' },
+    ],
+    Contents: [
+      {
+        Key: 'blog.props',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'da.props',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'folder-only.props',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'test.html',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'dark-alley.jpg.props',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'dark-alley.jpg',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'empty-folder-with-sibling-file.props',
+        LastModified: new Date('2025-01-01'),
+      },
+      {
+        Key: 'empty-folder-with-sibling-file.html',
+        LastModified: new Date('2025-01-01'),
+      }
+    ],
+  };
+  return MOCK;
+}
+
 
 const req = new Request('https://example.com/source/adobecom');
 const daCtx = getDaCtx(req, {});
 
 describe('Format object list', () => {
-  const list = formatList(MOCK, daCtx);
+  const mockInput = getMock();
+  const list = formatList(mockInput, daCtx);
 
   it('should return a true folder / common prefix', () => {
     assert.strictEqual(list[0].name, 'blog');
@@ -86,14 +91,14 @@ describe('Format object list', () => {
   });
 
   it('should handle empty CommonPrefixes', () => {
-    const emptyMock = { Contents: MOCK.Contents };
+    const emptyMock = { Contents: getMock().Contents };
     const result = formatList(emptyMock, daCtx);
     assert(Array.isArray(result));
     assert(result.length > 0);
   });
 
   it('should handle empty Contents', () => {
-    const emptyMock = { CommonPrefixes: MOCK.CommonPrefixes };
+    const emptyMock = { CommonPrefixes: getMock().CommonPrefixes };
     const result = formatList(emptyMock, daCtx);
     assert(Array.isArray(result));
     assert(result.length > 0);
@@ -190,6 +195,10 @@ describe('Format object list', () => {
     assert.strictEqual(result[1].name, 'beta');
     assert.strictEqual(result[2].name, 'zebra');
   });
+
+  it('formatting should not have side effects', () => {
+    assert.deepStrictEqual(mockInput, getMock());
+  });
 });
 
 describe('listCommand', () => {
@@ -200,7 +209,7 @@ describe('listCommand', () => {
     mockS3Client = {
       send: sinon.stub()
     };
-    
+
     // Create a proper daCtx object for testing
     testDaCtx = {
       bucket: 'test-bucket',
@@ -217,7 +226,7 @@ describe('listCommand', () => {
   it('should return sourceKeys array when item has extension', async () => {
     const daCtxWithExt = { ...testDaCtx, ext: 'html' };
    const result = await listCommand(daCtxWithExt, {}, mockS3Client);
-    
+
     assert.deepStrictEqual(result, { sourceKeys: [testDaCtx.key] });
     assert.strictEqual(mockS3Client.send.callCount, 0);
   });
@@ -230,11 +239,11 @@ describe('listCommand', () => {
       ],
       NextContinuationToken: 'next-token'
     };
-    
+
    mockS3Client.send.resolves(mockResponse);
-    
+
    const result = await listCommand(testDaCtx, {}, mockS3Client);
-    
+
    assert.strictEqual(mockS3Client.send.callCount, 1);
    assert.deepStrictEqual(result, {
      sourceKeys: [testDaCtx.key, `${testDaCtx.key}.props`, 'adobecom/test/file1.html', 'adobecom/test/file2.html'],
@@ -248,12 +257,12 @@ describe('listCommand', () => {
        { Key: 'adobecom/test/file3.html' }
      ]
    };
-    
+
    mockS3Client.send.resolves(mockResponse);
-    
+
    const details = { continuationToken: 'prev-token' };
    const result = await listCommand(testDaCtx, details, mockS3Client);
-    
+
    assert.strictEqual(mockS3Client.send.callCount, 1);
    const callArgs = mockS3Client.send.firstCall.args[0];
    console.log('Call args:', JSON.stringify(callArgs, null, 2));
@@ -269,11 +278,11 @@ describe('listCommand', () => {
    const mockResponse = {
      Contents: []
    };
-    
+
    mockS3Client.send.resolves(mockResponse);
-    
+
    const result = await listCommand(testDaCtx, {}, mockS3Client);
-    
+
    assert.deepStrictEqual(result, {
      sourceKeys: [testDaCtx.key, `${testDaCtx.key}.props`],
      continuationToken: undefined
@@ -286,14 +295,157 @@ describe('listCommand', () => {
        { Key: 'adobecom/test/file1.html' }
      ]
    };
-    
+
    mockS3Client.send.resolves(mockResponse);
-    
+
    const result = await listCommand(testDaCtx, {}, mockS3Client);
-    
+
    assert.deepStrictEqual(result, {
      sourceKeys: [testDaCtx.key, `${testDaCtx.key}.props`, 'adobecom/test/file1.html'],
      continuationToken: undefined
    });
  });
});
+
+describe('format paginated object list', () => {
+  const mockInput = getMock();
+  const list = formatPaginatedList(mockInput.Contents, mockInput.CommonPrefixes, daCtx);
+
+  it('should return a true folder / common prefix', () => {
+    assert.strictEqual(list[0].name, 'blog');
+  });
+
+  it('should return a contents-based folder', () => {
+    const folderOnly = list.find((item) => { return item.name === 'folder-only' });
+    assert.strictEqual(folderOnly.name, 'folder-only');
+  });
+
+  it('should not return a props file of same folder name', () => {
+    const found = list.reduce((acc, item) => {
+      if (item.name === 'blog') acc.push(item);
+      return acc;
+    },[]);
+
+    assert.strictEqual(found.length, 1);
+  });
+
+  it('should not have a filename props file in the list', () => {
+    const propsSidecar = list.find((item) => { return item.name === 'dark-alley.jpg.props' });
+    assert.strictEqual(propsSidecar, undefined);
+  });
+
+  it('should handle empty folders with sibling file names of same name', () => {
+    const filtered = list.filter((item) => { return item.name === 'empty-folder-with-sibling-file' });
+    assert.strictEqual(filtered.length, 2);
+  });
+
+  it('should handle empty CommonPrefixes', () => {
+    const emptyMock = { Contents: getMock().Contents };
+    const result = formatList(emptyMock, daCtx);
+    assert(Array.isArray(result));
+    assert(result.length > 0);
+  });
+
+  it('should handle empty Contents', () => {
+    const emptyMock = { CommonPrefixes: getMock().CommonPrefixes };
+    const result = formatList(emptyMock, daCtx);
+    assert(Array.isArray(result));
+    assert(result.length > 0);
+  });
+
+  it('should handle both empty CommonPrefixes and Contents', () => {
+    const emptyMock = {};
+    const result = formatList(emptyMock, daCtx);
+    assert(Array.isArray(result));
+    assert.strictEqual(result.length, 0);
+  });
+
+  it('should filter out extension folders from CommonPrefixes', () => {
+    const mockWithExtensionFolder = {
+      CommonPrefixes: [
+        { Prefix: 'file.jpg/' },
+        { Prefix: 'normal-folder/' }
+      ]
+    };
+    const result = formatList(mockWithExtensionFolder, daCtx);
+    const extensionFolder = result.find(item => item.name === 'file.jpg');
+    assert.strictEqual(extensionFolder, undefined);
+    const normalFolder = result.find(item => item.name === 'normal-folder');
+    assert(normalFolder);
+  });
+
+  it('should handle files with more than 2 dot separators', () => {
+    const mockWithComplexFile = {
+      Contents: [
+        {
+          Key: 'file.name.with.multiple.dots',
+          LastModified: new Date('2025-01-01'),
+        }
+      ]
+    };
+    const result = formatList(mockWithComplexFile, daCtx);
+    assert.strictEqual(result.length, 0);
+  });
+
+  it('should handle hidden files (starting with dot)', () => {
+    const mockWithHiddenFile = {
+      Contents: [
+        {
+          Key: '.hidden-file',
+          LastModified: new Date('2025-01-01'),
+        }
+      ]
+    };
+    const result = formatList(mockWithHiddenFile, daCtx);
+    assert.strictEqual(result.length, 0);
+  });
+
+  it('should handle files with props extension correctly', () => {
+    const mockWithProps = {
+      Contents: [
+        {
+          Key: 'test.props',
+          LastModified: new Date('2025-01-01'),
+        }
+      ]
+    };
+    const result = formatList(mockWithProps, daCtx);
+    const propsItem = result.find(item => item.name === 'test');
+    assert(propsItem);
+    assert.strictEqual(propsItem.ext, undefined);
+    assert.strictEqual(propsItem.lastModified, undefined);
+  });
+
+  it('should not add props file if folder already exists', () => {
+    const mockWithBoth = {
+      CommonPrefixes: [{ Prefix: 'test/' }],
+      Contents: [
+        {
+          Key: 'test.props',
+          LastModified: new Date('2025-01-01'),
+        }
+      ]
+    };
+    const result = formatList(mockWithBoth, daCtx);
+    const testItems = result.filter(item => item.name === 'test');
+    assert.strictEqual(testItems.length, 1);
+  });
+
+  it('should sort results alphabetically', () => {
+    const mockForSorting = {
+      Contents: [
+        { Key: 'zebra.html', LastModified: new Date('2025-01-01') },
+        { Key: 'alpha.html', LastModified: new Date('2025-01-01') },
+        { Key: 'beta.html', LastModified: new Date('2025-01-01') }
+      ]
+    };
+    const result = formatList(mockForSorting, daCtx);
+    assert.strictEqual(result[0].name, 'alpha');
+    assert.strictEqual(result[1].name, 'beta');
+    assert.strictEqual(result[2].name, 'zebra');
+  });
+
+  it('formatting should not have side effects', () => {
+    assert.deepStrictEqual(mockInput, getMock());
+  });
+});
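
Example usage (illustrative sketch, not part of the diff): the new route follows the same org/path addressing as the existing /list route and returns { offset, limit, data }. The host, org, site path, and values below are placeholders; adapt authentication to your setup.

  // Sketch of a client call against the new paginated listing route.
  // 'myorg' and 'mysite' are hypothetical; limit/offset are optional query parameters.
  const resp = await fetch('https://admin.da.live/list-paginated/myorg/mysite?limit=100&offset=200');
  if (resp.status === 400) throw new Error('offset + limit exceeds the 5000-entry window');
  const { offset, limit, data } = await resp.json();
  // data contains up to `limit` visible entries starting at `offset`,
  // in the order produced by formatPaginatedList.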