diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a4237adcb..7be08d58a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -38,7 +38,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [16, 18, 20, 22] + node-version: [18, 20, 22] services: postgres: image: postgres diff --git a/Dockerfile b/Dockerfile index 79938bd86..6ae35bba2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,5 @@ - - -################### -# Build source -################### FROM node:22-slim AS build +# Build artifacts from sources RUN mkdir -p /app/dist /app/source WORKDIR /app @@ -29,12 +25,23 @@ COPY source/ui /app/source/ui RUN npm run build-ui # outputs files in /app/dist +FROM node:22-slim AS utilities +# Fetch utilities that needs to be included + +#Install dependencies +RUN apt-get -qqy update && apt-get -qqy install --no-install-recommends \ + xz-utils lbzip2 \ + ca-certificates \ + curl \ + && rm -rf /var/lib/apt/lists/* /var/tmp/* +#Install ktx +RUN curl -fsSL -o "/tmp/KTX-Software-Linux-x86_64.tar.bz2" "https://github.com/KhronosGroup/KTX-Software/releases/download/v4.4.2/KTX-Software-4.4.2-Linux-x86_64.tar.bz2"\ + && tar -xf "/tmp/KTX-Software-Linux-x86_64.tar.bz2" -C /usr/local/ --strip-components=1 \ + && rm -rf /tmp/* -################### -# The actual container to be published -################### FROM node:22-slim +# Build the distributed container LABEL org.opencontainers.image.source=https://github.com/Holusion/eCorpus LABEL org.opencontainers.image.description="eCorpus base image" LABEL org.opencontainers.image.documentation="https://ecorpus.eu" @@ -51,6 +58,15 @@ ENV BUILD_REF=${BUILD_REF} ENV NODE_ENV=production +#Install additional runtime dependencies +RUN apt-get -qqy update && apt-get -qqy install --no-install-recommends \ + ocl-icd-libopencl1 \ + && ln -s libOpenCL.so.1 /usr/lib/x86_64-linux-gnu/libOpenCL.so\ + && rm -rf /var/lib/apt/lists/* /var/tmp/* + +COPY --from=utilities /usr/local/lib 
/usr/local/lib +COPY --from=utilities /usr/local/bin /usr/local/bin + WORKDIR /app COPY source/server/package*.json /app/ diff --git a/source/e2e/__test_fixtures/Diffuse.jpg b/source/e2e/__test_fixtures/Diffuse.jpg new file mode 100644 index 000000000..0bf7d2a47 Binary files /dev/null and b/source/e2e/__test_fixtures/Diffuse.jpg differ diff --git a/source/e2e/__test_fixtures/cube.blend b/source/e2e/__test_fixtures/cube.blend new file mode 100644 index 000000000..6d6a36836 Binary files /dev/null and b/source/e2e/__test_fixtures/cube.blend differ diff --git a/source/e2e/__test_fixtures/cube.mtl b/source/e2e/__test_fixtures/cube.mtl new file mode 100644 index 000000000..03fb35edb --- /dev/null +++ b/source/e2e/__test_fixtures/cube.mtl @@ -0,0 +1,12 @@ +# Blender 4.2.16 LTS MTL File: 'cube.blend' +# www.blender.org + +newmtl Material +Ns 250.000000 +Ka 1.000000 1.000000 1.000000 +Ks 0.500000 0.500000 0.500000 +Ke 0.000000 0.000000 0.000000 +Ni 1.450000 +d 1.000000 +illum 2 +map_Kd Diffuse.jpg diff --git a/source/e2e/__test_fixtures/cube.obj b/source/e2e/__test_fixtures/cube.obj new file mode 100644 index 000000000..89a536616 --- /dev/null +++ b/source/e2e/__test_fixtures/cube.obj @@ -0,0 +1,39 @@ +# Blender 4.2.16 LTS +# www.blender.org +mtllib cube.mtl +o Cube +v 1.000000 1.000000 -1.000000 +v 1.000000 -1.000000 -1.000000 +v 1.000000 1.000000 1.000000 +v 1.000000 -1.000000 1.000000 +v -1.000000 1.000000 -1.000000 +v -1.000000 -1.000000 -1.000000 +v -1.000000 1.000000 1.000000 +v -1.000000 -1.000000 1.000000 +vn -0.0000 1.0000 -0.0000 +vn -0.0000 -0.0000 1.0000 +vn -1.0000 -0.0000 -0.0000 +vn -0.0000 -1.0000 -0.0000 +vn 1.0000 -0.0000 -0.0000 +vn -0.0000 -0.0000 -1.0000 +vt 0.000000 0.333333 +vt 0.333333 0.333333 +vt 0.333333 0.666667 +vt 0.000000 0.666667 +vt 0.666667 1.000000 +vt 0.333333 1.000000 +vt 0.666667 0.666667 +vt 0.666667 0.333333 +vt 1.000000 0.333333 +vt 1.000000 0.666667 +vt 0.666667 0.000000 +vt 1.000000 0.000000 +vt 0.333333 0.000000 +s 0 +usemtl 
Material +f 1/1/1 5/2/1 7/3/1 3/4/1 +f 4/5/2 3/6/2 7/3/2 8/7/2 +f 8/7/3 7/3/3 5/2/3 6/8/3 +f 6/8/4 2/9/4 4/10/4 8/7/4 +f 2/8/5 1/11/5 3/12/5 4/9/5 +f 6/8/6 5/2/6 1/13/6 2/11/6 diff --git a/source/e2e/fixtures.ts b/source/e2e/fixtures.ts index ed82c6468..3615dcb64 100644 --- a/source/e2e/fixtures.ts +++ b/source/e2e/fixtures.ts @@ -13,12 +13,15 @@ export type CreateSceneOptions = { autoDelete?:boolean, } +type AccessLevel = "use" | "create" | "admin"; + +type UniqueAccount = {username:string, password:string, uid:number}; + type TestFixture = { adminPage:Page, userPage:Page, createScene:(opts?:CreateSceneOptions)=>Promise, - uniqueAccount: {username:string, password:string, uid:number}, - + uniqueAccount: (level?:AccessLevel)=>Promise, } export {expect} from "@playwright/test"; @@ -70,16 +73,18 @@ export const test = base.extend({ await ctx.close(); }, uniqueAccount: async ({browser}, use)=>{ + let adminContext = await browser.newContext({storageState: "playwright/.auth/admin.json"}); + + await use(async (level :AccessLevel = "create")=>{ let username = `testUserLogin${randomBytes(2).readUInt16LE().toString(36)}`; let password = randomBytes(16).toString("base64"); - let adminContext = await browser.newContext({storageState: "playwright/.auth/admin.json"}); //Create a user for this specific test - let res = await adminContext.request.post("/users", { + let res = await adminContext.request.post("/users", { data: JSON.stringify({ username, email: `${username}@example.com`, password, - level: "create", + level, }), headers:{ "Content-Type": "application/json", @@ -87,8 +92,10 @@ export const test = base.extend({ }); let body = JSON.parse(await res.text()); expect(body).toHaveProperty("uid"); - let uid :number =body.uid; - await use({username, password, uid}); - await adminContext.close(); - }, + let uid :number = body.uid; + return {username, password, uid}; + }); + + await adminContext.close(); + } }); \ No newline at end of file diff --git 
a/source/e2e/package-lock.json b/source/e2e/package-lock.json index 2e450be13..290166d84 100644 --- a/source/e2e/package-lock.json +++ b/source/e2e/package-lock.json @@ -15,7 +15,7 @@ }, "devDependencies": { "@playwright/test": "^1.49.1", - "@types/node": "^22.10.7" + "@types/node": "^22.19.12" } }, "node_modules/@playwright/test": { @@ -34,11 +34,12 @@ } }, "node_modules/@types/node": { - "version": "22.10.7", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.7.tgz", - "integrity": "sha512-V09KvXxFiutGp6B7XkpaDXlNadZxrzajcY50EuoLIpQ6WWYCSvf19lVIazzfIzQvhUN2HjX12spLojTnhuKlGg==", + "version": "22.19.12", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.12.tgz", + "integrity": "sha512-0QEp0aPJYSyf6RrTjDB7HlKgNMTY+V2C7ESTaVt6G9gQ0rPLzTGz7OF2NXTLR5vcy7HJEtIUsyWLsfX0kTqJBA==", + "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "node_modules/@types/yauzl": { @@ -112,9 +113,10 @@ "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==" }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "license": "MIT" }, "node_modules/xml-js": { "version": "1.6.11", diff --git a/source/e2e/package.json b/source/e2e/package.json index 3afc70ceb..15beb2c72 100644 --- a/source/e2e/package.json +++ b/source/e2e/package.json @@ -11,7 +11,7 @@ "license": "ISC", "devDependencies": { "@playwright/test": "^1.49.1", - "@types/node": "^22.10.7" + "@types/node": "^22.19.12" }, "dependencies": { "@types/yauzl": "^2.10.3", diff --git 
a/source/e2e/playwright.config.ts b/source/e2e/playwright.config.ts index 156a0f742..e13329d11 100644 --- a/source/e2e/playwright.config.ts +++ b/source/e2e/playwright.config.ts @@ -1,5 +1,5 @@ import { defineConfig, devices } from '@playwright/test'; -import path from 'path'; +import path from 'node:path'; /** * Read environment variables from file. diff --git a/source/e2e/tests/accessRights.spec.ts b/source/e2e/tests/accessRights.spec.ts index d5e065e52..813107533 100644 --- a/source/e2e/tests/accessRights.spec.ts +++ b/source/e2e/tests/accessRights.spec.ts @@ -1,8 +1,6 @@ import path from "node:path"; -import fs, { readFile } from "node:fs/promises"; -import { randomBytes, randomUUID } from "node:crypto"; -import { test, expect } from '../fixtures'; +import { test, expect } from '../fixtures.js'; const fixtures = path.resolve(import.meta.dirname, "../__test_fixtures"); @@ -13,47 +11,22 @@ test.use({ storageState: { cookies: [], origins: [] }, locale: "cimode" }); //The _actual_ route access rights are tested in unit tests //We want to have the user properly informed and that's what we're testing here -test("can't create a new scene", async function({page, request}){ - const name = randomUUID(); - +test("can't see upload page anonymously", async function({page}){ await page.goto("/ui/upload/"); - const f = page.getByRole("form", {name: "titles.createOrUpdateScene"}); - await expect(f).toBeVisible(); - await f.getByRole("button", {name: "labels.selectFile"}).setInputFiles(path.join(fixtures, "cube.glb")); - await f.getByRole("textbox", {name: "labels.sceneTitle"}).fill(name); - await f.getByRole("button", {name: "buttons.upload"}).click(); - - await expect(page.getByRole("status").getByText("Unauthorized")).toBeVisible(); - - let res = await request.get(`/scenes/${name}`); - await expect(res).not.toBeOK(); + await expect(page.getByRole("heading", {name: "Error"})).toBeVisible(); + await expect(page.getByText('errors.requireUser')).toBeVisible(); }); +test("can't 
see upload page as non-creator user", async function({page, uniqueAccount}){ + const {username, password} = await uniqueAccount("use"); + await page.goto("/auth/login/"); + await page.getByRole("textbox", {name: "labels.username"}).fill(username); + await page.getByRole("textbox", {name: "labels.password"}).fill(password); + await page.getByRole("button", {name: "labels.signin"}).click(); + await page.goto(`/ui/upload`); -test("can't upload a zip", async function({page, userPage}){ - const name = randomUUID(); - let res = await userPage.request.post(`/scenes/${name}`,{ - data: await fs.readFile(path.join(fixtures, "cube.glb")), - }); - await expect(res).toBeOK(); - res = await userPage.request.get(`/scenes/${name}`, { - headers: { - "Accept": "application/zip", - } - }); + await expect(page.getByRole("heading", {name: "Error"})).toBeVisible(); + await expect(page.getByText('errors.requireCreate')).toBeVisible(); - let body = await res.body(); - await page.goto("/ui/upload/"); - const f = page.getByRole("form", {name: "titles.createOrUpdateScene"}); - await expect(f).toBeVisible(); - await f.getByRole("button", {name: "labels.selectFile"}).setInputFiles({ - name: "scene.zip", - mimeType: "application/zip", - buffer: body, - }); - - await f.getByRole("button", {name: "buttons.upload"}).click(); - - await expect(page.getByText("scene: Error: [401] Unauthorized", {exact: true})).toBeVisible(); -}); +}) diff --git a/source/e2e/tests/download.spec.ts b/source/e2e/tests/download.spec.ts index 2d5ece1d6..3f0b0cd3e 100644 --- a/source/e2e/tests/download.spec.ts +++ b/source/e2e/tests/download.spec.ts @@ -50,7 +50,7 @@ test("downloads a scene archive", async ({page, request})=>{ await once(zip, "end"); expect(entries).toHaveLength(2); expect(entries.map(e=>e.fileName).sort()).toEqual([ - `scenes/${name}/models/${name}.glb`, + `scenes/${name}/${name}.glb`, `scenes/${name}/scene.svx.json`, ]); }); diff --git a/source/e2e/tests/scene_settings.spec.ts 
b/source/e2e/tests/scene_settings.spec.ts index a4d2f39d2..980d7cad3 100644 --- a/source/e2e/tests/scene_settings.spec.ts +++ b/source/e2e/tests/scene_settings.spec.ts @@ -1,8 +1,4 @@ -import path from "node:path"; -import fs from "node:fs/promises"; - - -import { expect, test } from '../fixtures'; +import { expect, test } from '../fixtures.js'; import { randomUUID } from "node:crypto"; diff --git a/source/e2e/tests/upload_object.spec.ts b/source/e2e/tests/upload_object.spec.ts index bbdac7f70..8b550abd8 100644 --- a/source/e2e/tests/upload_object.spec.ts +++ b/source/e2e/tests/upload_object.spec.ts @@ -9,21 +9,24 @@ const fixtures = path.resolve(import.meta.dirname, "../__test_fixtures"); //Authenticated as admin test.use({ storageState: 'playwright/.auth/user.json' }); -test("uploads and rename a glb", async ({page, request})=>{ +test("uploads and rename a glb", async ({ page, request }) => { await page.goto("/ui/upload"); //We are forced to use the rename otherwise we'd have a name collision const name = randomUUID(); - const f = page.getByRole("form", {name: "create or update a scene"}); + await page.locator("input[type=\"file\"]").setInputFiles(path.join(fixtures, "cube.glb")); + + await expect(page.getByRole("listitem").getByText("✓")).toBeVisible(); + + const f = page.getByRole("form", { name: "Create or update a scene" }); await expect(f).toBeVisible(); - await expect(f.getByRole("combobox", {name: "language"})).toHaveValue("en"); - await f.getByRole("button", {name: "files"}).setInputFiles(path.join(fixtures, "cube.glb")); - await f.getByRole("textbox", {name: "scene title"}).fill(name) - await f.getByRole("button", {name: "create a scene"}).click(); + await expect(f.getByRole("combobox", { name: "Default language" })).toHaveValue("en"); + await f.getByRole("textbox", { name: "Scene name" }).fill(name); + await page.getByRole("button", { name: "create a scene" }).click(); - const uploads = page.getByRole("region", {name: "uploads"}); + const uploads = 
page.getByRole("region", { name: "Created Scenes" }); await expect(uploads).toBeVisible(); //Don't check for actual progress bar visibility because that could be too quick to register - const link = uploads.getByRole("link", {name: name}); + const link = uploads.getByRole("link", { name: name }); await link.click(); await expect(page).toHaveURL(`/ui/scenes/${name}`); await expect(page.locator("h1")).toHaveText(name); @@ -33,30 +36,36 @@ test("uploads and rename a glb", async ({page, request})=>{ let doc = JSON.parse((await res.body()).toString()); expect(doc).toHaveProperty("setups"); expect(doc.setups).toHaveLength(1); - expect(doc.setups[0]).toHaveProperty("language", {language: "EN"}); + expect(doc.setups[0]).toHaveProperty("language", { language: "EN" }); - res = await request.get(`/scenes/${name}/models/${name}.glb`); + res = await request.get(`/scenes/${name}/cube.glb`); await expect(res).toBeOK(); expect(res.headers()).toHaveProperty("etag", "W/4diz3Hx67bxWyU9b_iCJD864pVJ6OGYCPh9sU40QyLs"); }); -test("uploads and rename a glb (force FR)", async ({page, request})=>{ +test("uploads and rename a glb (force FR)", async ({ page, request }) => { await page.goto("/ui/upload"); //We are forced to use the rename otherwise we'd have a name collision const name = randomUUID(); - const uploads = page.getByRole("region", {name: "uploads"}); - const f = page.getByRole("form", {name: "create or update a scene"}); + await page.locator("input[type=\"file\"]").setInputFiles(path.join(fixtures, "cube.glb")); + + await expect(page.getByRole("listitem").getByText("✓")).toBeVisible(); + + const f = page.getByRole("form", { name: "Create or update a scene" }); + const uploads = page.getByRole("region", { name: "Created Scenes" }); + await expect(f).toBeVisible(); await expect(uploads).not.toBeVisible(); - await f.getByRole("combobox", {name: "language"}).selectOption("fr"); - await f.getByRole("button", {name: "files"}).setInputFiles(path.join(fixtures, "cube.glb")); - await 
f.getByRole("textbox", {name: "scene title"}).fill(name) - await f.getByRole("button", {name: "create a scene"}).click(); + + await f.getByRole("combobox", { name: "Default language" }).selectOption("fr"); + await f.getByRole("textbox", { name: "Scene name" }).fill(name); + + await page.getByRole("button", { name: "create a scene" }).click(); await expect(uploads).toBeVisible(); //Don't check for actual progress bar visibility because that could be too quick to register - const link = uploads.getByRole("link", {name: name}); + const link = uploads.getByRole("link", { name: name }); await link.click(); await expect(page).toHaveURL(`/ui/scenes/${name}`); await expect(page.locator("h1")).toHaveText(name); @@ -66,49 +75,177 @@ test("uploads and rename a glb (force FR)", async ({page, request})=>{ let doc = JSON.parse((await res.body()).toString()); expect(doc).toHaveProperty("setups"); expect(doc.setups).toHaveLength(1); - expect(doc.setups[0]).toHaveProperty("language", {language: "FR"}); + expect(doc.setups[0]).toHaveProperty("language", { language: "FR" }); - res = await request.get(`/scenes/${name}/models/${name}.glb`); + res = await request.get(`/scenes/${name}/cube.glb`); await expect(res).toBeOK(); expect(res.headers()).toHaveProperty("etag", "W/4diz3Hx67bxWyU9b_iCJD864pVJ6OGYCPh9sU40QyLs"); }); -test("upload many glb", async ({page, request})=>{ +test("upload many glb", async ({ page, request }) => { await page.goto("/ui/upload"); - //We are forced to use the rename otherwise we'd have a name collision - const f = page.getByRole("form", {name: "create or update a scene"}); - await expect(f).toBeVisible(); - await expect(f.getByRole("combobox", {name: "language"})).toHaveValue("en"); + const name = randomUUID(); const content = await readFile(path.join(fixtures, "cube.glb")); - let files :{ + let files: { name: string; mimeType: string; buffer: Buffer; }[] = []; - for(let i = 0; i < 10; i++){ + for (let i = 0; i < 10; i++) { const buffer = Buffer.from(content); 
files.push({ - name: randomUUID()+".glb", - mimeType: "model/gltf+binary", + name: randomUUID() + ".glb", + mimeType: "model/gltf-binary", buffer, }) } + const section = page.locator("section"); + //Check that we can actually open the filechooser by clicking on the button + const fileChooserPromise = page.waitForEvent('filechooser'); + await page.getByText("select one or several files").click(); + const fileChooser = await fileChooserPromise; + fileChooser.setFiles(files); - await f.getByRole("button", {name: "files"}).setInputFiles(files); - await f.getByRole("button", {name: "create a scene"}).click(); + //Don't check for actual progress bar visibility because that could be too quick to register + //Just wait for all files to be done + for (let file of files) { + await expect(page.locator(`#upload-${file.name.replace(/[^-_a-z0-9]/g, "_")}.upload-done`)).toBeVisible(); + } + + const f = section.getByRole("form", { name: "Create or update a scene" }); + const btn = section.getByRole("button", { name: "create a scene" }); + await expect(f).toBeVisible(); + await expect(btn).toBeVisible(); + await btn.scrollIntoViewIfNeeded(); + + await f.getByRole("textbox", { name: "Scene name" }).fill(name); + await f.getByRole("combobox", { name: "Default language" }).selectOption("fr"); - const uploads = page.getByRole("region", {name: "uploads"}); + //Submit + await btn.click(); + + const uploads = page.getByRole("region", { name: "Created Scenes" }); await expect(uploads).toBeVisible(); - //Don't check for actual progress bar visibility because that could be too quick to register - for(let file of files){ - const name = file.name.split(".").slice(0, -1).join("."); - await expect(uploads.getByRole("link", {name: name})).toHaveAttribute("href", `/ui/scenes/${name}`) + const link = uploads.getByRole("link", { name: name }); + await link.click(); + await expect(page).toHaveURL(`/ui/scenes/${name}`); + await expect(page.locator("h1")).toHaveText(name); + + let res = await 
request.get(`/scenes/${name}/scene.svx.json`); + await expect(res).toBeOK(); + let doc = JSON.parse((await res.body()).toString()); + expect(doc).toHaveProperty("setups"); + expect(doc.setups).toHaveLength(1); + expect(doc.setups[0]).toHaveProperty("language", { language: "FR" }); + expect(doc.models).toHaveProperty("length", files.length); + + for (let file of files) { + res = await request.get(`/scenes/${name}/${file.name}`); + await expect(res).toBeOK(); + expect(res.headers()).toHaveProperty("etag", "W/4diz3Hx67bxWyU9b_iCJD864pVJ6OGYCPh9sU40QyLs"); } +}); + +test("uploads an obj with mtl and texture", async ({ page, request }) => { + await page.goto("/ui/upload"); + //We are forced to use the rename otherwise we'd have a name collision + const name = randomUUID(); + await page.locator("input[type=\"file\"]").setInputFiles([ + path.join(fixtures, "cube.obj"), + path.join(fixtures, "cube.mtl"), + path.join(fixtures, "Diffuse.jpg"), + ]); + + await expect(page.getByRole("listitem").getByText("✓")).toHaveCount(3); + + const f = page.getByRole("form", { name: "Create or update a scene" }); + await expect(f).toBeVisible(); + await expect(f.getByRole("combobox", { name: "Default language" })).toHaveValue("en"); + await f.getByRole("textbox", { name: "Scene name" }).fill(name); + await page.getByRole("button", { name: "create a scene" }).click(); + + const uploads = page.getByRole("region", { name: "Created Scenes" }); + await expect(uploads).toBeVisible(); + //Don't check for actual progress bar visibility because that could be too quick to register + const link = uploads.getByRole("link", { name: name }); + await link.click(); + await expect(page).toHaveURL(`/ui/scenes/${name}`); + await expect(page.locator("h1")).toHaveText(name); + + let res = await request.get(`/scenes/${name}/scene.svx.json`); + await expect(res).toBeOK(); + let doc = JSON.parse((await res.body()).toString()); + + expect(doc).toHaveProperty("setups"); + expect(doc.setups).toHaveLength(1); + 
expect(doc.setups[0]).toHaveProperty("language", { language: "EN" }); + + expect(doc).toHaveProperty("models"); + expect(doc.models).toHaveLength(1); + expect(doc.models[0]).toHaveProperty("derivatives"); + expect(doc.models[0].derivatives).toEqual([{ + "assets": [{ + "byteSize": 3884, + "imageSize": 96, + "numFaces": 12, + "type": "Model", + "uri": "cube.glb" + }], + "quality": "High", + "usage": "Web3D" + }]); + + + res = await request.get(`/scenes/${name}/cube.glb`); + await expect(res).toBeOK(); + // It may change without it being a problem. Check the actual file if necessary. + expect(res.headers()).toHaveProperty("etag", "W/yhH03TGHdkBQgKlzJcPpDFD9XdQk9Wq_vBxCzThegYY"); +}); + + +test("uploads and optimize a glb", async ({ page, request }) => { + await page.goto("/ui/upload"); + //We are forced to use the rename otherwise we'd have a name collision + const name = randomUUID(); + await page.locator("input[type=\"file\"]").setInputFiles(path.join(fixtures, "cube.glb")); + + await expect(page.getByRole("listitem").getByText("✓")).toBeVisible(); + + const f = page.getByRole("form", { name: "Create or update a scene" }); + await expect(f).toBeVisible(); + await expect(f.getByRole("combobox", { name: "Default language" })).toHaveValue("en"); + await f.getByRole("textbox", { name: "Scene name" }).fill(name); + await page.getByRole("checkbox", { name: "Optimize models", exact: false }).click(); + + await page.getByRole("button", { name: "create a scene" }).click(); + + const uploads = page.getByRole("region", { name: "Created Scenes" }); + await expect(uploads).toBeVisible(); + //Don't check for actual progress bar visibility because that could be too quick to register + const link = uploads.getByRole("link", { name: name }); + await link.click(); + await expect(page).toHaveURL(`/ui/scenes/${name}`); + await expect(page.locator("h1")).toHaveText(name); + + let res = await request.get(`/scenes/${name}/scene.svx.json`); + await expect(res).toBeOK(); + let doc = 
JSON.parse((await res.body()).toString()); + expect(doc).toHaveProperty("setups"); + expect(doc.setups).toHaveLength(1); + expect(doc.setups[0]).toHaveProperty("language", { language: "EN" }); + + + res = await request.get(`/scenes/${name}/cube.glb`); + await expect(res).toBeOK(); + const headers = res.headers(); + //We check the etag is different from what we'd have if we didn't request optimization + expect(headers).toHaveProperty("etag"); + expect(headers).not.toEqual("W/4diz3Hx67bxWyU9b_iCJD864pVJ6OGYCPh9sU40QyLs"); }); \ No newline at end of file diff --git a/source/e2e/tests/upload_zip.spec.ts b/source/e2e/tests/upload_zip.spec.ts index e25e4e507..fb05acb03 100644 --- a/source/e2e/tests/upload_zip.spec.ts +++ b/source/e2e/tests/upload_zip.spec.ts @@ -21,18 +21,18 @@ function reducePropfind(text:string) :ReducedWebDAVProps[]{ const multistatus = root.elements[0]; expect(multistatus).toHaveProperty("name", "D:multistatus"); const responses = multistatus.elements; - return responses.map(({elements})=>{ - const href = elements.find(e=>e.name === "D:href"); - expect(href, `find D:href in ${elements.map(p=>p.name)}`).toBeTruthy(); + return responses.map(({elements}:any)=>{ + const href = elements.find((e: any)=>e.name === "D:href"); + expect(href, `find D:href in ${elements.map((p:any)=>p.name)}`).toBeTruthy(); let item: ReducedWebDAVProps = { - path: new URL(href.elements.find(e=>e.type === "text").text).pathname, + path: new URL(href.elements.find((e: any)=>e.type === "text").text).pathname, }; - const propstat = elements.find(e=>e.name === "D:propstat"); - expect(propstat, `find D:propstat in ${elements.map(p=>p.name)}`).toBeTruthy(); - const props = propstat.elements.find(e=>e.name === "D:prop"); + const propstat = elements.find((e: any)=>e.name === "D:propstat"); + expect(propstat, `find D:propstat in ${elements.map((p: any)=>p.name)}`).toBeTruthy(); + const props = propstat.elements.find((e: any)=>e.name === "D:prop"); for(const el of props.elements){ - const 
content = el.elements?.find(e=>e.type ==="text")?.text; + const content = el.elements?.find((e: any)=>e.type ==="text")?.text; switch(el.name){ case "D:getetag": item.etag = content; @@ -78,23 +78,27 @@ test("uploads a scene zip", async ({page, request})=>{ await page.goto("/ui/upload"); - const f = page.getByRole("form", {name: "titles.createOrUpdateScene"}); - await expect(f).toBeVisible(); - await f.getByRole("button", {name: "labels.selectFile"}).setInputFiles({ + + + await page.locator("input[type=\"file\"]").setInputFiles({ name: "scene.zip", mimeType: "application/zip", buffer: body, }); - await f.getByRole("button", {name: "buttons.upload"}).click(); + + //We expect to see a list of scenes that will be uploaded + //In our case we expect "create" status since the scene has been deleted + + await page.getByRole("button", {name: "buttons.upload"}).click(); - const uploads = page.getByRole("region", {name: "uploads"}); + const uploads = page.getByRole("region", {name: "titles.createdScenes"}); await expect(uploads).toBeVisible(); //Don't check for actual progress bar visibility because that could be too quick to register const link = uploads.getByRole("link", {name: name}); await link.click(); await expect(page).toHaveURL(`/ui/scenes/${name}`); - await expect(page.getByRole("heading", {name})).toBeVisible(); + await expect(page.getByRole("heading", {name, }).first()).toBeVisible(); }); @@ -127,27 +131,34 @@ test("uploads a multi-scene zip", async ({page, request})=>{ let body = await res.body(); - await Promise.all(names.map(async (name)=>{ - //Delete the scene - res = await request.delete(`/scenes/${name}?archive=false`); - await expect(res).toBeOK(); - })); - + //Delete the first scene + res = await request.delete(`/scenes/${names[0]}?archive=false`); + await expect(res).toBeOK(); await page.goto("/ui/upload"); - const f = page.getByRole("form", {name: "titles.createOrUpdateScene"}); - await expect(f).toBeVisible(); - await f.getByRole("button", {name: 
"labels.selectFile"}).setInputFiles({ + + + await page.locator("input[type=\"file\"]").setInputFiles({ name: "scene.zip", mimeType: "application/zip", buffer: body, }); - await f.getByRole("button", {name: "buttons.upload"}).click(); + + const btn = page.getByRole("button", {name: "buttons.upload"}); + + await expect(btn).toBeVisible(); + + //Expect first scene to be "create", because we just deleted it. Second scene should be "update" + await expect(page.getByText(`[CREATE] ${names[0]}`)).toBeVisible(); + await expect(page.getByText(`[UPDATE] ${names[1]}`)).toBeVisible(); + + + await btn.click(); - const uploads = page.getByRole("region", {name: "uploads"}); + const uploads = page.getByRole("region", {name: "titles.createdScenes"}); for (let name of names){ await expect(uploads).toBeVisible(); //Don't check for actual progress bar visibility because that could be too quick to register diff --git a/source/e2e/tests/userSettings.spec.ts b/source/e2e/tests/userSettings.spec.ts index ed3244013..a489b1dde 100644 --- a/source/e2e/tests/userSettings.spec.ts +++ b/source/e2e/tests/userSettings.spec.ts @@ -2,7 +2,7 @@ import path from "node:path"; import fs, { readFile } from "node:fs/promises"; import { randomBytes, randomUUID } from "node:crypto"; -import { test, expect } from '../fixtures'; +import { test, expect } from '../fixtures.js'; const fixtures = path.resolve(import.meta.dirname, "../__test_fixtures"); @@ -13,9 +13,12 @@ const fixtures = path.resolve(import.meta.dirname, "../__test_fixtures"); //Runs with a per-test storageState, in locale "cimode" test.use({ storageState: { cookies: [], origins: [] }, locale: "cimode" }); -test.beforeEach(async ({page, uniqueAccount:{username, password}})=>{ +let account: {username:string, password:string, uid:number}; + +test.beforeEach(async ({page, uniqueAccount})=>{ + account = await uniqueAccount(); let res = await page.request.post("/auth/login", { - data: JSON.stringify({username, password}), + data: 
JSON.stringify({username: account.username, password: account.password}), headers:{ "Content-Type": "application/json", } @@ -33,9 +36,9 @@ test("can read user settings page", async function({page}){ }); -test("can change email", async ({page, uniqueAccount})=>{ +test("can change email", async ({page})=>{ //Ensure this is unique, otherwise it is rejected - let new_email = `${uniqueAccount.username}-replacement@example2.com` + let new_email = `${account.username}-replacement@example2.com` await page.goto("/ui/user/"); const form = page.getByRole("form", {name: "titles.userProfile"}); await expect(form).toBeVisible(); @@ -53,7 +56,8 @@ test("can change email", async ({page, uniqueAccount})=>{ await expect(emailField).toHaveValue(new_email); }); -test("can change password", async ({baseURL, page, uniqueAccount:{username, password}})=>{ +test("can change password", async ({baseURL, page})=>{ + const {username, password} = account; const new_password = randomBytes(10).toString("base64"); let res = await fetch(new URL(`/auth/login`, baseURL), { @@ -109,7 +113,8 @@ test("can logout", async ({page})=>{ expect(await res.json()).toHaveProperty("level", "none"); }); -test("can show archived scenes", async ({page, uniqueAccount:{username}})=>{ +test("can show archived scenes", async ({page})=>{ + const {username} = account; const name = randomUUID(); const fs = await import("node:fs/promises"); const data = await fs.readFile(path.join(fixtures, "cube.glb")) diff --git a/source/e2e/tsconfig.json b/source/e2e/tsconfig.json new file mode 100644 index 000000000..34a478a74 --- /dev/null +++ b/source/e2e/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "lib": ["ESNext"], + "module": "NodeNext", + "target": "ES2021", + "strict": true, + "skipLibCheck": true, + "types": ["@playwright/test"], + "outDir": "./dist", + "rootDir": "." 
+ }, + "include": ["./**/*.ts"], + "exclude": [ + "node_modules" + ] +} diff --git a/source/server/__test_fixtures/cube.glb b/source/server/__test_fixtures/cube.glb index de8af0be6..190fe2a0a 100644 Binary files a/source/server/__test_fixtures/cube.glb and b/source/server/__test_fixtures/cube.glb differ diff --git a/source/server/__test_fixtures/cube.mtl b/source/server/__test_fixtures/cube.mtl new file mode 100644 index 000000000..78a331544 --- /dev/null +++ b/source/server/__test_fixtures/cube.mtl @@ -0,0 +1,12 @@ +# Blender 4.2.18 LTS MTL File: 'None' +# www.blender.org + +newmtl Material +Ns 250.000000 +Ka 1.000000 1.000000 1.000000 +Kd 0.800000 0.800000 0.800000 +Ks 0.500000 0.500000 0.500000 +Ke 0.000000 0.000000 0.000000 +Ni 1.450000 +d 1.000000 +illum 2 diff --git a/source/server/__test_fixtures/cube.obj b/source/server/__test_fixtures/cube.obj new file mode 100644 index 000000000..17ac39311 --- /dev/null +++ b/source/server/__test_fixtures/cube.obj @@ -0,0 +1,40 @@ +# Blender 4.2.18 LTS +# www.blender.org +mtllib cube.mtl +o Cube +v 1.000000 1.000000 -1.000000 +v 1.000000 -1.000000 -1.000000 +v 1.000000 1.000000 1.000000 +v 1.000000 -1.000000 1.000000 +v -1.000000 1.000000 -1.000000 +v -1.000000 -1.000000 -1.000000 +v -1.000000 1.000000 1.000000 +v -1.000000 -1.000000 1.000000 +vn -0.0000 1.0000 -0.0000 +vn -0.0000 -0.0000 1.0000 +vn -1.0000 -0.0000 -0.0000 +vn -0.0000 -1.0000 -0.0000 +vn 1.0000 -0.0000 -0.0000 +vn -0.0000 -0.0000 -1.0000 +vt 0.625000 0.500000 +vt 0.875000 0.500000 +vt 0.875000 0.750000 +vt 0.625000 0.750000 +vt 0.375000 0.750000 +vt 0.625000 1.000000 +vt 0.375000 1.000000 +vt 0.375000 0.000000 +vt 0.625000 0.000000 +vt 0.625000 0.250000 +vt 0.375000 0.250000 +vt 0.125000 0.500000 +vt 0.375000 0.500000 +vt 0.125000 0.750000 +s 0 +usemtl Material +f 1/1/1 5/2/1 7/3/1 3/4/1 +f 4/5/2 3/4/2 7/6/2 8/7/2 +f 8/8/3 7/9/3 5/10/3 6/11/3 +f 6/12/4 2/13/4 4/5/4 8/14/4 +f 2/13/5 1/1/5 3/4/5 4/5/5 +f 6/11/6 5/10/6 1/1/6 2/13/6 diff --git 
a/source/server/__test_fixtures/cube_draco.glb b/source/server/__test_fixtures/cube_draco.glb new file mode 100644 index 000000000..1c141961d Binary files /dev/null and b/source/server/__test_fixtures/cube_draco.glb differ diff --git a/source/server/__test_fixtures/cube_etc1s.glb b/source/server/__test_fixtures/cube_etc1s.glb new file mode 100644 index 000000000..b4799c759 Binary files /dev/null and b/source/server/__test_fixtures/cube_etc1s.glb differ diff --git a/source/server/__test_fixtures/cube_meshopt.glb b/source/server/__test_fixtures/cube_meshopt.glb new file mode 100644 index 000000000..d3483c0cd Binary files /dev/null and b/source/server/__test_fixtures/cube_meshopt.glb differ diff --git a/source/server/__test_fixtures/cube_textured.glb b/source/server/__test_fixtures/cube_textured.glb new file mode 100644 index 000000000..85ee3e4e6 Binary files /dev/null and b/source/server/__test_fixtures/cube_textured.glb differ diff --git a/source/server/__test_fixtures/cube_webp.glb b/source/server/__test_fixtures/cube_webp.glb new file mode 100644 index 000000000..94d075599 Binary files /dev/null and b/source/server/__test_fixtures/cube_webp.glb differ diff --git a/source/server/auth/UserManager.ts b/source/server/auth/UserManager.ts index 13f50b9aa..945527967 100644 --- a/source/server/auth/UserManager.ts +++ b/source/server/auth/UserManager.ts @@ -180,13 +180,20 @@ export default class UserManager extends DbController { ]); } + /** + * Get a user using its unique identifier + * @throws {NotFoundError} if user_id is not found + */ + async getUserById(user_id: number) :Promise{ + let u = await this.db.get(`SELECT * FROM users WHERE user_id = $1`, [ user_id ]); + if(!u) throw new NotFoundError(`no user with user_id ${user_id}`); + return UserManager.deserialize(u); + } /** - * Reads users file and checks users validity. 
- * Also allow requests by email + * Also allow requests by username or email * @throws {BadRequestError} is username is invalid * @throws {NotFoundError} is username is not found - * @throws {Error} if fs.readFile fails (generally with error.code == ENOENT) */ async getUserByName(username : string) :Promise{ if(!UserManager.isValidUserName(username) && username.indexOf("@")== -1) throw new BadRequestError(`Invalid user name`); diff --git a/source/server/create.ts b/source/server/create.ts new file mode 100644 index 000000000..ed1133f72 --- /dev/null +++ b/source/server/create.ts @@ -0,0 +1,68 @@ +import { debuglog } from "node:util"; + +import type express from "express"; + + +import UserManager from "./auth/UserManager.js"; +import { mkdir } from "fs/promises"; + +import openDatabase from "./vfs/helpers/db.js"; +import Vfs from "./vfs/index.js"; +import defaultConfig from "./utils/config.js"; +import createServer from "./routes/index.js"; +import { TaskScheduler } from "./tasks/scheduler.js"; + + +const debug = debuglog("pg:connect"); + +export interface Services{ + app: express.Application; + vfs: Vfs; + taskScheduler: TaskScheduler; + userManager: UserManager; + close: ()=>Promise; +} + +export default async function createService(config = defaultConfig) :Promise{ + + await Promise.all([config.files_dir].map(d=>mkdir(d, {recursive: true}))); + let db = await openDatabase({uri: config.database_uri, forceMigration: config.force_migration}); + let uri = new URL(config.database_uri); + debug(`Connected to database ${uri.hostname}:${uri.port}${uri.pathname}`) + const vfs = await Vfs.Open(config.files_dir, {db}); + const userManager = new UserManager(db); + + const taskScheduler = new TaskScheduler({db, vfs, userManager, config}); + + + if(config.clean_database){ + setTimeout(()=>{ + //Clean file system after a while to prevent delaying startup + vfs.clean().then(()=>console.log("Cleanup done."), e=> console.error("Cleanup failed :", e)); + }, 6000).unref(); + + + 
setInterval(()=>{ + vfs.optimize(); + }, 2*3600*1000).unref(); + } + + const app = await createServer({ + userManager, + fileDir: config.files_dir, + vfs, + taskScheduler, + config, + }); + + return { + app, + vfs, + userManager, + taskScheduler, + async close(){ + await taskScheduler.close(); + await vfs.close(); + } + }; +} diff --git a/source/server/index.ts b/source/server/index.ts index 41b817d87..f710bc7f1 100644 --- a/source/server/index.ts +++ b/source/server/index.ts @@ -1,22 +1,5 @@ -/** - * 3D Foundation Project - * Copyright 2019 Smithsonian Institution - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - import path from "path"; -import createServer from "./routes/index.js"; +import createService from "./create.js"; import config from "./utils/config.js"; //@ts-ignore @@ -28,8 +11,8 @@ import("source-map-support").then((s)=>{ (async ()=>{ let root = path.resolve(config.root_dir); console.info("Serve directory : "+root+" on "+config.port); - const app = await createServer(config); - app.listen(config.port, () => { + const services = await createService(config); + services.app.listen(config.port, () => { console.info(`Server ready and listening on port ${config.port}\n`); }); })(); diff --git a/source/server/integration.test.ts b/source/server/integration.test.ts index ad8300f85..768fe013a 100644 --- a/source/server/integration.test.ts +++ b/source/server/integration.test.ts @@ -67,11 +67,11 @@ describe("Web Server Integration", function(){ .set("Content-Type", "application/octet-stream") .send(content) expect(r.status, `Expected status code 201 but received [${r.status}]: ${r.text}`).to.equal(201); - let res = await this.agent.get("/scenes/bar/models/bar.glb").expect(200); + let res = await this.agent.get("/scenes/bar/bar.glb").expect(200); expect(res.text.slice(0,4).toString()).to.equal("glTF"); expect(res.text.length).to.equal(content.length); - let {body:doc} = await this.agent.get("/scenes/bar/bar.svx.json").expect(200); + let {body:doc} = await this.agent.get("/scenes/bar/scene.svx.json").expect(200); expect(doc).to.have.property("models").an("array").to.have.length(1); }); diff --git a/source/server/migrations/005-tasks.sql b/source/server/migrations/005-tasks.sql new file mode 100644 index 000000000..0ebc78893 --- /dev/null +++ b/source/server/migrations/005-tasks.sql @@ -0,0 +1,43 @@ +-------------------------------------------------------------------------------- +-- Up +-------------------------------------------------------------------------------- + +CREATE TYPE task_status AS ENUM ('error', 'initializing', 'running', 'pending', 'success'); 
+CREATE TYPE log_severity AS ENUM('debug', 'log', 'warn', 'error'); + +CREATE TABLE tasks ( + task_id BIGSERIAL PRIMARY KEY, + fk_scene_id BIGINT REFERENCES scenes(scene_id) ON DELETE CASCADE, + fk_user_id BIGINT REFERENCES users(user_id) ON DELETE SET NULL, + ctime TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + -- task data + type TEXT NOT NULL, + parent BIGINT DEFAULT NULL REFERENCES tasks(task_id) ON DELETE CASCADE, + data JSONB NOT NULL DEFAULT '{}'::jsonb, + output JSON, + -- task state management + status task_status NOT NULL DEFAULT 'pending' +); + + +CREATE TABLE tasks_logs ( + log_id BIGSERIAL PRIMARY KEY, -- ensure consistent ordering + fk_task_id BIGINT REFERENCES tasks(task_id) ON DELETE CASCADE, + timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + severity log_severity NOT NULL DEFAULT 'log', + message TEXT NOT NULL +); + +-- Index to speed up lookups on logs insertion +CREATE INDEX idx_tasks_logs_fk_task_id ON tasks_logs(fk_task_id); + +-------------------------------------------------------------------------------- +-- Down +-------------------------------------------------------------------------------- + +DROP TABLE tasks_logs; + +DROP TABLE tasks; + +DROP TYPE task_status; +DROP TYPE log_severity; diff --git a/source/server/package-lock.json b/source/server/package-lock.json index 113f0df63..94008d71d 100644 --- a/source/server/package-lock.json +++ b/source/server/package-lock.json @@ -9,19 +9,27 @@ "version": "0.0.1", "license": "Apache-2.0", "dependencies": { + "@gltf-transform/core": "^4.3.0", + "@gltf-transform/extensions": "^4.3.0", + "@gltf-transform/functions": "^4.3.0", "body-parser": "^1.20.3", + "content-disposition": "^1.0.1", "cookie-parser": "^1.4.7", "cookie-session": "^2.1.0", + "draco3dgltf": "^1.5.7", "express": "^4.21.2", "express-rate-limit": "^7.5.0", "handlebars": "^4.7.8", "i18next": "^24.2.1", "i18next-fs-backend": "^2.6.0", + "meshoptimizer": "^1.0.1", "mime-types": "^2.1.35", "morgan": "^1.10.0", "nodemailer": 
"^6.9.16", + "obj2gltf": "^3.2.0", "pg": "^8.16.0", "pg-cursor": "^2.15.0", + "sharp": "^0.34.5", "source-map-support": "^0.5.21", "xml-js": "^1.6.11", "yauzl": "^3.2.0", @@ -30,8 +38,10 @@ "devDependencies": { "@types/chai": "^5.0.1", "@types/chai-as-promised": "^8.0.1", + "@types/content-disposition": "^0.5.9", "@types/cookie-parser": "^1.4.8", "@types/cookie-session": "^2.0.49", + "@types/draco3dgltf": "^1.4.3", "@types/express": "^5.0.0", "@types/mime-types": "^2.1.4", "@types/mocha": "^10.0.10", @@ -51,7 +61,7 @@ "typescript": "~5.3.3" }, "engines": { - "node": ">=16.14" + "node": ">=18.17.0" } }, "node_modules/@babel/runtime": { @@ -65,6 +75,57 @@ "node": ">=6.9.0" } }, + "node_modules/@cesium/engine": { + "version": "22.3.0", + "resolved": "https://registry.npmjs.org/@cesium/engine/-/engine-22.3.0.tgz", + "integrity": "sha512-oDl+nWX/qfHYQ0lEdGxLqZoKEtTMghvJDzZKTycYfiIuDYDh8Kh0Oy45wr3mSJse3PuTj1e6hDmbw8vbycCOxw==", + "license": "Apache-2.0", + "dependencies": { + "@cesium/wasm-splats": "^0.1.0-alpha.2", + "@spz-loader/core": "0.3.0", + "@tweenjs/tween.js": "^25.0.0", + "@zip.js/zip.js": "^2.8.1", + "autolinker": "^4.0.0", + "bitmap-sdf": "^1.0.3", + "dompurify": "^3.3.0", + "draco3d": "^1.5.1", + "earcut": "^3.0.0", + "grapheme-splitter": "^1.0.4", + "jsep": "^1.3.8", + "kdbush": "^4.0.1", + "ktx-parse": "^1.0.0", + "lerc": "^2.0.0", + "mersenne-twister": "^1.1.0", + "meshoptimizer": "^1.0.1", + "pako": "^2.0.4", + "protobufjs": "^8.0.0", + "rbush": "^4.0.1", + "topojson-client": "^3.1.0", + "urijs": "^1.19.7" + }, + "engines": { + "node": ">=20.19.0" + } + }, + "node_modules/@cesium/wasm-splats": { + "version": "0.1.0-alpha.2", + "resolved": "https://registry.npmjs.org/@cesium/wasm-splats/-/wasm-splats-0.1.0-alpha.2.tgz", + "integrity": "sha512-t9pMkknv31hhIbLpMa8yPvmqfpvs5UkUjgqlQv9SeO8VerCXOYnyP8/486BDaFrztM0A7FMbRjsXtNeKvqQghA==", + "license": "Apache-2.0" + }, + "node_modules/@cesium/widgets": { + "version": "14.3.0", + "resolved": 
"https://registry.npmjs.org/@cesium/widgets/-/widgets-14.3.0.tgz", + "integrity": "sha512-1bS+Nv/uXwP0/NV0o4XeUA5nLCWttjTmKwl+pHnbZXp0ZwDmClb0xVDruDyVtLrUuRhsk84JZ4rXI/IT7HXOvA==", + "license": "Apache-2.0", + "dependencies": { + "@cesium/engine": "^22.3.0", + "nosleep.js": "^0.12.0" + }, + "engines": { + "node": ">=20.19.0" + } + }, "node_modules/@cspotcode/source-map-support": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", @@ -87,6 +148,523 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@gltf-transform/core": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/core/-/core-4.3.0.tgz", + "integrity": "sha512-ZeaQfszGJ9LYwELszu45CuDQCsE26lJNNe36FVmN8xclaT6WDdCj7fwGpQXo0/l/YgAVAHX+uO7YNBW75/SRYw==", + "license": "MIT", + "dependencies": { + "property-graph": "^4.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/donmccurdy" + } + }, + "node_modules/@gltf-transform/extensions": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/extensions/-/extensions-4.3.0.tgz", + "integrity": "sha512-XDAjQPYVMHa/VDpSbfCBwI+/1muwRJCaXhUpLgnUzAjn0D//PgvIAcbNm1EwBl3LIWBSwjDUCn2LiMAjp+aXVw==", + "license": "MIT", + "dependencies": { + "@gltf-transform/core": "^4.3.0", + "ktx-parse": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/donmccurdy" + } + }, + "node_modules/@gltf-transform/functions": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/functions/-/functions-4.3.0.tgz", + "integrity": 
"sha512-FZggHVgt3DHOezgESBrf2vDzuD2FYQYaNT2sT/aP316SIwhuiIwby3z7rhV9joDvWqqUaPkf1UmkjlOaY9riSQ==", + "license": "MIT", + "dependencies": { + "@gltf-transform/core": "^4.3.0", + "@gltf-transform/extensions": "^4.3.0", + "ktx-parse": "^1.0.1", + "ndarray": "^1.0.19", + "ndarray-lanczos": "^0.3.0", + "ndarray-pixels": "^5.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/donmccurdy" + } + }, + "node_modules/@img/colour": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz", + "integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@img/sharp-darwin-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.2.4", + "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "cpu": [ + "arm" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-ppc64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.4.tgz", + 
"integrity": "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==", + "cpu": [ + "ppc64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-riscv64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-riscv64/-/sharp-libvips-linux-riscv64-1.2.4.tgz", + "integrity": "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==", + "cpu": [ + "riscv64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "cpu": [ + "s390x" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": 
"sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "cpu": [ + "arm" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-ppc64": { + "version": "0.34.5", + "resolved": 
"https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.5.tgz", + "integrity": "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==", + "cpu": [ + "ppc64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-ppc64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-riscv64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-riscv64/-/sharp-linux-riscv64-0.34.5.tgz", + "integrity": "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==", + "cpu": [ + "riscv64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-riscv64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "cpu": [ + "s390x" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "cpu": [ + "x64" + ], + "license": 
"Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "cpu": [ + "wasm32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.7.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "cpu": [ + "ia32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", @@ -102,6 +680,80 @@ "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", "dev": true }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + 
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": 
"sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@spz-loader/core": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@spz-loader/core/-/core-0.3.0.tgz", + "integrity": "sha512-sbStwMHb/MIE29st7rRuMYWqhX1UmLSFzdpyGtUZUXLkFNIuYKblzjQdtiet8bau8sUf21uL1DQ451zuySGmcA==", + "license": "Apache-2.0", + "engines": { + "node": ">=16", + "pnpm": ">=8" + } + }, "node_modules/@tsconfig/node10": { "version": "1.0.9", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", @@ -126,6 +778,12 @@ "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", "dev": true }, + "node_modules/@tweenjs/tween.js": { + "version": "25.0.0", + "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-25.0.0.tgz", + "integrity": "sha512-XKLA6syeBUaPzx4j3qwMqzzq+V4uo72BnlbOjmuljLrRqdsd3qnzvZZoxvMHZ23ndsRS4aufU6JOZYpCbU6T1A==", + "license": "MIT" + }, "node_modules/@types/body-parser": { "version": "1.19.2", "resolved": 
"https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -163,6 +821,13 @@ "@types/node": "*" } }, + "node_modules/@types/content-disposition": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/@types/content-disposition/-/content-disposition-0.5.9.tgz", + "integrity": "sha512-8uYXI3Gw35MhiVYhG3s295oihrxRyytcRHjSjqnqZVDDy/xcGBRny7+Xj1Wgfhv5QzRtN2hB2dVRBUX9XW3UcQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/cookie-parser": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.8.tgz", @@ -194,6 +859,23 @@ "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", "dev": true }, + "node_modules/@types/draco3d": { + "version": "1.4.10", + "resolved": "https://registry.npmjs.org/@types/draco3d/-/draco3d-1.4.10.tgz", + "integrity": "sha512-AX22jp8Y7wwaBgAixaSvkoG4M/+PlAcm3Qs4OW8yT9DM4xUpWKeFhLueTAyZF39pviAdcDdeJoACapiAceqNcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/draco3dgltf": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/draco3dgltf/-/draco3dgltf-1.4.3.tgz", + "integrity": "sha512-JTY574f8xRI9+bOsDajeVSQ/gnIo0q3dt/MAJhNRKWJKdH2TAP3hld+lQ+eQnG9Eb6Ae493EiKi2oDZZpciQgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/draco3d": "*" + } + }, "node_modules/@types/express": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz", @@ -258,11 +940,16 @@ "@types/node": "*" } }, + "node_modules/@types/ndarray": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/@types/ndarray/-/ndarray-1.0.14.tgz", + "integrity": "sha512-oANmFZMnFQvb219SSBIhI1Ih/r4CvHDOzkWyJS/XRqkMrGH5/kaPSA1hQhdIBzouaE+5KpE/f5ylI9cujmckQg==", + "license": "MIT" + }, "node_modules/@types/node": { "version": "16.18.23", "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.23.tgz", - "integrity": 
"sha512-XAMpaw1s1+6zM+jn2tmw8MyaRDIJfXxqmIQIS0HfoGYPuf7dUWeiUKopwq13KFX9lEp1+THGtlaaYx39Nxr58g==", - "dev": true + "integrity": "sha512-XAMpaw1s1+6zM+jn2tmw8MyaRDIJfXxqmIQIS0HfoGYPuf7dUWeiUKopwq13KFX9lEp1+THGtlaaYx39Nxr58g==" }, "node_modules/@types/nodemailer": { "version": "6.4.17", @@ -356,6 +1043,13 @@ "@types/superagent": "^8.1.0" } }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true + }, "node_modules/@types/yauzl": { "version": "2.10.3", "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", @@ -374,6 +1068,17 @@ "@types/node": "*" } }, + "node_modules/@zip.js/zip.js": { + "version": "2.8.21", + "resolved": "https://registry.npmjs.org/@zip.js/zip.js/-/zip.js-2.8.21.tgz", + "integrity": "sha512-fkyzXISE3IMrstDO1AgPkJCx14MYHP/suIGiAovEYEuBjq3mffsuL6aMV7ohOSjW4rXtuACuUfpA3GtITgdtYg==", + "license": "BSD-3-Clause", + "engines": { + "bun": ">=0.7.0", + "deno": ">=1.0.0", + "node": ">=18.0.0" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -420,7 +1125,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, "engines": { "node": ">=8" } @@ -429,7 +1133,6 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -491,6 +1194,18 @@ "integrity": 
"sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "dev": true }, + "node_modules/autolinker": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-4.1.5.tgz", + "integrity": "sha512-vEfYZPmvVOIuE567XBVCsx8SBgOYtjB2+S1iAaJ+HgH+DNjAcrHem2hmAeC9yaNGWayicv4yR+9UaJlkF3pvtw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.8.1" + }, + "engines": { + "pnpm": ">=10.10.0" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -522,6 +1237,18 @@ "node": ">=8" } }, + "node_modules/bitmap-sdf": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/bitmap-sdf/-/bitmap-sdf-1.0.4.tgz", + "integrity": "sha512-1G3U4n5JE6RAiALMxu0p1XmeZkTeCwGKykzsLTCqVzfSDaN6S7fKnkIkfejogz+iwqBWc0UYAIKnKHNN7pSfDg==", + "license": "MIT" + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "license": "MIT" + }, "node_modules/body-parser": { "version": "1.20.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", @@ -632,6 +1359,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cesium": { + "version": "1.138.0", + "resolved": "https://registry.npmjs.org/cesium/-/cesium-1.138.0.tgz", + "integrity": "sha512-YX7Ttd4LzAxunuzcKPyOCQa+BPc2RmenqnkM5uZkk/GVwor724bd+F3kdVP4IyMbTgxFkchXuX2Aa8L1Y0/ZxA==", + "license": "Apache-2.0", + "workspaces": [ + "packages/engine", + "packages/widgets", + "packages/sandcastle" + ], + "dependencies": { + "@cesium/engine": "^22.3.0", + "@cesium/widgets": "^14.3.0" + }, + "engines": { + "node": ">=20.19.0" + } + }, "node_modules/chai": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", @@ -739,7 +1484,6 
@@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, "dependencies": { "color-name": "~1.1.4" }, @@ -750,8 +1494,7 @@ "node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/combined-stream": { "version": "1.0.8", @@ -765,6 +1508,12 @@ "node": ">= 0.8" } }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "license": "MIT" + }, "node_modules/component-emitter": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", @@ -775,14 +1524,16 @@ } }, "node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "dependencies": { - "safe-buffer": "5.2.1" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/content-type": { @@ -869,6 +1620,15 @@ "integrity": 
"sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", "dev": true }, + "node_modules/cwise-compiler": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/cwise-compiler/-/cwise-compiler-1.1.3.tgz", + "integrity": "sha512-WXlK/m+Di8DMMcCjcWr4i+XzcQra9eCdXIJrgh4TUgh0pIS/yJduLxS9JgefsHJ/YVLdgPtXm9r62W92MvanEQ==", + "license": "MIT", + "dependencies": { + "uniq": "^1.0.0" + } + }, "node_modules/debug": { "version": "4.3.7", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", @@ -939,6 +1699,15 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, "node_modules/dezalgo": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", @@ -958,6 +1727,27 @@ "node": ">=0.3.1" } }, + "node_modules/dompurify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/draco3d": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/draco3d/-/draco3d-1.5.7.tgz", + "integrity": "sha512-m6WCKt/erDXcw+70IJXnG7M3awwQPAsZvJGX5zY7beBqpELw6RDGkYVU0W43AFxye4pDZ5i2Lbyc/NNGqwjUVQ==", + "license": "Apache-2.0" + }, + "node_modules/draco3dgltf": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/draco3dgltf/-/draco3dgltf-1.5.7.tgz", + "integrity": "sha512-LeqcpmoHIyYUi0z70/H3tMkGj8QhqVxq6FJGPjlzR24BNkQ6jyMheMvFKJBI0dzGZrEOUyQEmZ8axM1xRrbRiw==", + "license": "Apache-2.0" + }, 
"node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -971,6 +1761,12 @@ "node": ">= 0.4" } }, + "node_modules/earcut": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/earcut/-/earcut-3.0.2.tgz", + "integrity": "sha512-X7hshQbLyMJ/3RPhyObLARM2sNxxmRALLKx1+NVFFnQ9gKzmCrxm9+uLIAdBcvc8FNLpctqlQ2V6AE92Ol9UDQ==", + "license": "ISC" + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -979,8 +1775,7 @@ "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/encodeurl": { "version": "2.0.0", @@ -1021,7 +1816,6 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, "engines": { "node": ">=6" } @@ -1110,6 +1904,18 @@ "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, + "node_modules/express/node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/express/node_modules/cookie": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", @@ -1238,6 +2044,20 @@ "node": ">= 0.6" } }, + "node_modules/fs-extra": { + "version": "11.3.3", + "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -1270,7 +2090,6 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, "engines": { "node": "6.* || 8.* || >= 10.*" } @@ -1333,6 +2152,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "license": "MIT" + }, "node_modules/handlebars": { "version": "4.7.8", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", @@ -1478,6 +2309,12 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "node_modules/iota-array": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/iota-array/-/iota-array-1.0.0.tgz", + "integrity": "sha512-pZ2xT+LOHckCatGQ3DcG/a+QuEqvoxqkiL7tvE8nn3uuu+f6i1TtpB5/FtWFbxUuVr5PZCx8KskuGatbJDXOWA==", + "license": 
"MIT" + }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -1498,6 +2335,12 @@ "node": ">=8" } }, + "node_modules/is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "license": "MIT" + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -1511,7 +2354,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, "engines": { "node": ">=8" } @@ -1558,6 +2400,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/jpeg-js": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz", + "integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==", + "license": "BSD-3-Clause" + }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -1570,6 +2418,33 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsep": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz", + "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==", + "license": "MIT", + "engines": { + "node": ">= 10.16.0" + } + }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + 
"optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/kdbush": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz", + "integrity": "sha512-WbCVYJ27Sz8zi9Q7Q0xHC+05iwkm3Znipc2XTlrnJbsHMYktW4hPhXUE8Ys1engBrvffoSCqbil1JQAa7clRpA==", + "license": "ISC" + }, "node_modules/keygrip": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/keygrip/-/keygrip-1.1.0.tgz", @@ -1581,6 +2456,18 @@ "node": ">= 0.6" } }, + "node_modules/ktx-parse": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ktx-parse/-/ktx-parse-1.1.0.tgz", + "integrity": "sha512-mKp3y+FaYgR7mXWAbyyzpa/r1zDWeaunH+INJO4fou3hb45XuNSwar+7llrRyvpMWafxSIi99RNFJ05MHedaJQ==", + "license": "MIT" + }, + "node_modules/lerc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lerc/-/lerc-2.0.0.tgz", + "integrity": "sha512-7qo1Mq8ZNmaR4USHHm615nEW2lPeeWJ3bTyoqFbd35DLx0LUH7C6ptt5FDCTAlbIzs3+WKrk5SkJvw8AFDE2hg==", + "license": "Apache-2.0" + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -1612,6 +2499,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", + "license": "Apache-2.0" + }, "node_modules/loupe": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz", @@ -1648,6 +2541,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/mersenne-twister": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mersenne-twister/-/mersenne-twister-1.1.0.tgz", + "integrity": "sha512-mUYWsMKNrm4lfygPkL3OfGzOPTR2DBlTkBNHM//F6hGp8cLThY897crAlk3/Jo17LEOOjQUrNAx6DvgO77QJkA==", + "license": "MIT" + }, + "node_modules/meshoptimizer": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/meshoptimizer/-/meshoptimizer-1.0.1.tgz", + "integrity": "sha512-Vix+QlA1YYT3FwmBBZ+49cE5y/b+pRrcXKqGpS5ouh33d3lSp2PoTpCw19E0cKDFWalembrHnIaZetf27a+W2g==", + "license": "MIT" + }, "node_modules/methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -1816,34 +2721,155 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/ndarray": { + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/ndarray/-/ndarray-1.0.19.tgz", + "integrity": "sha512-B4JHA4vdyZU30ELBw3g7/p9bZupyew5a7tX1Y/gGeF2hafrPaQZhgrGQfsvgfYbgdFZjYwuEcnaobeM/WMW+HQ==", + "license": "MIT", + "dependencies": { + "iota-array": "^1.0.0", + "is-buffer": "^1.0.2" + } + }, + "node_modules/ndarray-lanczos": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/ndarray-lanczos/-/ndarray-lanczos-0.3.0.tgz", + "integrity": "sha512-5kBmmG3Zvyj77qxIAC4QFLKuYdDIBJwCG+DukT6jQHNa1Ft74/hPH1z5mbQXeHBt8yvGPBGVrr3wEOdJPYYZYg==", + "license": "MIT", + "dependencies": { + "@types/ndarray": "^1.0.11", + "ndarray": "^1.0.19" + } + }, + "node_modules/ndarray-ops": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ndarray-ops/-/ndarray-ops-1.2.2.tgz", + "integrity": "sha512-BppWAFRjMYF7N/r6Ie51q6D4fs0iiGmeXIACKY66fLpnwIui3Wc3CXiD/30mgLbDjPpSLrsqcp3Z62+IcHZsDw==", + "license": "MIT", + "dependencies": { + "cwise-compiler": "^1.0.0" + } + }, + "node_modules/ndarray-pixels": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ndarray-pixels/-/ndarray-pixels-5.0.1.tgz", + "integrity": "sha512-IBtrpefpqlI8SPDCGjXk4v5NV5z7r3JSuCbfuEEXaM0vrOJtNGgYUa4C3Lt5H+qWdYF4BCPVFsnXhNC7QvZwkw==", + "license": "MIT", + "dependencies": { + "@types/ndarray": "^1.0.14", + "ndarray": "^1.0.19", + "ndarray-ops": "^1.2.2", + "sharp": "^0.34.0" + } + }, "node_modules/negotiator": { 
"version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", "engines": { - "node": ">= 0.6" + "node": ">= 0.6" + } + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" + }, + "node_modules/nodemailer": { + "version": "6.9.16", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.16.tgz", + "integrity": "sha512-psAuZdTIRN08HKVd/E8ObdV6NO7NTBY3KsC30F7M4H1OnmLCUNaS56FpYxyb26zWLSyYF9Ozch9KYHhHegsiOQ==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nosleep.js": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/nosleep.js/-/nosleep.js-0.12.0.tgz", + "integrity": "sha512-9d1HbpKLh3sdWlhXMhU6MMH+wQzKkrgfRkYV0EBdvt99YJfj0ilCJrWRDYG2130Tm4GXbEoTCx5b34JSaP+HhA==", + "license": "MIT" + }, + "node_modules/obj2gltf": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/obj2gltf/-/obj2gltf-3.2.0.tgz", + "integrity": "sha512-1pCbHSK55tiTkJG8Td0Nfqx97jcCtIKNeoukWhmuiyEtty3gmLBxHRN6WdYM6XKKAVgZVgeJ/PxXAizeRbQFxQ==", + "license": "Apache-2.0", + "dependencies": { + "bluebird": "^3.7.2", + "cesium": "^1.86.1", + "fs-extra": "^11.0.0", + "jpeg-js": "^0.4.3", + "mime": "^3.0.0", + "pngjs": "^7.0.0", + "yargs": "^17.2.1" + }, + "bin": { + "obj2gltf": "bin/obj2gltf.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/obj2gltf/node_modules/cliui": { + "version": 
"8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" } }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" + "node_modules/obj2gltf/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } }, - "node_modules/nodemailer": { - "version": "6.9.16", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.16.tgz", - "integrity": "sha512-psAuZdTIRN08HKVd/E8ObdV6NO7NTBY3KsC30F7M4H1OnmLCUNaS56FpYxyb26zWLSyYF9Ozch9KYHhHegsiOQ==", + "node_modules/obj2gltf/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, "engines": { - "node": ">=6.0.0" + "node": ">=12" } }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, + 
"node_modules/obj2gltf/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "license": "ISC", "engines": { - "node": ">=0.10.0" + "node": ">=12" } }, "node_modules/object-inspect": { @@ -1915,6 +2941,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pako": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz", + "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==", + "license": "(MIT AND Zlib)" + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -2061,6 +3093,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pngjs": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz", + "integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==", + "license": "MIT", + "engines": { + "node": ">=14.19.0" + } + }, "node_modules/postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -2100,6 +3141,36 @@ "node": ">=0.10.0" } }, + "node_modules/property-graph": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/property-graph/-/property-graph-4.0.0.tgz", + "integrity": "sha512-I0hojAJfTbSCZy3y6xyK29eayxo14v1bj1VPiDkHjTdz33SV6RdfMz2AHnf4ai62Vng2mN5GkaKahkooBIo9gA==", + "license": "MIT" + }, + "node_modules/protobufjs": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-8.0.0.tgz", + "integrity": "sha512-jx6+sE9h/UryaCZhsJWbJtTEy47yXoGNYI4z8ZaRncM0zBKeRqjO2JEcOUYwrYGb1WLhXM1FfMzW3annvFv0rw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + 
"dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -2126,6 +3197,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/quickselect": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/quickselect/-/quickselect-3.0.0.tgz", + "integrity": "sha512-XdjUArbK4Bm5fLLvlm5KpTFOiOThgfWWI4axAZDWg4E/0mKdZyI9tNEfds27qCi1ze/vwTR16kvmmGhRra3c2g==", + "license": "ISC" + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -2157,6 +3234,15 @@ "node": ">= 0.8" } }, + "node_modules/rbush": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/rbush/-/rbush-4.0.1.tgz", + "integrity": "sha512-IP0UpfeWQujYC8Jg162rMNc01Rf0gWMMAb2Uxus/Q0qOFw4lCcq6ZnQEZwUoJqWyUGJ9th7JjwI4yIWo+uvoAQ==", + "license": "MIT", + "dependencies": { + "quickselect": "^3.0.0" + } + }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -2178,7 +3264,6 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -2212,6 +3297,18 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "integrity": 
"sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/send": { "version": "0.19.0", "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", @@ -2289,6 +3386,50 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, + "node_modules/sharp": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@img/colour": "^1.0.0", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + "@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + "@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + 
"@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5" + } + }, "node_modules/side-channel": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", @@ -2396,7 +3537,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -2410,7 +3550,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -2510,6 +3649,20 @@ "node": ">=0.6" } }, + "node_modules/topojson-client": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/topojson-client/-/topojson-client-3.1.0.tgz", + "integrity": "sha512-605uxS6bcYxGXw9qi62XyrV6Q3xwbndjachmNxu8HWTtVPxZfEJN9fd/SZS1Q54Sn2y0TMyMxFj/cJINqGHrKw==", + "license": "ISC", + "dependencies": { + "commander": "2" + }, + "bin": { + "topo2geo": "bin/topo2geo", + "topomerge": "bin/topomerge", + "topoquantize": "bin/topoquantize" + } + }, "node_modules/ts-node": { "version": "10.9.2", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", @@ -2562,6 +3715,12 @@ "node": ">=0.3.1" } }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, "node_modules/tsscmp": { "version": 
"1.0.6", "resolved": "https://registry.npmjs.org/tsscmp/-/tsscmp-1.0.6.tgz", @@ -2608,6 +3767,21 @@ "node": ">=0.8.0" } }, + "node_modules/uniq": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", + "integrity": "sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA==", + "license": "MIT" + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -2616,6 +3790,12 @@ "node": ">= 0.8" } }, + "node_modules/urijs": { + "version": "1.19.11", + "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz", + "integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==", + "license": "MIT" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -2653,7 +3833,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -2696,7 +3875,6 @@ "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, "engines": { "node": ">=10" } @@ -2743,86 +3921,349 @@ "node": ">=10" } }, - "node_modules/yauzl": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.0.tgz", - "integrity": 
"sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w==", - "dependencies": { - "buffer-crc32": "~0.2.3", - "pend": "~1.2.0" - }, - "engines": { - "node": ">=12" + "node_modules/yauzl": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.0.tgz", + "integrity": "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w==", + "dependencies": { + "buffer-crc32": "~0.2.3", + "pend": "~1.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yazl": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/yazl/-/yazl-3.3.1.tgz", + "integrity": "sha512-BbETDVWG+VcMUle37k5Fqp//7SDOK2/1+T7X8TD96M3D9G8jK5VLUdQVdVjGi8im7FGkazX7kk5hkU8X4L5Bng==", + "dependencies": { + "buffer-crc32": "^1.0.0" + } + }, + "node_modules/yazl/node_modules/buffer-crc32": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@babel/runtime": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", + "integrity": 
"sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", + "requires": { + "regenerator-runtime": "^0.14.0" + } + }, + "@cesium/engine": { + "version": "22.3.0", + "resolved": "https://registry.npmjs.org/@cesium/engine/-/engine-22.3.0.tgz", + "integrity": "sha512-oDl+nWX/qfHYQ0lEdGxLqZoKEtTMghvJDzZKTycYfiIuDYDh8Kh0Oy45wr3mSJse3PuTj1e6hDmbw8vbycCOxw==", + "requires": { + "@cesium/wasm-splats": "^0.1.0-alpha.2", + "@spz-loader/core": "0.3.0", + "@tweenjs/tween.js": "^25.0.0", + "@zip.js/zip.js": "^2.8.1", + "autolinker": "^4.0.0", + "bitmap-sdf": "^1.0.3", + "dompurify": "^3.3.0", + "draco3d": "^1.5.1", + "earcut": "^3.0.0", + "grapheme-splitter": "^1.0.4", + "jsep": "^1.3.8", + "kdbush": "^4.0.1", + "ktx-parse": "^1.0.0", + "lerc": "^2.0.0", + "mersenne-twister": "^1.1.0", + "meshoptimizer": "^1.0.1", + "pako": "^2.0.4", + "protobufjs": "^8.0.0", + "rbush": "^4.0.1", + "topojson-client": "^3.1.0", + "urijs": "^1.19.7" + } + }, + "@cesium/wasm-splats": { + "version": "0.1.0-alpha.2", + "resolved": "https://registry.npmjs.org/@cesium/wasm-splats/-/wasm-splats-0.1.0-alpha.2.tgz", + "integrity": "sha512-t9pMkknv31hhIbLpMa8yPvmqfpvs5UkUjgqlQv9SeO8VerCXOYnyP8/486BDaFrztM0A7FMbRjsXtNeKvqQghA==" + }, + "@cesium/widgets": { + "version": "14.3.0", + "resolved": "https://registry.npmjs.org/@cesium/widgets/-/widgets-14.3.0.tgz", + "integrity": "sha512-1bS+Nv/uXwP0/NV0o4XeUA5nLCWttjTmKwl+pHnbZXp0ZwDmClb0xVDruDyVtLrUuRhsk84JZ4rXI/IT7HXOvA==", + "requires": { + "@cesium/engine": "^22.3.0", + "nosleep.js": "^0.12.0" + } + }, + "@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "requires": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "dependencies": { + "@jridgewell/trace-mapping": { + "version": 
"0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + } + } + }, + "@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "optional": true, + "requires": { + "tslib": "^2.4.0" + } + }, + "@gltf-transform/core": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/core/-/core-4.3.0.tgz", + "integrity": "sha512-ZeaQfszGJ9LYwELszu45CuDQCsE26lJNNe36FVmN8xclaT6WDdCj7fwGpQXo0/l/YgAVAHX+uO7YNBW75/SRYw==", + "requires": { + "property-graph": "^4.0.0" + } + }, + "@gltf-transform/extensions": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/extensions/-/extensions-4.3.0.tgz", + "integrity": "sha512-XDAjQPYVMHa/VDpSbfCBwI+/1muwRJCaXhUpLgnUzAjn0D//PgvIAcbNm1EwBl3LIWBSwjDUCn2LiMAjp+aXVw==", + "requires": { + "@gltf-transform/core": "^4.3.0", + "ktx-parse": "^1.0.1" + } + }, + "@gltf-transform/functions": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@gltf-transform/functions/-/functions-4.3.0.tgz", + "integrity": "sha512-FZggHVgt3DHOezgESBrf2vDzuD2FYQYaNT2sT/aP316SIwhuiIwby3z7rhV9joDvWqqUaPkf1UmkjlOaY9riSQ==", + "requires": { + "@gltf-transform/core": "^4.3.0", + "@gltf-transform/extensions": "^4.3.0", + "ktx-parse": "^1.0.1", + "ndarray": "^1.0.19", + "ndarray-lanczos": "^0.3.0", + "ndarray-pixels": "^5.0.1" + } + }, + "@img/colour": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz", + "integrity": 
"sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==" + }, + "@img/sharp-darwin-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "optional": true, + "requires": { + "@img/sharp-libvips-darwin-arm64": "1.2.4" + } + }, + "@img/sharp-darwin-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "optional": true, + "requires": { + "@img/sharp-libvips-darwin-x64": "1.2.4" + } + }, + "@img/sharp-libvips-darwin-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "optional": true + }, + "@img/sharp-libvips-darwin-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + "optional": true + }, + "@img/sharp-libvips-linux-arm": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "optional": true + }, + "@img/sharp-libvips-linux-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": 
"sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "optional": true + }, + "@img/sharp-libvips-linux-ppc64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.4.tgz", + "integrity": "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==", + "optional": true + }, + "@img/sharp-libvips-linux-riscv64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-riscv64/-/sharp-libvips-linux-riscv64-1.2.4.tgz", + "integrity": "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==", + "optional": true + }, + "@img/sharp-libvips-linux-s390x": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "optional": true + }, + "@img/sharp-libvips-linux-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "optional": true + }, + "@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "optional": true + }, + "@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "optional": 
true + }, + "@img/sharp-linux-arm": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "optional": true, + "requires": { + "@img/sharp-libvips-linux-arm": "1.2.4" + } + }, + "@img/sharp-linux-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "optional": true, + "requires": { + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "@img/sharp-linux-ppc64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.5.tgz", + "integrity": "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==", + "optional": true, + "requires": { + "@img/sharp-libvips-linux-ppc64": "1.2.4" + } + }, + "@img/sharp-linux-riscv64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-riscv64/-/sharp-linux-riscv64-0.34.5.tgz", + "integrity": "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==", + "optional": true, + "requires": { + "@img/sharp-libvips-linux-riscv64": "1.2.4" } }, - "node_modules/yazl": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/yazl/-/yazl-3.3.1.tgz", - "integrity": "sha512-BbETDVWG+VcMUle37k5Fqp//7SDOK2/1+T7X8TD96M3D9G8jK5VLUdQVdVjGi8im7FGkazX7kk5hkU8X4L5Bng==", - "dependencies": { - "buffer-crc32": "^1.0.0" + "@img/sharp-linux-s390x": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "optional": true, + "requires": { + 
"@img/sharp-libvips-linux-s390x": "1.2.4" } }, - "node_modules/yazl/node_modules/buffer-crc32": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", - "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", - "engines": { - "node": ">=8.0.0" + "@img/sharp-linux-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "optional": true, + "requires": { + "@img/sharp-libvips-linux-x64": "1.2.4" } }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "engines": { - "node": ">=6" + "@img/sharp-linuxmusl-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "optional": true, + "requires": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" } }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - }, - "dependencies": { - "@babel/runtime": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", - "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", + "@img/sharp-linuxmusl-x64": { + 
"version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "optional": true, "requires": { - "regenerator-runtime": "^0.14.0" + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" } }, - "@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", - "dev": true, + "@img/sharp-wasm32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "optional": true, "requires": { - "@jridgewell/trace-mapping": "0.3.9" - }, - "dependencies": { - "@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", - "dev": true, - "requires": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - } + "@emnapi/runtime": "^1.7.0" } }, + "@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "optional": true + }, + "@img/sharp-win32-ia32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "optional": true + }, + 
"@img/sharp-win32-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "optional": true + }, "@jridgewell/resolve-uri": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", @@ -2835,6 +4276,65 @@ "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", "dev": true }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "@spz-loader/core": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@spz-loader/core/-/core-0.3.0.tgz", + "integrity": "sha512-sbStwMHb/MIE29st7rRuMYWqhX1UmLSFzdpyGtUZUXLkFNIuYKblzjQdtiet8bau8sUf21uL1DQ451zuySGmcA==" + }, "@tsconfig/node10": { "version": "1.0.9", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", @@ -2859,6 +4359,11 @@ "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", "dev": true }, + "@tweenjs/tween.js": { + "version": "25.0.0", + "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-25.0.0.tgz", + "integrity": "sha512-XKLA6syeBUaPzx4j3qwMqzzq+V4uo72BnlbOjmuljLrRqdsd3qnzvZZoxvMHZ23ndsRS4aufU6JOZYpCbU6T1A==" + }, "@types/body-parser": { "version": "1.19.2", "resolved": 
"https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -2896,6 +4401,12 @@ "@types/node": "*" } }, + "@types/content-disposition": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/@types/content-disposition/-/content-disposition-0.5.9.tgz", + "integrity": "sha512-8uYXI3Gw35MhiVYhG3s295oihrxRyytcRHjSjqnqZVDDy/xcGBRny7+Xj1Wgfhv5QzRtN2hB2dVRBUX9XW3UcQ==", + "dev": true + }, "@types/cookie-parser": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.8.tgz", @@ -2925,6 +4436,21 @@ "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", "dev": true }, + "@types/draco3d": { + "version": "1.4.10", + "resolved": "https://registry.npmjs.org/@types/draco3d/-/draco3d-1.4.10.tgz", + "integrity": "sha512-AX22jp8Y7wwaBgAixaSvkoG4M/+PlAcm3Qs4OW8yT9DM4xUpWKeFhLueTAyZF39pviAdcDdeJoACapiAceqNcw==", + "dev": true + }, + "@types/draco3dgltf": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/draco3dgltf/-/draco3dgltf-1.4.3.tgz", + "integrity": "sha512-JTY574f8xRI9+bOsDajeVSQ/gnIo0q3dt/MAJhNRKWJKdH2TAP3hld+lQ+eQnG9Eb6Ae493EiKi2oDZZpciQgw==", + "dev": true, + "requires": { + "@types/draco3d": "*" + } + }, "@types/express": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz", @@ -2988,11 +4514,15 @@ "@types/node": "*" } }, + "@types/ndarray": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/@types/ndarray/-/ndarray-1.0.14.tgz", + "integrity": "sha512-oANmFZMnFQvb219SSBIhI1Ih/r4CvHDOzkWyJS/XRqkMrGH5/kaPSA1hQhdIBzouaE+5KpE/f5ylI9cujmckQg==" + }, "@types/node": { "version": "16.18.23", "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.23.tgz", - "integrity": "sha512-XAMpaw1s1+6zM+jn2tmw8MyaRDIJfXxqmIQIS0HfoGYPuf7dUWeiUKopwq13KFX9lEp1+THGtlaaYx39Nxr58g==", - "dev": true + "integrity": 
"sha512-XAMpaw1s1+6zM+jn2tmw8MyaRDIJfXxqmIQIS0HfoGYPuf7dUWeiUKopwq13KFX9lEp1+THGtlaaYx39Nxr58g==" }, "@types/nodemailer": { "version": "6.4.17", @@ -3086,6 +4616,12 @@ "@types/superagent": "^8.1.0" } }, + "@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "optional": true + }, "@types/yauzl": { "version": "2.10.3", "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", @@ -3104,6 +4640,11 @@ "@types/node": "*" } }, + "@zip.js/zip.js": { + "version": "2.8.21", + "resolved": "https://registry.npmjs.org/@zip.js/zip.js/-/zip.js-2.8.21.tgz", + "integrity": "sha512-fkyzXISE3IMrstDO1AgPkJCx14MYHP/suIGiAovEYEuBjq3mffsuL6aMV7ohOSjW4rXtuACuUfpA3GtITgdtYg==" + }, "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -3134,14 +4675,12 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, "requires": { "color-convert": "^2.0.1" } @@ -3191,6 +4730,14 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "dev": true }, + "autolinker": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-4.1.5.tgz", + "integrity": 
"sha512-vEfYZPmvVOIuE567XBVCsx8SBgOYtjB2+S1iAaJ+HgH+DNjAcrHem2hmAeC9yaNGWayicv4yR+9UaJlkF3pvtw==", + "requires": { + "tslib": "^2.8.1" + } + }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -3218,6 +4765,16 @@ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true }, + "bitmap-sdf": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/bitmap-sdf/-/bitmap-sdf-1.0.4.tgz", + "integrity": "sha512-1G3U4n5JE6RAiALMxu0p1XmeZkTeCwGKykzsLTCqVzfSDaN6S7fKnkIkfejogz+iwqBWc0UYAIKnKHNN7pSfDg==" + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, "body-parser": { "version": "1.20.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", @@ -3301,6 +4858,15 @@ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true }, + "cesium": { + "version": "1.138.0", + "resolved": "https://registry.npmjs.org/cesium/-/cesium-1.138.0.tgz", + "integrity": "sha512-YX7Ttd4LzAxunuzcKPyOCQa+BPc2RmenqnkM5uZkk/GVwor724bd+F3kdVP4IyMbTgxFkchXuX2Aa8L1Y0/ZxA==", + "requires": { + "@cesium/engine": "^22.3.0", + "@cesium/widgets": "^14.3.0" + } + }, "chai": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", @@ -3381,7 +4947,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, "requires": { "color-name": "~1.1.4" } @@ -3389,8 +4954,7 @@ "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "combined-stream": { "version": "1.0.8", @@ -3401,6 +4965,11 @@ "delayed-stream": "~1.0.0" } }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, "component-emitter": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", @@ -3408,12 +4977,9 @@ "dev": true }, "content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "requires": { - "safe-buffer": "5.2.1" - } + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==" }, "content-type": { "version": "1.0.5", @@ -3486,6 +5052,14 @@ "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", "dev": true }, + "cwise-compiler": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/cwise-compiler/-/cwise-compiler-1.1.3.tgz", + "integrity": "sha512-WXlK/m+Di8DMMcCjcWr4i+XzcQra9eCdXIJrgh4TUgh0pIS/yJduLxS9JgefsHJ/YVLdgPtXm9r62W92MvanEQ==", + "requires": { + "uniq": "^1.0.0" + } + }, "debug": { "version": "4.3.7", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", @@ -3531,6 +5105,11 @@ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": 
"sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, + "detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==" + }, "dezalgo": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", @@ -3547,6 +5126,24 @@ "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "dev": true }, + "dompurify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "requires": { + "@types/trusted-types": "^2.0.7" + } + }, + "draco3d": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/draco3d/-/draco3d-1.5.7.tgz", + "integrity": "sha512-m6WCKt/erDXcw+70IJXnG7M3awwQPAsZvJGX5zY7beBqpELw6RDGkYVU0W43AFxye4pDZ5i2Lbyc/NNGqwjUVQ==" + }, + "draco3dgltf": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/draco3dgltf/-/draco3dgltf-1.5.7.tgz", + "integrity": "sha512-LeqcpmoHIyYUi0z70/H3tMkGj8QhqVxq6FJGPjlzR24BNkQ6jyMheMvFKJBI0dzGZrEOUyQEmZ8axM1xRrbRiw==" + }, "dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -3557,6 +5154,11 @@ "gopd": "^1.2.0" } }, + "earcut": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/earcut/-/earcut-3.0.2.tgz", + "integrity": "sha512-X7hshQbLyMJ/3RPhyObLARM2sNxxmRALLKx1+NVFFnQ9gKzmCrxm9+uLIAdBcvc8FNLpctqlQ2V6AE92Ol9UDQ==" + }, "ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -3565,8 +5167,7 @@ "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "encodeurl": { "version": "2.0.0", @@ -3594,8 +5195,7 @@ "escalade": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==" }, "escape-html": { "version": "1.0.3", @@ -3651,6 +5251,14 @@ "vary": "~1.1.2" }, "dependencies": { + "content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "requires": { + "safe-buffer": "5.2.1" + } + }, "cookie": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", @@ -3759,6 +5367,16 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, + "fs-extra": { + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -3780,8 +5398,7 @@ "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": 
"sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, "get-intrinsic": { "version": "1.2.7", @@ -3823,6 +5440,16 @@ "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==" }, + "graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==" + }, "handlebars": { "version": "4.7.8", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", @@ -3914,6 +5541,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "iota-array": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/iota-array/-/iota-array-1.0.0.tgz", + "integrity": "sha512-pZ2xT+LOHckCatGQ3DcG/a+QuEqvoxqkiL7tvE8nn3uuu+f6i1TtpB5/FtWFbxUuVr5PZCx8KskuGatbJDXOWA==" + }, "ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -3928,6 +5560,11 @@ "binary-extensions": "^2.0.0" } }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, "is-extglob": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -3937,8 +5574,7 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "is-glob": { "version": "4.0.3", @@ -3967,6 +5603,11 @@ "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", "dev": true }, + "jpeg-js": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz", + "integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==" + }, "js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -3976,6 +5617,25 @@ "argparse": "^2.0.1" } }, + "jsep": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz", + "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==" + }, + "jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "kdbush": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz", + "integrity": "sha512-WbCVYJ27Sz8zi9Q7Q0xHC+05iwkm3Znipc2XTlrnJbsHMYktW4hPhXUE8Ys1engBrvffoSCqbil1JQAa7clRpA==" + }, "keygrip": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/keygrip/-/keygrip-1.1.0.tgz", @@ -3984,6 +5644,16 @@ "tsscmp": "1.0.6" } }, + "ktx-parse": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/ktx-parse/-/ktx-parse-1.1.0.tgz", + "integrity": "sha512-mKp3y+FaYgR7mXWAbyyzpa/r1zDWeaunH+INJO4fou3hb45XuNSwar+7llrRyvpMWafxSIi99RNFJ05MHedaJQ==" + }, + "lerc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lerc/-/lerc-2.0.0.tgz", + "integrity": "sha512-7qo1Mq8ZNmaR4USHHm615nEW2lPeeWJ3bTyoqFbd35DLx0LUH7C6ptt5FDCTAlbIzs3+WKrk5SkJvw8AFDE2hg==" + }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -4003,6 +5673,11 @@ "is-unicode-supported": "^0.1.0" } }, + "long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==" + }, "loupe": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz", @@ -4030,6 +5705,16 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==" }, + "mersenne-twister": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mersenne-twister/-/mersenne-twister-1.1.0.tgz", + "integrity": "sha512-mUYWsMKNrm4lfygPkL3OfGzOPTR2DBlTkBNHM//F6hGp8cLThY897crAlk3/Jo17LEOOjQUrNAx6DvgO77QJkA==" + }, + "meshoptimizer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/meshoptimizer/-/meshoptimizer-1.0.1.tgz", + "integrity": "sha512-Vix+QlA1YYT3FwmBBZ+49cE5y/b+pRrcXKqGpS5ouh33d3lSp2PoTpCw19E0cKDFWalembrHnIaZetf27a+W2g==" + }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -4160,6 +5845,43 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "ndarray": { + "version": "1.0.19", + "resolved": 
"https://registry.npmjs.org/ndarray/-/ndarray-1.0.19.tgz", + "integrity": "sha512-B4JHA4vdyZU30ELBw3g7/p9bZupyew5a7tX1Y/gGeF2hafrPaQZhgrGQfsvgfYbgdFZjYwuEcnaobeM/WMW+HQ==", + "requires": { + "iota-array": "^1.0.0", + "is-buffer": "^1.0.2" + } + }, + "ndarray-lanczos": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/ndarray-lanczos/-/ndarray-lanczos-0.3.0.tgz", + "integrity": "sha512-5kBmmG3Zvyj77qxIAC4QFLKuYdDIBJwCG+DukT6jQHNa1Ft74/hPH1z5mbQXeHBt8yvGPBGVrr3wEOdJPYYZYg==", + "requires": { + "@types/ndarray": "^1.0.11", + "ndarray": "^1.0.19" + } + }, + "ndarray-ops": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ndarray-ops/-/ndarray-ops-1.2.2.tgz", + "integrity": "sha512-BppWAFRjMYF7N/r6Ie51q6D4fs0iiGmeXIACKY66fLpnwIui3Wc3CXiD/30mgLbDjPpSLrsqcp3Z62+IcHZsDw==", + "requires": { + "cwise-compiler": "^1.0.0" + } + }, + "ndarray-pixels": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ndarray-pixels/-/ndarray-pixels-5.0.1.tgz", + "integrity": "sha512-IBtrpefpqlI8SPDCGjXk4v5NV5z7r3JSuCbfuEEXaM0vrOJtNGgYUa4C3Lt5H+qWdYF4BCPVFsnXhNC7QvZwkw==", + "requires": { + "@types/ndarray": "^1.0.14", + "ndarray": "^1.0.19", + "ndarray-ops": "^1.2.2", + "sharp": "^0.34.0" + } + }, "negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -4181,6 +5903,61 @@ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true }, + "nosleep.js": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/nosleep.js/-/nosleep.js-0.12.0.tgz", + "integrity": "sha512-9d1HbpKLh3sdWlhXMhU6MMH+wQzKkrgfRkYV0EBdvt99YJfj0ilCJrWRDYG2130Tm4GXbEoTCx5b34JSaP+HhA==" + }, + "obj2gltf": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/obj2gltf/-/obj2gltf-3.2.0.tgz", + "integrity": "sha512-1pCbHSK55tiTkJG8Td0Nfqx97jcCtIKNeoukWhmuiyEtty3gmLBxHRN6WdYM6XKKAVgZVgeJ/PxXAizeRbQFxQ==", + "requires": { + "bluebird": 
"^3.7.2", + "cesium": "^1.86.1", + "fs-extra": "^11.0.0", + "jpeg-js": "^0.4.3", + "mime": "^3.0.0", + "pngjs": "^7.0.0", + "yargs": "^17.2.1" + }, + "dependencies": { + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==" + }, + "yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "requires": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + } + }, + "yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" + } + } + }, "object-inspect": { "version": "1.13.3", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", @@ -4226,6 +6003,11 @@ "p-limit": "^3.0.2" } }, + "pako": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz", + "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==" + }, "parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -4325,6 +6107,11 @@ "integrity": 
"sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, + "pngjs": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz", + "integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==" + }, "postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -4348,6 +6135,30 @@ "xtend": "^4.0.0" } }, + "property-graph": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/property-graph/-/property-graph-4.0.0.tgz", + "integrity": "sha512-I0hojAJfTbSCZy3y6xyK29eayxo14v1bj1VPiDkHjTdz33SV6RdfMz2AHnf4ai62Vng2mN5GkaKahkooBIo9gA==" + }, + "protobufjs": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-8.0.0.tgz", + "integrity": "sha512-jx6+sE9h/UryaCZhsJWbJtTEy47yXoGNYI4z8ZaRncM0zBKeRqjO2JEcOUYwrYGb1WLhXM1FfMzW3annvFv0rw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + } + }, "proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -4365,6 +6176,11 @@ "side-channel": "^1.0.6" } }, + "quickselect": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/quickselect/-/quickselect-3.0.0.tgz", + "integrity": "sha512-XdjUArbK4Bm5fLLvlm5KpTFOiOThgfWWI4axAZDWg4E/0mKdZyI9tNEfds27qCi1ze/vwTR16kvmmGhRra3c2g==" + }, "randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -4390,6 +6206,14 @@ "unpipe": "1.0.0" } }, + "rbush": { + "version": 
"4.0.1", + "resolved": "https://registry.npmjs.org/rbush/-/rbush-4.0.1.tgz", + "integrity": "sha512-IP0UpfeWQujYC8Jg162rMNc01Rf0gWMMAb2Uxus/Q0qOFw4lCcq6ZnQEZwUoJqWyUGJ9th7JjwI4yIWo+uvoAQ==", + "requires": { + "quickselect": "^3.0.0" + } + }, "readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -4407,8 +6231,7 @@ "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" }, "safe-buffer": { "version": "5.2.1", @@ -4425,6 +6248,11 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, + "semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==" + }, "send": { "version": "0.19.0", "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", @@ -4497,6 +6325,40 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, + "sharp": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "requires": { + "@img/colour": "^1.0.0", + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + 
"@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + "@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + "@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" + } + }, "side-channel": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", @@ -4569,7 +6431,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -4580,7 +6441,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -4649,6 +6509,14 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, + "topojson-client": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/topojson-client/-/topojson-client-3.1.0.tgz", + "integrity": 
"sha512-605uxS6bcYxGXw9qi62XyrV6Q3xwbndjachmNxu8HWTtVPxZfEJN9fd/SZS1Q54Sn2y0TMyMxFj/cJINqGHrKw==", + "requires": { + "commander": "2" + } + }, "ts-node": { "version": "10.9.2", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", @@ -4678,6 +6546,11 @@ } } }, + "tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, "tsscmp": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/tsscmp/-/tsscmp-1.0.6.tgz", @@ -4704,11 +6577,26 @@ "integrity": "sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==", "optional": true }, + "uniq": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", + "integrity": "sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA==" + }, + "universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==" + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" }, + "urijs": { + "version": "1.19.11", + "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz", + "integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==" + }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -4740,7 +6628,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - 
"dev": true, "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -4769,8 +6656,7 @@ "y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" }, "yargs": { "version": "16.2.0", diff --git a/source/server/package.json b/source/server/package.json index 67fee6e7b..7663d28e0 100755 --- a/source/server/package.json +++ b/source/server/package.json @@ -22,7 +22,7 @@ "license": "Apache-2.0", "homepage": "https://github.com/Holusion/ecorpus", "engines": { - "node": ">=16.14" + "node": ">=18.17.0" }, "mocha": { "ui": "bdd", @@ -40,19 +40,27 @@ "spec": "./**/*.test.ts" }, "dependencies": { + "@gltf-transform/core": "^4.3.0", + "@gltf-transform/extensions": "^4.3.0", + "@gltf-transform/functions": "^4.3.0", "body-parser": "^1.20.3", + "content-disposition": "^1.0.1", "cookie-parser": "^1.4.7", "cookie-session": "^2.1.0", + "draco3dgltf": "^1.5.7", "express": "^4.21.2", "express-rate-limit": "^7.5.0", "handlebars": "^4.7.8", "i18next": "^24.2.1", "i18next-fs-backend": "^2.6.0", + "meshoptimizer": "^1.0.1", "mime-types": "^2.1.35", "morgan": "^1.10.0", "nodemailer": "^6.9.16", + "obj2gltf": "^3.2.0", "pg": "^8.16.0", "pg-cursor": "^2.15.0", + "sharp": "^0.34.5", "source-map-support": "^0.5.21", "xml-js": "^1.6.11", "yauzl": "^3.2.0", @@ -61,8 +69,10 @@ "devDependencies": { "@types/chai": "^5.0.1", "@types/chai-as-promised": "^8.0.1", + "@types/content-disposition": "^0.5.9", "@types/cookie-parser": "^1.4.8", "@types/cookie-session": "^2.0.49", + "@types/draco3dgltf": "^1.4.3", "@types/express": "^5.0.0", "@types/mime-types": "^2.1.4", "@types/mocha": "^10.0.10", diff --git a/source/server/routes/index.ts b/source/server/routes/index.ts index dea84de75..f1c9d2c84 100644 --- 
a/source/server/routes/index.ts +++ b/source/server/routes/index.ts @@ -1,68 +1,38 @@ -import path from "path"; -import util, { debuglog } from "util"; +import { debuglog } from "util"; import cookieSession from "cookie-session"; -import express, { Request, Response } from "express"; +import express from "express"; -import UserManager from "../auth/UserManager.js"; -import { BadRequestError, HTTPError, UnauthorizedError } from "../utils/errors.js"; +import { HTTPError, UnauthorizedError } from "../utils/errors.js"; import { errorHandlerMdw, LogLevel, notFoundHandlerMdw } from "../utils/errorHandler.js"; -import { mkdir } from "fs/promises"; -import {AppLocals, getHost, getLocals, getUser, getUserManager, isUser} from "../utils/locals.js"; +import {AppLocals, AppParameters, getLocals, getUserManager} from "../utils/locals.js"; -import openDatabase from "../vfs/helpers/db.js"; -import Vfs from "../vfs/index.js"; -import defaultConfig from "../utils/config.js"; import User from "../auth/User.js"; -import Templates, { locales } from "../utils/templates.js"; +import Templates from "../utils/templates.js"; const debug = debuglog("pg:connect"); -export default async function createServer(config = defaultConfig) :Promise{ +export default async function createServer(locals:AppParameters) :Promise{ - await Promise.all([config.files_dir].map(d=>mkdir(d, {recursive: true}))); - let db = await openDatabase({uri: config.database_uri, forceMigration: config.force_migration}); - let uri = new URL(config.database_uri); - debug(`Connected to database ${uri.hostname}:${uri.port}${uri.pathname}`) - const vfs = await Vfs.Open(config.files_dir, {db}); - const userManager = new UserManager(db); - - const templates = new Templates({dir: config.templates_dir, cache: config.node_env == "production"}); + const templates = new Templates({dir: locals.config.templates_dir, cache: locals.config.node_env == "production"}); const app = express(); app.disable('x-powered-by'); - app.set("trust 
proxy", config.trust_proxy); - - if(config.clean_database){ - setTimeout(()=>{ - //Clean file system after a while to prevent delaying startup - vfs.clean().then(()=>console.log("Cleanup done."), e=> console.error("Cleanup failed :", e)); - }, 6000).unref(); - - - setInterval(()=>{ - vfs.optimize(); - }, 2*3600*1000).unref(); - } - + app.set("trust proxy", locals.config.trust_proxy); app.locals = Object.assign(app.locals, { - userManager, - fileDir: config.files_dir, - vfs, - templates, - config, sessionMaxAge: 31 * 24 * 60 * 60*1000, // 1 month, in milliseconds - }) as AppLocals; + templates, + }, locals) as AppLocals; app.use(cookieSession({ name: 'session', - keys: await userManager.getKeys(), + keys: await locals.userManager.getKeys(), // Cookie Options maxAge: (app.locals as AppLocals).sessionMaxAge, sameSite: "lax" @@ -112,7 +82,7 @@ export default async function createServer(config = defaultConfig) :Promise{ router.get("/", wrap(getScenes)); router.propfind("/", wrap(handlePropfind)); // additional checks are used in postScenes to allow people to overrite scenes they have write access on - router.post("/", isUser, wrap(handlePostScenes)); +router.post("/", isUser, bodyParser.json(), wrap(handlePostScenes)); //allow POST outside of canRead : overwrite permissions are otherwise checked router.post("/:scene", isCreator, wrap(handlePostScene)); diff --git a/source/server/routes/scenes/post.test.ts b/source/server/routes/scenes/post.test.ts index f2dcc90c7..8e498197d 100644 --- a/source/server/routes/scenes/post.test.ts +++ b/source/server/routes/scenes/post.test.ts @@ -27,8 +27,7 @@ describe("POST /scenes", function(){ userManager = locals.userManager; }); this.afterEach(async function(){ - await vfs.close(); - await fs.rm(this.dir, {recursive: true}); + await cleanIntegrationContext(this); }); describe("as create", function () { @@ -95,14 +94,13 @@ describe("POST /scenes", function(){ .auth("alice", "12345678") .set("Content-Type", "application/zip") 
.send(zip.body) - .expect(200); - - expect(res.body).to.be.an("object"); - expect(res.body.ok).to.deep.equal([ - 'foo', - 'foo/articles/hello.html', - 'foo/scene.svx.json' + .expect(200) + .expect("Content-Type", "application/json; charset=utf-8"); + + expect(res.body).to.deep.equal([ + {action: "create", name: "foo"}, ]); + await expect(vfs.getScene("foo"), `expect scene "foo" to be restored`).to.be.fulfilled; let {id}= await vfs.getScene("foo"); const doc = await vfs.getDoc(id); @@ -154,8 +152,8 @@ describe("POST /scenes", function(){ .set("Content-Type", "application/zip") .send(zip.body) .expect(401); - expect(res.body.failed_scenes).not.to.be.empty; - expect(res.body.failed_scenes).to.deep.equal({ foo: 'User does not have writting rights on the scene' }); + + expect(res.body.message).to.match(/permissions.*foo/); await expect(vfs.getScene("foo"), `scene "foo" should still exist`).to.be.fulfilled.to.be.ok; await expect(vfs.getFileProps({ scene: "foo", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); @@ -190,13 +188,13 @@ describe("POST /scenes", function(){ .set("Content-Type", "application/zip") .send(zip.body) .expect(401); - expect(res.body.failed_scenes).not.to.be.empty; - expect(res.body.failed_scenes).to.deep.equal({foo: "User does not have writting rights on the scene"}); + + expect(res.body.message).to.match(/permissions.*foo/); await expect(vfs.getScene("foo"), `scene "foo" should still exist`).to.be.fulfilled.to.be.ok; await expect(vfs.getFileProps({ scene: "foo", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); await expect(vfs.getScene("bar"), `scene "bar" should still exist`).to.be.fulfilled.to.be.ok; - expect(await vfs.getFileProps({ scene: "bar", name: "articles/hello.html" })).to.have.property("hash", fileHash); + await expect(vfs.getFileProps({ scene: "bar", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); }); }) @@ -268,11 +266,8 @@ describe("POST /scenes", function(){ .send(zip.body) 
.expect(200); - expect(res.body).to.be.an("object"); - expect(res.body.ok).to.deep.equal([ - 'foo', - 'foo/articles/hello.html', - 'foo/scene.svx.json' + expect(res.body).to.deep.equal([ + {action: "update", name: "foo"}, ]); await expect(vfs.getScene("foo"), `expect scene "foo" to be restored`).to.be.fulfilled; let { id } = await vfs.getScene("foo"); @@ -325,8 +320,10 @@ describe("POST /scenes", function(){ .set("Content-Type", "application/zip") .send(zip.body) .expect(401); - expect(res.body.failed_scenes).not.to.be.empty; - expect(res.body.failed_scenes).to.deep.equal({foo: "User cannot create a scene"}); + expect(res.body).to.deep.equal({ + code: 401, + message: `Error: [401] User doesn't have write permissions on scene "foo"`, + }); await expect(vfs.getScene("foo")).to.be.rejectedWith(NotFoundError); }); @@ -354,8 +351,8 @@ describe("POST /scenes", function(){ .set("Content-Type", "application/zip") .send(zip.body) .expect(401); - expect(res.body.failed_scenes).not.to.be.empty; - expect(res.body.failed_scenes).to.deep.equal({foo: 'User does not have writting rights on the scene'}); + + expect(res.body.message).to.match(/permissions.*foo/); await expect(vfs.getScene("foo"), `scene "foo" should still exist`).to.be.fulfilled.to.be.ok; await expect(vfs.getFileProps({ scene: "foo", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); @@ -390,13 +387,14 @@ describe("POST /scenes", function(){ .set("Content-Type", "application/zip") .send(zip.body) .expect(401); - expect(res.body.failed_scenes).not.to.be.empty; - expect(res.body.failed_scenes).to.deep.equal({foo: "User does not have writting rights on the scene"}); + + expect(res.body.message).to.match(/permissions.*foo/); + //Check we didn't change anything await expect(vfs.getScene("foo"), `scene "foo" should still exist`).to.be.fulfilled.to.be.ok; await expect(vfs.getFileProps({ scene: "foo", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); await expect(vfs.getScene("bar"), `scene 
"bar" should still exist`).to.be.fulfilled.to.be.ok; - expect(await vfs.getFileProps({ scene: "bar", name: "articles/hello.html" })).to.have.property("hash", fileHash); + await expect(vfs.getFileProps({ scene: "bar", name: "articles/hello.html" })).to.be.rejectedWith(NotFoundError); }); }); diff --git a/source/server/routes/scenes/post.ts b/source/server/routes/scenes/post.ts index bf3af93eb..1f1fe09df 100644 --- a/source/server/routes/scenes/post.ts +++ b/source/server/routes/scenes/post.ts @@ -1,172 +1,71 @@ -import fs from "fs/promises"; -import { createReadStream } from "fs"; +import { createWriteStream } from "fs"; import path from "path"; -import { text } from 'stream/consumers'; import { Request, Response } from "express"; -import yauzl, { Entry, ZipFile } from "yauzl"; -import { BadRequestError, HTTPError, UnauthorizedError } from "../../utils/errors.js"; -import { getMimeType } from "../../utils/filetypes.js"; -import { getVfs, getUser, isCreator, getUserManager } from "../../utils/locals.js"; -import uid, { Uid } from "../../utils/uid.js"; -import { once } from "events"; -import { Readable } from "stream"; -import { finished, pipeline } from "stream/promises"; -import { isUserAtLeast } from "../../auth/User.js"; +import { BadRequestError, UnauthorizedError } from "../../utils/errors.js"; +import { getFilename } from "../../utils/filetypes.js"; +import { getUser, getUserManager, getTaskScheduler } from "../../utils/locals.js"; + + +import { pipeline } from "stream/promises"; import { Dictionary } from "../../utils/schema/types.js"; +import { ImportSceneResult } from "../../tasks/handlers/uploads.js"; +import { extractScenesArchive } from "../../tasks/handlers/extractZip.js"; interface ImportResults { - fail:Dictionary; - ok:string[]; + fail: Dictionary; + ok: string[]; } - -export default async function postScenes(req :Request, res :Response){ - let vfs = getVfs(req); +export async function postRawZipfile(req: Request, res: Response) { const requester = 
getUser(req); - if (requester === null){ throw new UnauthorizedError("No identified user")} + if (requester === null) { throw new UnauthorizedError("No identified user") } let userManager = getUserManager(req); - - if(req.is("multipart") || req.is("application/x-www-form-urlencoded")){ - throw new BadRequestError(`Form data is not supported on this route. Provide a raw Zip attachment`); - } - - let file_name = uid(12)+".zip"; - let tmpfile = path.join(vfs.uploadsDir, file_name); - let results: ImportResults = {fail:{}, ok:[]}; - let zipError: Error; - let handle = await fs.open(tmpfile, "wx+"); - try{ - for await (let data of req){ - await handle.write(data); - } - } catch (e) { - await fs.rm(tmpfile, { force: true }).catch(e => { }); - throw e; - } - finally{ - await handle.close(); - } - await vfs.isolate(async (vfs)=>{ - let zip = await new Promise((resolve,reject)=>yauzl.open(tmpfile, {lazyEntries: true, autoClose: true}, (err, zip)=>(err?reject(err): resolve(zip)))); - const openZipEntry = (record:Entry)=> new Promise((resolve, reject)=>zip.openReadStream(record, (err, rs)=>(err?reject(err): resolve(rs)))); - - //Directory entries are optional in a zip file so we should handle their absence - let scenes = new Map>(); - - const onEntry = async (record :Entry) =>{ - const pathParts = record.fileName.split("/").filter(p=>!!p); - if(pathParts[0] == "scenes") pathParts.shift(); - if(pathParts.length === 0) return; //Skip "scenes/" - const scene = pathParts.shift(); - const name = pathParts.join("/"); - if(!record.fileName.endsWith("/")) pathParts.pop();//Drop the file name unless it's a directory + let taskScheduler = getTaskScheduler(req); - if(!scene){ - results.fail[`${record.fileName}`] = "not matching pattern"; - return - } - if(!scenes.has(scene)){ - //Create the scene - try{ - if (isUserAtLeast(requester, "create")) { - await vfs.createScene(scene, requester.uid); - results.ok.push(scene); - } - }catch(e){ - if((e as HTTPError).code != 409) throw e; - //409 
== Scene already exist, it's OK. - } - scenes.set(scene, new Set()); - } - if ((Object.keys(results.fail) && !Object.keys(results.fail).includes(scene)) || (Object.keys(results.fail).length == 0)) { - if (!results.ok.includes(scene)) { - try { - let rights = await userManager.getAccessRights(scene, requester.uid); - if ((rights != "write" && rights != "admin") && requester.level != "admin") { - results.fail[scene] = "User does not have writting rights on the scene"; - throw new UnauthorizedError("User does not have writting rights on the scene"); - } else { - results.ok.push(scene); - } - } - catch (e) { - // If the scene is not found, the actual error is that the user cannot create it - if ((e as HTTPError).code == 404) { - results.fail[scene] = "User cannot create a scene"; - throw new UnauthorizedError("User cannot create a scene"); - } - else throw e; - } - } + let filename = getFilename(req.headers) as string; + if (!filename) throw new BadRequestError(`Can't detect file type from Content-Disposition or Content-Type headers`); + let size = parseInt(req.headers["content-length"]!); + if (!size || !Number.isInteger(size)) throw new BadRequestError(`Chunked encoding not supported for this request. Use upload tasks to transfer large files`); - if (!name) return; - let folders = scenes.get(scene)!; - let dirpath = ""; - while(pathParts.length){ - dirpath = path.join(dirpath, pathParts.shift()!); - if(folders.has(dirpath)) continue; - folders.add(dirpath); - try{ - await vfs.createFolder({scene, name: dirpath, user_id: requester.uid}); - results.ok.push(`${scene}/${dirpath}/`); - }catch(e){ - if((e as HTTPError).code != 409) throw e; - //409 == Folder already exist, it's OK. - } - } - - if(/\/$/.test(record.fileName)){ - // Is a directory. Do nothing, handled above. 
- }else if(name.endsWith(".svx.json")){ - let data = Buffer.alloc(record.uncompressedSize), size = 0; - let rs = await openZipEntry(record); - rs.on("data", (chunk)=>{ - chunk.copy(data, size); - size += chunk.length; - }); - await finished(rs); - await vfs.writeDoc(data, {scene, user_id: requester.uid, name, mime: "application/si-dpo-3d.document+json"}); - }else{ - //Add the file - let rs = await openZipEntry(record); - let mime = getMimeType(name); - if (mime.startsWith('text/')){ - await vfs.writeDoc(await text(rs), {user_id: requester.uid, scene, name, mime}); - } - else { - await vfs.writeFile(rs, {user_id: requester.uid, scene, name, mime}); - } - } - - results.ok.push(`${scene}/${name}`); - } - }; - - zip.on("entry", (record)=>{ - onEntry(record).then(()=>{ - zip.readEntry() - }, (e)=>{ - if ((e as HTTPError).code == 401) { // If the error is unauthorised, we keep checking the rest of the scenes - zip.readEntry(); - } - else { - zip.close(); - zipError=e; - } + const output = await taskScheduler.run({ + scene_id: null, + user_id: requester.uid, + type: "handlePostScene", + immediate: true, + handler: async function handlePostScene({ task, context: { vfs, logger } }): Promise { + const dir = await vfs.createTaskWorkspace(task.task_id); + //Ensure path sanitization, even though getFilename shouldn't yield absolute paths + const relPath = vfs.relative(path.join(dir, filename)); + const abs_filepath = vfs.absolute(relPath); + logger.log("Write upload file to :", relPath); + const ws = createWriteStream(abs_filepath); + await pipeline( + req, + ws, + ); + logger.log("file uploaded to :", relPath); + return await taskScheduler.run({ + scene_id: null, + user_id: requester.uid, + data: { fileLocation: relPath, size }, + handler: extractScenesArchive, }); - }); - zip.readEntry(); - await once(zip, "close"); - // If one or several files have raised unauthorised errors, we send the unauthorized http error after going through the whole zip to check for all errors - if 
(Object.keys(results.fail).length > 0 || zipError) { - res.status(zipError? ((zipError as HTTPError).code? (zipError as HTTPError).code : 500) : 401) - .send({failed_scenes: results.fail, message: zipError? zipError.message:""}); } - }).finally(() => fs.rm(tmpfile, { force: true })); + }); - if (Object.keys(results.fail).length == 0) { - res.status(200).send({ok : results.ok}); + res.status(200).send(output); +} + +export default async function postScenes(req: Request, res: Response) { + const requester = getUser(req); + if (requester === null) { throw new UnauthorizedError("No identified user") } + let userManager = getUserManager(req); + + if (req.is("multipart") || req.is("application/x-www-form-urlencoded")) { + throw new BadRequestError(`Form data is not supported on this route. Provide a raw Zip attachment`); } + + return await postRawZipfile(req, res); }; \ No newline at end of file diff --git a/source/server/routes/scenes/scene/files/get/document.ts b/source/server/routes/scenes/scene/files/get/document.ts index 65c865e56..eefddfa98 100644 --- a/source/server/routes/scenes/scene/files/get/document.ts +++ b/source/server/routes/scenes/scene/files/get/document.ts @@ -20,7 +20,7 @@ export default async function handleGetDocument(req :Request, res :Response){ } let data = Buffer.from(JSON.stringify(doc), "utf-8"); - let hash = createHash("sha256").update(data).digest("base64url"); + let hash = createHash("sha256").update(data as any).digest("base64url"); res.set("ETag", hash); diff --git a/source/server/routes/scenes/scene/files/get/file.ts b/source/server/routes/scenes/scene/files/get/file.ts index 097ac499a..027383478 100644 --- a/source/server/routes/scenes/scene/files/get/file.ts +++ b/source/server/routes/scenes/scene/files/get/file.ts @@ -2,7 +2,7 @@ import {pipeline} from "node:stream/promises"; import { Request, Response } from "express"; import { getVfs, getFileParams } from "../../../../../utils/locals.js"; -import { BadRequestError, 
RangeNotSatisfiable} from "../../../../../utils/errors.js";
+import { BadRequestError, RangeNotSatisfiableError} from "../../../../../utils/errors.js";
 
 async function handleGetFileRange(req :Request, res :Response){
   const vfs = getVfs(req);
@@ -24,9 +24,9 @@ async function handleGetFileRange(req :Request, res :Response){
   if (end && end > file.size){
     res.set("Content-Range", "bytes */" + file.size);
     if(startRange.length > 0){
-      throw new RangeNotSatisfiable("Range Not Satisfiable: end after end of file")
+      throw new RangeNotSatisfiableError("Range Not Satisfiable: end after end of file")
     }else{
-      throw new RangeNotSatisfiable("Range Not Satisfiable: Suffix-length is bigger than lenght of file")
+      throw new RangeNotSatisfiableError("Range Not Satisfiable: Suffix-length is bigger than length of file")
     }
   }
 
diff --git a/source/server/routes/scenes/scene/post.test.ts b/source/server/routes/scenes/scene/post.test.ts
index 753c0c625..9da78aeab 100644
--- a/source/server/routes/scenes/scene/post.test.ts
+++ b/source/server/routes/scenes/scene/post.test.ts
@@ -43,7 +43,7 @@ describe("POST /scenes/:scene", function(){
 
     await expect(vfs.getScenes()).to.eventually.have.property("length", 1);
 
-    await request(app).get("/scenes/foo/models/foo.glb")
+    await request(app).get("/scenes/foo/foo.glb")
       .auth(user.username, "12345678")
       .expect(200)
       .expect("Content-Type", "model/gltf-binary");
diff --git a/source/server/routes/scenes/scene/post.ts b/source/server/routes/scenes/scene/post.ts
index ab872ac11..f3df00e9f 100644
--- a/source/server/routes/scenes/scene/post.ts
+++ b/source/server/routes/scenes/scene/post.ts
@@ -1,113 +1,84 @@
 import { Request, Response } from "express";
-import { parse_glb } from "../../../utils/glTF.js";
-import { getVfs, getUserId } from "../../../utils/locals.js";
-import uid from "../../../utils/uid.js";
-import getDefaultDocument from "../../../utils/schema/default.js";
+import { getUserId, getLocals } from "../../../utils/locals.js";
 import {
BadRequestError, UnauthorizedError } from "../../../utils/errors.js"; +import { inspectGlb } from "../../../tasks/handlers/inspectGlb.js"; +import { createDocumentFromFiles } from "../../../tasks/handlers/createDocumentFromFiles.js"; +import { isSceneLanguage, SceneLanguage } from "../../../utils/languages.js"; -const sceneLanguages = ["EN", "ES", "DE", "NL", "JA", "FR", "HAW"] as const; -type SceneLanguage = typeof sceneLanguages[number]; -function isSceneLanguage(l:any) :l is SceneLanguage|undefined{ - return typeof l === "undefined" || sceneLanguages.indexOf(l.toUpperCase()) !== -1; -} -interface GetDocumentParams{ - scene :string; - filepath :string; - language?:SceneLanguage; -} - -/** - * Creates a new default document for a scene - * uses data embedded in the glb to fill the document where possible - * @param scene - * @param filepath - * @returns - */ -async function getDocument({scene, filepath, language}:GetDocumentParams){ - let orig = await getDefaultDocument(); - //dumb inefficient Deep copy because we want to mutate the doc in-place - let document = JSON.parse(JSON.stringify(orig)); - let meta = await parse_glb(filepath); - let mesh = meta.meshes[0]; //Take the first mesh for its name - document.nodes.push({ - "id": uid(), - "name": mesh?.name ?? scene, - "model": 0, - } as any); - document.scenes[0].nodes.push(document.nodes.length -1); - - if(language){ - document.setups[0].language = {language: language.toUpperCase()}; - } - - document.models = [{ - "units": "m", //glTF specification says it's always meters. It's what blender do. 
- "boundingBox": meta.bounds, - "derivatives":[{ - "usage": "Web3D", - "quality": "High", - "assets": [ - { - "uri": `models/${scene}.glb`, - "type": "Model", - "byteSize": meta.byteSize, - "numFaces": meta.meshes.reduce((acc, m)=> acc+m.numFaces, 0), - "imageSize": 8192 - } - ] - }], - "annotations":[], - }]; - document.metas = [{ - "collection": { - "titles": { - "EN": scene, - "FR": scene, - } - }, - }]; - document.scenes[document.scene].meta = 0; - - return document -} /** * Tries to create a scene. * has consistency problems : a scene could get created without its associated 3D object * Whether or not it's desired behaviour remains to be defined */ -export default async function postScene(req :Request, res :Response){ - let vfs = getVfs(req); +export default async function postScene(req: Request, res: Response) { + const { vfs, taskScheduler } = getLocals(req); let user_id = getUserId(req); - let {scene} = req.params; - let {language} = req.query; - - if(req.is("multipart")|| req.is("application/x-www-form-urlencoded")){ + let { scene } = req.params; + const { language: queryLanguage } = req.query; + if (req.is("multipart") || req.is("application/x-www-form-urlencoded")) { throw new BadRequestError(`${req.get("Content-Type")} content is not supported on this route. 
Provide a raw Zip attachment`); } - if(!isSceneLanguage(language)){ - throw new BadRequestError(`Invalid scene language requested: ${language}`) + if (queryLanguage && (typeof queryLanguage !== "string" || !isSceneLanguage(queryLanguage.toUpperCase()))) { + throw new BadRequestError(`Invalid scene language requested: ${queryLanguage}`) } - if(!user_id){ + const language = queryLanguage?.toUpperCase() as SceneLanguage | undefined; + if (!user_id) { throw new UnauthorizedError("Requires authenticated user"); } - + let scene_id = await vfs.createScene(scene, user_id); - try{ - let f = await vfs.writeFile(req, {user_id, scene: scene, mime:"model/gltf-binary", name: `models/${scene}.glb`}); - let document = await getDocument({ - scene, - filepath: vfs.filepath(f), + await taskScheduler.run({ + scene_id, + user_id, + type: "postScene", + data: { language, - }); - await vfs.writeDoc(JSON.stringify(document), {scene: scene_id, user_id: user_id, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - }catch(e){ + scene, + }, + handler: async function postsSceneHandler({ context: { logger, vfs } }) { + logger.debug("Draining the HTTP request into scene space"); + let f = await vfs.writeFile(req, { user_id, scene: scene, mime: "model/gltf-binary", name: `${scene}.glb` }); + if (f.size == 0 || !f.hash) throw new BadRequestError(`Body was empty. 
Can't create a scene.`); + logger.debug("Parse the created file"); + const meta = await taskScheduler.run({ + immediate: true, + data: { fileLocation: vfs.relative(vfs.getPath(f as { hash: string })) }, + handler: inspectGlb, + }); + + logger.debug(`Generate default document for model ${meta.name}`); + const document = await taskScheduler.run({ + immediate: true, + handler: createDocumentFromFiles, + data: { + scene: scene, + language: language, + models: [{ + uri: f.name, + quality: "High", + usage: "Web3D", + byteSize: f.size, + ...meta, + }], + } + }); + logger.debug(`Write scene document`); + await vfs.writeDoc(JSON.stringify(document), { + scene: scene_id, + user_id: user_id, + name: "scene.svx.json", + mime: "application/si-dpo-3d.document+json" + }); + } + }).catch(async e => { //If written, the file will stay as a loose object but will get cleaned-up later - await vfs.removeScene(scene_id).catch(e=>console.warn(e)); + await vfs.removeScene(scene_id).catch(e => console.warn(e)); throw e; - } - res.status(201).send({code: 201, message: "created scene with id :"+scene_id}); + }); + + res.status(201).send({ code: 201, message: "created scene with id :" + scene_id }); }; diff --git a/source/server/routes/services/opensearch.ts b/source/server/routes/services/opensearch.ts index 459afd73d..f16a9d9b2 100644 --- a/source/server/routes/services/opensearch.ts +++ b/source/server/routes/services/opensearch.ts @@ -31,7 +31,7 @@ export function renderOpenSearch(req:Request, res: Response):void{ })); let eTag = createHash("sha256"); - eTag.update(body); + eTag.update(body as any); res.set("Cache-Control", `max-age=${3600*24}, public`); res.set("ETag", "W/"+eTag.digest("base64url")); if(req.fresh){ diff --git a/source/server/routes/tasks/index.ts b/source/server/routes/tasks/index.ts new file mode 100644 index 000000000..a280392ad --- /dev/null +++ b/source/server/routes/tasks/index.ts @@ -0,0 +1,57 @@ +import { NextFunction, Request, Response, Router } from "express"; + 
+import wrap from "../../utils/wrapAsync.js"; + +import { canAdmin, canRead, getLocals, getUser, isCreator, isUser } from "../../utils/locals.js"; + +import { createUserTask } from "./post.js"; +import { putTaskArtifact } from "./task/artifacts/put.js"; +import bodyParser from "body-parser"; +import { getTaskArtifact } from "./task/artifacts/get.js"; +import { getTask } from "./task/get.js"; +import { deleteTask } from "./task/delete.js"; +import { getTaskTree } from "./task/tree/get.js"; +import { UnauthorizedError } from "../../utils/errors.js"; +import { AccessType, toAccessLevel } from "../../auth/UserManager.js"; + +const jsonParser = bodyParser.json(); + +const router = Router(); + +router.use("/", isUser); +router.post("/", isCreator, jsonParser, wrap(createUserTask)); + + +function taskAccess(name: AccessType) { + const minLevel = toAccessLevel(name); + return function taskAccessMiddleware(req: Request, res: Response, next: NextFunction) { + const { + vfs, + taskScheduler, + userManager, + } = getLocals(req); + const requester = getUser(req)!; + if (!requester) return next(new UnauthorizedError(`Route requires a valid user`)); + + const { id: idString } = req.params; + const id = parseInt(idString); + taskScheduler.getTask(id).then(async (task) => { + if (requester.level == "admin") return next(); //Even if requester is admin, check for task existence + else if (task.user_id && task.user_id == requester.uid) return next(); + else if (!task.scene_id || toAccessLevel(await userManager.getAccessRights(task.scene_id, requester.uid)) < minLevel) { + return next(new UnauthorizedError(`Administrative rights are required to delete tasks`)) + } else { + return next(); + } + }).catch(next); + } +} + + +router.get("/:id(\\d+)", isUser, wrap(getTask)); +router.get("/:id(\\d+)/tree", taskAccess("read"), wrap(getTaskTree)); +router.delete("/:id(\\d+)", taskAccess("admin"), wrap(deleteTask)); +router.put("/:id(\\d+)/artifact", taskAccess("admin"), wrap(putTaskArtifact)); 
+router.get("/:id(\\d+)/artifact", taskAccess("read"), wrap(getTaskArtifact)); + +export default router; diff --git a/source/server/routes/tasks/post.ts b/source/server/routes/tasks/post.ts new file mode 100644 index 000000000..f10b42e3f --- /dev/null +++ b/source/server/routes/tasks/post.ts @@ -0,0 +1,49 @@ +import { Request, Response } from "express"; +import { getTaskScheduler, getUser, getVfs } from "../../utils/locals.js"; +import { BadRequestError } from "../../utils/errors.js"; + +import * as handlers from "../../tasks/handlers/index.js"; + +function isUserTaskType(t: any): t is keyof typeof handlers{ + return typeof t === "string" && typeof (handlers as any)[t] === "function"; +} + + +/** + * + */ +export async function createUserTask(req: Request, res: Response){ + const vfs = getVfs(req); + const taskScheduler = getTaskScheduler(req); + const requester = getUser(req)!; + let {type, data, status = "pending"} = req.body; + if(!type || typeof type !=="string"){ + throw new BadRequestError(`No task type provided`); + }else if(!isUserTaskType(type) ){ + throw new BadRequestError(`Unsupported task type: ${type}`); + }else if(["initializing", "pending"].indexOf(status)=== -1){ + throw new BadRequestError(`Invalid task status: ${status}`); + } + //We perform **NO** data validation here, which might be a security hole + //especially if task handlers can't be relied-upon to check their own data + let task = await taskScheduler.create({ + scene_id:null, + user_id:requester.uid, + type, + data, + status, + }); + + + if(status == "pending"){ + await taskScheduler.runTask({task, handler: handlers[type] as any}); + //We could just refresh task.output from the result of runTask and task.status + //But it's safer to just fetch the whole task again + task = await taskScheduler.getTask(task.task_id); + }else{ + //Create the workspace immediately + await vfs.createTaskWorkspace(task.task_id); + } + + res.status(201).send(task); +} diff --git 
a/source/server/routes/tasks/task/artifacts/get.ts b/source/server/routes/tasks/task/artifacts/get.ts new file mode 100644 index 000000000..035a7f1e4 --- /dev/null +++ b/source/server/routes/tasks/task/artifacts/get.ts @@ -0,0 +1,29 @@ +import { Request, Response } from "express"; +import { getVfs, getUser, getTaskScheduler } from "../../../../utils/locals.js"; +import { MethodNotAllowedError, NotImplementedError, UnauthorizedError } from "../../../../utils/errors.js"; + +import { isUploadTask } from "./put.js"; +import path from "node:path"; +import { UploadHandlerParams } from "../../../../tasks/handlers/uploads.js"; +import { isArtifactTask } from "../../../../tasks/types.js"; + + + +export async function getTaskArtifact(req: Request, res: Response){ + const vfs = getVfs(req); + const taskScheduler = getTaskScheduler(req); + const requester = getUser(req)!; + const {id:idString} = req.params; + const id = parseInt(idString); + const task = await taskScheduler.getTask(id); + if(task.user_id !== requester.uid){ + throw new UnauthorizedError(`This task does not belong to this user`); + } + if(task.status != 'success'){ + throw new MethodNotAllowedError(`Task status is ${task.status}. 
GET is only allowed on tasks that have status = "success"`); + } + if(!isArtifactTask(task.output)){ + throw new NotImplementedError(`Artifacts download not supported for task type ${task.type}`); + } + res.sendFile(task.output.fileLocation, {root: vfs.baseDir}); +} \ No newline at end of file diff --git a/source/server/routes/tasks/task/artifacts/put.test.ts b/source/server/routes/tasks/task/artifacts/put.test.ts new file mode 100644 index 000000000..8a39cd80c --- /dev/null +++ b/source/server/routes/tasks/task/artifacts/put.test.ts @@ -0,0 +1,131 @@ +import fs from "fs/promises"; +import request from "supertest"; + +import User from "../../../../auth/User.js"; +import UserManager from "../../../../auth/UserManager.js"; +import Vfs from "../../../../vfs/index.js"; +import { randomBytes, randomInt } from "node:crypto"; +import path from "node:path"; +import { fixturesDir } from "../../../../__test_fixtures/fixtures.js"; + + + + + +describe("PUT /tasks/:id/artifact", function(){ + let vfs :Vfs, userManager :UserManager, user :User, admin :User; + let filename: string, size: number, task: number; + + this.beforeAll(async function(){ + let locals = await createIntegrationContext(this); + vfs = locals.vfs; + userManager = locals.userManager; + user = await userManager.addUser("bob", "12345678"); + admin = await userManager.addUser("alice", "12345678", "admin"); + }); + + this.afterAll(async function(){ + await cleanIntegrationContext(this); + }); + + this.beforeEach(async function(){ + //Create a task for each test + filename = randomBytes(4).toString("hex")+".bin"; + size = randomInt(16, 512); + const {body} = await request(this.server).post(`/tasks`) + .auth("alice", "12345678") + .set("Content-Type", "application/json") + .send({ + type: "parseUserUpload", + data: {filename, size}, + status: "initializing", + }) + .expect(201); + expect(body).to.have.property("task_id").a("number"); + task = body.task_id; + }); + + it("Can handle a single-chunk upload (no headers)", 
async function(){ + const data = randomBytes(size); + await request(this.server).put(`/tasks/${task}/artifact`) + .auth("alice", "12345678") + .send(data) + .expect(201); + + console.log("GET to :", `/tasks/${task}/artifact`) + const {body} = await request(this.server).get(`/tasks/${task}/artifact`) + .auth("alice", "12345678") + .expect(200); + + expect(body.toString("hex")).to.equal(data.toString("hex")); + }); + + it("Can handle a single-chunk upload (chunk headers)", async function(){ + const data = randomBytes(size); + await request(this.server).put(`/tasks/${task}/artifact`) + .auth("alice", "12345678") + .set("Content-Length", size.toString()) + .set("Content-Range", `bytes 0-${size-1 /*end is inclusive*/}/${size}`) + .send(data) + .expect(201); + + const {body} = await request(this.server).get(`/tasks/${task}/artifact`) + .auth("alice", "12345678") + .expect(200); + + expect(body.toString("hex")).to.equal(data.toString("hex")); + }); + + it("Can handle a multi-chunk upload", async function(){ + const data = randomBytes(size); + + let offset = 0; + let chunkSize = Math.floor(size / 4); + while(offset < size){ + const len = Math.min(size - offset, chunkSize); + await request(this.server).put(`/tasks/${task}/artifact`) + .auth("alice", "12345678") + .set("Content-Length", len.toString()) + .set("Content-Range", `bytes ${offset}-${offset+len-1 /*end is inclusive*/}/${size}`) + .send(data.subarray(offset, offset + len)) + .expect(offset + len < size? 
206 : 201);
+      offset += len;
+    }
+
+    const {body} = await request(this.server).get(`/tasks/${task}/artifact`)
+    .auth("alice", "12345678")
+    .expect(200);
+
+    expect(body.toString("hex")).to.equal(data.toString("hex"));
+  });
+
+  it("can parse uploaded contents (simple)", async function(){
+    const data = randomBytes(size);
+    await request(this.server).put(`/tasks/${task}/artifact`)
+    .auth("alice", "12345678")
+    .send(data)
+    .expect(201);
+
+    const {body} = await request(this.server).get(`/tasks/${task}`)
+    .auth("alice", "12345678")
+    .expect(200);
+
+    expect(body).to.have.property("task").to.have.property("status", "success");
+    expect(body).to.have.property("task").to.have.property("output").to.deep.equal({
+      fileLocation: `artifacts/${task}/${filename}`,
+      isModel: false,
+      mime: "application/octet-stream",
+    });
+  });
+
+  it("rejects out-of-order bytes", async function(){
+    const data = randomBytes(6);
+    const res = await request(this.server).put(`/tasks/${task}/artifact`)
+    .auth("alice", "12345678")
+    .set("Content-Length", '6')
+    .set("Content-Range", `bytes 10-15/${size}`)
+    .send(data)
+    .expect(416);
+    expect(res.body).to.have.property("message", `Error: [416] Missing bytes 0-10`);
+  });
+})
\ No newline at end of file
diff --git a/source/server/routes/tasks/task/artifacts/put.ts b/source/server/routes/tasks/task/artifacts/put.ts
new file mode 100644
index 000000000..266fac41c
--- /dev/null
+++ b/source/server/routes/tasks/task/artifacts/put.ts
@@ -0,0 +1,135 @@
+import { Request, Response } from "express";
+import parseRange from "range-parser";
+import path from "node:path";
+import { stat } from "node:fs/promises";
+import { getVfs, getUser, getTaskScheduler } from "../../../../utils/locals.js";
+import { BadRequestError, LengthRequiredError, RangeNotSatisfiableError, UnauthorizedError } from "../../../../utils/errors.js";
+import { TaskDefinition } from "../../../../tasks/types.js";
+import { createWriteStream } from "node:fs";
+import { pipeline } 
from "node:stream/promises"; +import { parseUserUpload, UploadHandlerParams, ParsedUserUpload } from "../../../../tasks/handlers/uploads.js"; + +export function isUploadTask(t:TaskDefinition) : t is TaskDefinition{ + return t.type === parseUserUpload.name; +} + +/** + * File upload handler + * Files can be sent in chunks. Uses `Content-Range` for the client to communicate chunk position + * and responds with a `Range` header to confirm current state. + * + * @see {@link https://docs.cloud.google.com/storage/docs/performing-resumable-uploads?hl=fr#chunked-upload Google Cloud Storage: Chunked upload} for a similar feature + */ +export async function putTaskArtifact(req: Request, res: Response){ + const vfs = getVfs(req); + const taskScheduler = getTaskScheduler(req); + const requester = getUser(req)!; + const {id:idString} = req.params; + const id = parseInt(idString); + const task = await taskScheduler.getTask(id); + if(task.user_id !== requester.uid){ + throw new UnauthorizedError(`This task does not belong to this user`); + } + if(!isUploadTask(task)){ + throw new BadRequestError(`Task ${id} is not a user upload task`); + }else if(!(typeof task.data?.filename === "string" && typeof task.data?.size === "number")){ + throw new BadRequestError(`Invalid task data: ${typeof task.data} ${JSON.stringify(task.data)}`); + } + + const {filename, size: filesize} = task.data; + const contentRange = req.get("Content-Range"); + + const contentLength = parseInt(req.get("Content-Length")!); + if(!contentLength || !Number.isInteger(contentLength)) throw new LengthRequiredError(`A valid Content-Length header must be provided`); + + if(task.status !== "initializing"){ + throw new BadRequestError(`Task ${id} is in state ${task.status}, which does not allow further data to be sent`); + } + + + //Call this once the upload has completed + async function processUpload(){ + await taskScheduler.runTask({ task, immediate: true, handler: parseUserUpload as any }); + } + + const filepath = 
path.join(vfs.getTaskWorkspace(task.task_id), filename); + + if(!contentRange){ + const ws = createWriteStream(filepath) + await pipeline( + req, + ws + ); + await processUpload(); + res.status(201); + return res.format({ + "text/plain": ()=>{ + res.send("Created") + }, + "application/json": ()=>{ + res.send({code: 201, message: "Created"}) + } + }); + } + const ranges = parseRange(filesize, contentRange.replace("bytes ", "bytes=")); + if(ranges == -1 ) throw new RangeNotSatisfiableError(); + else if(ranges == -2 || !Array.isArray(ranges) || ranges.length != 1 || ranges.type !== "bytes") throw new BadRequestError(`Malformed range header`); + + const { start, end} = ranges[0]; + + + if(contentLength !== end - start + 1) throw new BadRequestError(`a Content-Length of ${contentLength} can't satisfy a range of ${end - start +1}`); + + // @fixme check current file size + if(start !== 0){ + const {size: currentSize} = await stat(filepath).catch(e=>{if(e.code !=="ENOENT") throw e; return {size: 0}}); + if(currentSize < start){ + //Slightly non-standard use of the Range header to communicate what is the current file size + res.set("Range", `bytes=0-${currentSize}/${filesize}`); + throw new RangeNotSatisfiableError(`Missing bytes ${currentSize}-${start}`); + } + } + const ws = createWriteStream(filepath, {flags: start == 0? 'w':'r+', start}); + await pipeline( + req, + async function* rangeLength(source){ + let len = 0; + for await (const chunk of source){ + len += chunk.length; + if(contentLength < len) throw new BadRequestError(`Expected a content length of ${contentLength}. Received ${len} bytes.`); + yield chunk; + } + if(len < contentLength){ + throw new BadRequestError(`Expected a content length of ${contentLength}. 
Received ${len} bytes.`); + } + }, + ws + ); + + res.set("Range", `bytes=0-${end}/${filesize}`); + + if(end == filesize - 1){ + //Upload is complete + await processUpload(); + res.status(201); + res.format({ + "text/plain": ()=>{ + res.send("Created") + }, + "application/json": ()=>{ + res.send({code: 201, message: "Created"}) + } + }); + }else{ + //Partial Content + res.status(206); + res.format({ + "text/plain": ()=>{ + res.send("Partial Content") + }, + "application/json": ()=>{ + res.send({code: 206, message: "Partial Content"}) + } + }); + } +} diff --git a/source/server/routes/tasks/task/delete.test.ts b/source/server/routes/tasks/task/delete.test.ts new file mode 100644 index 000000000..65f075c5d --- /dev/null +++ b/source/server/routes/tasks/task/delete.test.ts @@ -0,0 +1,124 @@ +import request from "supertest"; + +import User from "../../../auth/User.js"; +import UserManager from "../../../auth/UserManager.js"; +import Vfs from "../../../vfs/index.js"; +import { TaskScheduler } from "../../../tasks/scheduler.js"; + + +describe("DELETE /tasks/:id", function(){ + let vfs :Vfs, userManager :UserManager, taskScheduler :TaskScheduler; + let user :User, admin :User, other :User; + + this.beforeAll(async function(){ + let locals = await createIntegrationContext(this); + vfs = locals.vfs; + userManager = locals.userManager; + taskScheduler = locals.taskScheduler; + user = await userManager.addUser("bob", "12345678"); + admin = await userManager.addUser("alice", "12345678", "admin"); + other = await userManager.addUser("charlie", "12345678"); + }); + + this.afterAll(async function(){ + await cleanIntegrationContext(this); + }); + + it("requires authentication", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .expect(401); + }); + + it("deletes a task and returns 204", async function(){ + const task = await 
taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(204); + + // Verify the task is actually gone + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(404); + }); + + it("returns 404 for non-existent task", async function(){ + await request(this.server).delete(`/tasks/999999`) + .auth("bob", "12345678") + .expect(404); + }); + + describe("permissions", function(){ + + it("task owner can delete their own task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(204); + }); + + it("admin can delete any task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("alice", "12345678") + .expect(204); + }); + + it("unrelated user cannot delete another user's task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("charlie", "12345678") + .expect(401); + }); + + describe("scene-based access", function(){ + let scene_id :number; + + this.beforeAll(async function(){ + scene_id = await vfs.createScene("task-delete-test"); + await request(this.server).patch(`/scenes/task-delete-test`) + .auth("alice", "12345678") + .send({default_access: "none", public_access: "none"}) + .expect(200); + }); + + it("user with admin access to the scene can delete the task", async function(){ + await userManager.grant(scene_id, other.uid, "admin"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + await 
request(this.server).delete(`/tasks/${task.task_id}`) + .auth("charlie", "12345678") + .expect(204); + }); + + it("user with only read access to the scene cannot delete the task", async function(){ + await userManager.grant(scene_id, other.uid, "read"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + const res = await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("charlie", "12345678"); + expect(res.status).to.be.oneOf([401, 404]); + }); + + it("user without scene access cannot delete the task", async function(){ + await userManager.grant(scene_id, other.uid, "none"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + const res = await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("charlie", "12345678"); + expect(res.status).to.be.oneOf([401, 404]); + }); + }); + + it("rejects user for a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(401); + }); + + it("admin can delete a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).delete(`/tasks/${task.task_id}`) + .auth("alice", "12345678") + .expect(204); + }); + }); +}); diff --git a/source/server/routes/tasks/task/delete.ts b/source/server/routes/tasks/task/delete.ts new file mode 100644 index 000000000..d73a15a10 --- /dev/null +++ b/source/server/routes/tasks/task/delete.ts @@ -0,0 +1,30 @@ +import { rm } from "node:fs/promises"; + +import { Request, Response } from "express"; +import { getUser, getLocals } from "../../../utils/locals.js"; +import { UnauthorizedError } from "../../../utils/errors.js"; + + +export async function deleteTask(req: Request, res: Response){ 
+  const {
+    vfs,
+    taskScheduler,
+    userManager,
+  } = getLocals(req);
+  const requester = getUser(req)!;
+  const {id:idString} = req.params;
+  const id = parseInt(idString);
+  const task = await taskScheduler.getTask(id);
+
+  if(requester.level !== "admin"
+    && task.user_id !== requester.uid
+    && (!task.scene_id
+      || await userManager.getAccessRights(task.scene_id, requester.uid) != "admin")
+  ){
+    throw new UnauthorizedError(`Administrative rights are required to delete tasks`);
+  }
+
+  await taskScheduler.deleteTask(id);
+  await rm(vfs.getTaskWorkspace(id), {force: true, recursive: true});
+  res.status(204).send();
+}
diff --git a/source/server/routes/tasks/task/get.test.ts b/source/server/routes/tasks/task/get.test.ts
new file mode 100644
index 000000000..bd236e35f
--- /dev/null
+++ b/source/server/routes/tasks/task/get.test.ts
@@ -0,0 +1,117 @@
+import request from "supertest";
+
+import User from "../../../auth/User.js";
+import UserManager from "../../../auth/UserManager.js";
+import Vfs from "../../../vfs/index.js";
+import { TaskScheduler } from "../../../tasks/scheduler.js";
+
+
+describe("GET /tasks/:id", function(){
+  let vfs :Vfs, userManager :UserManager, taskScheduler :TaskScheduler;
+  let user :User, admin :User, other :User;
+
+  this.beforeAll(async function(){
+    let locals = await createIntegrationContext(this);
+    vfs = locals.vfs;
+    userManager = locals.userManager;
+    taskScheduler = locals.taskScheduler;
+    user = await userManager.addUser("bob", "12345678");
+    admin = await userManager.addUser("alice", "12345678", "admin");
+    other = await userManager.addUser("charlie", "12345678");
+  });
+
+  this.afterAll(async function(){
+    await cleanIntegrationContext(this);
+  });
+
+  it("requires authentication", async function(){
+    const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}});
+    await request(this.server).get(`/tasks/${task.task_id}`)
+      .expect(401);
+  });
+
+  it("returns task with logs", async 
function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + const {body} = await request(this.server).get(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(200); + expect(body).to.have.property("task"); + expect(body).to.have.property("logs").to.be.an("array"); + expect(body.task).to.have.property("task_id", task.task_id); + expect(body.task).to.have.property("type", "test"); + }); + + it("returns 404 for non-existent task", async function(){ + await request(this.server).get(`/tasks/999999`) + .auth("bob", "12345678") + .expect(404); + }); + + describe("permissions", function(){ + + it("task owner can access their own task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(200); + }); + + it("admin can access any task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("alice", "12345678") + .expect(200); + }); + + it("unrelated user cannot access another user's task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("charlie", "12345678") + .expect(401); + }); + + describe("scene-based access", function(){ + let scene_id :number; + + this.beforeAll(async function(){ + scene_id = await vfs.createScene("task-get-test"); + // Make the scene private so default access doesn't grant read + await request(this.server).patch(`/scenes/task-get-test`) + .auth("alice", "12345678") + .send({default_access: "none", public_access: "none"}) + .expect(200); + }); + + it("user with read access to the scene can access the task", async function(){ + await 
userManager.grant(scene_id, other.uid, "read"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("charlie", "12345678") + .expect(200); + }); + + it("user without scene access cannot access the task", async function(){ + await userManager.grant(scene_id, other.uid, "none"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + const res = await request(this.server).get(`/tasks/${task.task_id}`) + .auth("charlie", "12345678"); + // Access denial may be obfuscated as 404 + expect(res.status).to.be.oneOf([401, 404]); + }); + }); + + it("rejects user for a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("bob", "12345678") + .expect(401); + }); + + it("admin can access a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}`) + .auth("alice", "12345678") + .expect(200); + }); + }); +}); diff --git a/source/server/routes/tasks/task/get.ts b/source/server/routes/tasks/task/get.ts new file mode 100644 index 000000000..56756adc1 --- /dev/null +++ b/source/server/routes/tasks/task/get.ts @@ -0,0 +1,34 @@ +import { Request, Response } from "express"; +import { UnauthorizedError } from "../../../utils/errors.js"; +import { getLocals, getUser } from "../../../utils/locals.js"; +import { toAccessLevel } from "../../../auth/UserManager.js"; +import { TaskDefinition, TaskDataPayload, TaskLogEntry } from "../../../tasks/types.js"; + +export interface TaskResponse { + task: TaskDefinition; + logs: TaskLogEntry[]; +} + + +export async function getTask(req: Request, res: Response) { + const { + vfs, + 
taskScheduler, + userManager, + } = getLocals(req); + const requester = getUser(req)!; + const { id: idString } = req.params; + const id = parseInt(idString); + let task = await taskScheduler.getTask(id); + + if (requester.level !== "admin" + && task.user_id !== requester.uid + && (!task.scene_id + || toAccessLevel(await userManager.getAccessRights(task.scene_id, requester.uid)) < toAccessLevel("read") + ) + ) { + throw new UnauthorizedError(`Read rights are required to access this task`); + } + const logs = await taskScheduler.getLogs(id); + res.status(200).send({ task, logs }); +} \ No newline at end of file diff --git a/source/server/routes/tasks/task/tree/get.test.ts b/source/server/routes/tasks/task/tree/get.test.ts new file mode 100644 index 000000000..4ac8a9fb8 --- /dev/null +++ b/source/server/routes/tasks/task/tree/get.test.ts @@ -0,0 +1,117 @@ +import request from "supertest"; + +import User from "../../../../auth/User.js"; +import UserManager from "../../../../auth/UserManager.js"; +import Vfs from "../../../../vfs/index.js"; +import { TaskScheduler } from "../../../../tasks/scheduler.js"; + + +describe("GET /tasks/:id/tree", function(){ + let vfs :Vfs, userManager :UserManager, taskScheduler :TaskScheduler; + let user :User, admin :User, other :User; + + this.beforeAll(async function(){ + let locals = await createIntegrationContext(this); + vfs = locals.vfs; + userManager = locals.userManager; + taskScheduler = locals.taskScheduler; + user = await userManager.addUser("bob", "12345678"); + admin = await userManager.addUser("alice", "12345678", "admin"); + other = await userManager.addUser("charlie", "12345678"); + }); + + this.afterAll(async function(){ + await cleanIntegrationContext(this); + }); + + it("requires authentication", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .expect(401); + }); + + it("returns 
task tree with logs", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + const {body} = await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("bob", "12345678") + .expect(200); + expect(body).to.have.property("task"); + expect(body).to.have.property("logs").to.be.an("array"); + expect(body.task).to.have.property("task_id", task.task_id); + expect(body.task).to.have.property("children").to.be.an("array"); + }); + + it("returns 404 for non-existent task", async function(){ + await request(this.server).get(`/tasks/999999/tree`) + .auth("bob", "12345678") + .expect(404); + }); + + describe("permissions", function(){ + + it("task owner can access their own task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("bob", "12345678") + .expect(200); + }); + + it("admin can access any task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("alice", "12345678") + .expect(200); + }); + + it("unrelated user cannot access another user's task", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: user.uid, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("charlie", "12345678") + .expect(401); + }); + + describe("scene-based access", function(){ + let scene_id :number; + + this.beforeAll(async function(){ + scene_id = await vfs.createScene("task-tree-test"); + // Make the scene private so default access doesn't grant read + await request(this.server).patch(`/scenes/task-tree-test`) + .auth("alice", "12345678") + .send({default_access: "none", public_access: "none"}) + .expect(200); + }); + + it("user with read access to the 
scene can access the task tree", async function(){ + await userManager.grant(scene_id, other.uid, "read"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("charlie", "12345678") + .expect(200); + }); + + it("user without scene access cannot access the task tree", async function(){ + await userManager.grant(scene_id, other.uid, "none"); + const task = await taskScheduler.create({scene_id, user_id: null, type: "test", data: {}}); + const res = await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("charlie", "12345678"); + // Access denial may be obfuscated as 404 + expect(res.status).to.be.oneOf([401, 404]); + }); + }); + + it("rejects user for a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("bob", "12345678") + .expect(401); + }); + + it("admin can access a task with no owner and no scene", async function(){ + const task = await taskScheduler.create({scene_id: null, user_id: null, type: "test", data: {}}); + await request(this.server).get(`/tasks/${task.task_id}/tree`) + .auth("alice", "12345678") + .expect(200); + }); + }); +}); diff --git a/source/server/routes/tasks/task/tree/get.ts b/source/server/routes/tasks/task/tree/get.ts new file mode 100644 index 000000000..3c1fac80f --- /dev/null +++ b/source/server/routes/tasks/task/tree/get.ts @@ -0,0 +1,33 @@ +import { Request, Response } from "express"; +import { UnauthorizedError } from "../../../../utils/errors.js"; +import { getLocals, getUser } from "../../../../utils/locals.js"; +import { toAccessLevel } from "../../../../auth/UserManager.js"; +import { TaskDataPayload, TaskLogEntry, TaskNode } from "../../../../tasks/types.js"; + +export interface TaskTreeResponse { + task: TaskNode; + logs: 
TaskLogEntry[]; +} + + +export async function getTaskTree(req: Request, res: Response) { + const { + taskScheduler, + userManager, + } = getLocals(req); + const requester = getUser(req)!; + const { id: idString } = req.params; + const id = parseInt(idString); + + const { root, logs } = await taskScheduler.getTaskTree(id); + + if (requester.level !== "admin" + && root.user_id !== requester.uid + && (!root.scene_id + || toAccessLevel(await userManager.getAccessRights(root.scene_id, requester.uid)) < toAccessLevel("read") + ) + ) { + throw new UnauthorizedError(`Read rights are required to access task trees`); + } + res.status(200).send({ task: root, logs }); +} diff --git a/source/server/routes/views/index.ts b/source/server/routes/views/index.ts index 8b5cee869..36ea11388 100644 --- a/source/server/routes/views/index.ts +++ b/source/server/routes/views/index.ts @@ -1,14 +1,17 @@ import { Router, Request, Response, NextFunction } from "express"; -import { canRead, getHost, canWrite, getSession, getVfs, getUser, isAdministrator, getUserManager, isMemberOrManage, isManage, isEmbed, useTemplateProperties } from "../../utils/locals.js"; +import { canRead, getHost, canWrite, getSession, getVfs, getUser, isAdministrator, getUserManager, isMemberOrManage, isManage, isEmbed, useTemplateProperties, getTaskScheduler, getLocals, isUser, isCreator } from "../../utils/locals.js"; import wrap from "../../utils/wrapAsync.js"; import path from "path"; import { Scene } from "../../vfs/types.js"; import ScenesVfs from "../../vfs/Scenes.js"; import { qsToBool, qsToInt } from "../../utils/query.js"; import { isUserAtLeast, UserRoles } from "../../auth/User.js"; -import { BadRequestError } from "../../utils/errors.js"; +import { BadRequestError, ForbiddenError } from "../../utils/errors.js"; +import { debuglog } from "util"; +const debug = debuglog("http:views"); + function mapScene(req :Request, {thumb, name, ...s}:Scene):Scene{ if(thumb){ @@ -67,11 +70,68 @@ routes.get("/", wrap(async 
(req, res)=>{ }); })); -routes.get("/upload", (req, res)=>{ + + + + +routes.get("/upload", wrap(async (req, res)=>{ + const {templates}= getLocals(req); + const requester = getUser(req); + if(!isUserAtLeast(requester!, "create")){ + return res.status(401).render("error", { + error: { + message: templates.t(requester? "errors.requireCreate": "errors.requireUser", {lng: res.locals.lang, what: "/ui/upload"}) + }, + }); + } + const taskScheduler = getTaskScheduler(req); + const vfs = getVfs(req); + const {task} = req.query; + //Maybe we shouldn't fail on bad parameters and redirect to a blank page or just ignore them + const ids = [task].flat().filter(t=>typeof t === "string").map(t=>parseInt(t as string)); + if(ids.findIndex(t=>!Number.isInteger(t)) != -1){ + throw new BadRequestError(`Invalid list of tasks :${ids.join(", ")}`); + } + debug("Render previous upload tasks : ", ids); + let tasks = await Promise.all(ids.map(id=> taskScheduler.getTask(id))); + let scenes: Array<{name: string, action: "create"|"update"}|{error: string, action: "error"}> = []; + for(let task of tasks){ + if(!requester || task.user_id !== requester.uid && requester.level != "admin"){ + scenes.push({error: `Can't access results of task ${task.type}#${task.task_id}`, action: "error"}); + continue; + } + if(task.status !== "success"){ + console.warn(`Can't report on task ${task.type}#${task.task_id}: status is ${task.status}`); + scenes.push({error: `Task ${task.type}#${task.task_id} [${task.status}]${task.output?.message? 
" "+task.output.message: ""}`, action: "error"}); + }else if(task.type === "createSceneFromFiles"){ + if(typeof task.output !== "number"){ + console.warn("Unexpected output for %s :", task.type, task.output); + scenes.push({error: `Unexpected output for ${task.type}`, action: "error"}); + continue; + } + const scene = await vfs.getScene(task.output); + scenes.push({name: scene.name, action: "create"}); + }else if(task.type === "extractScenesArchives"){ + if(!Array.isArray(task.output)){ + console.warn("Unexpected output for %s :", task.type, task.output); + scenes.push({error: `Unexpected output for ${task.type}`, action: "error"}); + continue; + } + for(let {action, name } of task.output){ + scenes.push({action, name}); + } + }else{ + console.warn("Unsupported task type: %s. not an upload task?", task.type); + scenes.push({error: `Unexpected task type: ${task.type} for task #${task.task_id}`, action: "error"}); + continue; + } + } + res.render("upload", { title: "eCorpus: Create new scene", + scenes, }); -}) +})) routes.get("/tags", wrap(async (req, res)=>{ const vfs = getVfs(req); @@ -468,6 +528,74 @@ routes.get("/standalone", (req, res)=>{ }); +routes.get("/tasks", wrap(async (req, res) => { + const user = getUser(req); + if (!user || user.level === "none") { + throw new ForbiddenError("You must be logged in to view your tasks"); + } + const taskScheduler = getTaskScheduler(req); + // Parse and validate query params + const rawOwner = typeof req.query.owner === 'string' ? req.query.owner : undefined; + const rawType = typeof req.query.type === 'string' ? req.query.type : undefined; + const rawStatus = typeof req.query.status === 'string' ? req.query.status : undefined; + + // owner: 'mine'|'all' (only admins may request 'all') + const owner = rawOwner === 'all' ? 
'all' : 'mine'; + if (owner === 'all' && user.level !== 'admin') { + throw new ForbiddenError("Only administrators can list all users' tasks"); + } + + // status: 'all'|'success'|'error' + const allowedStatus = ['all', 'success', 'error']; + const status = allowedStatus.includes(rawStatus ?? '') ? (rawStatus as 'all'|'success'|'error') : 'all'; + if(rawStatus && status !== rawStatus){ + throw new BadRequestError(`Invalid status requested : ${rawStatus}`); + } + // type: optional string, limit length to avoid abuse + let type: string | undefined = undefined; + if (rawType) { + if (rawType.length > 200) throw new BadRequestError("type parameter too long"); + type = rawType; + } + + // rootOnly: optional boolean flag controlling whether only root tasks are returned. + const rootOnly = qsToBool(req.query.rootOnly) ?? true; + + const userId = owner === 'mine' ? user.uid : undefined; + + const tasks = await taskScheduler.getTasks({ user_id: userId, type, status, rootOnly }); + res.render("tasks", { + title: "My tasks", + tasks, + params: { owner, type, status, rootOnly }, + }); +})); + +routes.get("/tasks/:id(\\d+)", wrap(async (req, res) => { + const { + taskScheduler, + userManager, + vfs, + requester + } = getLocals(req); + const id = parseInt(req.params.id); + const validLevels = ["debug", "log", "warn", "error"] as const; + type Level = typeof validLevels[number]; + const rawLevel = req.query.level as string | undefined; + const level: Level = (validLevels as readonly string[]).includes(rawLevel ?? "") ? rawLevel as Level : "log"; + const {root, logs} = await taskScheduler.getTaskTree(id, {level}); + + const owner = root.user_id? (root.user_id == requester?.uid ?requester.username :(await userManager.getUserById(root.user_id)).username):null; + const scene = root.scene_id? 
(await vfs.getScene(root.scene_id)).name : null; + res.render("task", { + title: `Task #${id} — ${root.type}`, + root, + logs, + level, + owner, + scene, + }); +})); export default routes; \ No newline at end of file diff --git a/source/server/scripts/obj2gltf.py b/source/server/scripts/obj2gltf.py new file mode 100644 index 000000000..c51e01c12 --- /dev/null +++ b/source/server/scripts/obj2gltf.py @@ -0,0 +1,93 @@ +from os import path +from contextlib import redirect_stdout +from sys import argv,stderr +import argparse +import io +import bpy + + +def fail(msg): + print(msg, file=stderr) + exit(1) + +def clean(): + bpy.ops.object.select_all(action='SELECT') + bpy.ops.object.delete() + if len(bpy.data.objects) != 0: + print('Error deleting Blender scene objects', file=stderr) + exit(1) + +def file_name(filepath): + return path.split(filepath)[1] + +def dir_path(filepath): + return path.split(filepath)[0] + +def file_suffix(filepath): + return path.splitext(file_name(filepath))[1] + +def import_func_wrapper(func, filepath): + func(filepath=filepath) + +def import_mesh(filepath): + import_func = { + '.obj': bpy.ops.wm.obj_import, + '.ply': bpy.ops.wm.ply_import, + '.stl': bpy.ops.wm.stl_import, + '.fbx': bpy.ops.import_scene.fbx, + '.blend': bpy.ops.wm.open_mainfile, + } + + stdout = io.StringIO() + with redirect_stdout(stdout): + import_func_wrapper(import_func[file_suffix(filepath)], filepath=filepath) + stdout.seek(0) + return stdout.read() + +if "--" not in argv: + argv = [] # as if no args are passed +else: + argv = argv[argv.index("--") + 1:] +parser = argparse.ArgumentParser(description='Blender mesh file to GLB conversion tool') +parser.add_argument('-i', '--input', help='mesh file to be converted') +parser.add_argument('-o', '--output', help='output GLB file') +parser.add_argument('--backface', action='store_true') +args = parser.parse_args(argv) + +if not (args.input and args.output): + fail('Command line arguments not supplied or inappropriate') + + 
+clean() + +try: + + stdout = import_mesh(args.input) + print("Imported source file. Starting conversion") + if len(bpy.data.objects) == 0: + # likely invalid file error, not an easy way to capture this from Blender + fail(stdout.replace("\n", "; ")) + + + for obj in bpy.data.objects: + if type(obj.data) != bpy.types.Mesh: + continue + bpy.context.view_layer.objects.active = obj + mesh = obj.data + for f in mesh.polygons: + f.use_smooth = True + mesh.shade_smooth() + + # Disable backface culling + for eachMat in bpy.data.materials: + eachMat.use_backface_culling = args.backface + # Compress later with gltf-transform + bpy.ops.export_scene.gltf( + filepath=args.output, + export_draco_mesh_compression_enable=False + ) + +except Exception as e: + fail(str(e).replace("\n", "; ")) + +print('Successfully converted') \ No newline at end of file diff --git a/source/server/tasks/errors.test.ts b/source/server/tasks/errors.test.ts new file mode 100644 index 000000000..089ab5de6 --- /dev/null +++ b/source/server/tasks/errors.test.ts @@ -0,0 +1,85 @@ +import { BadRequestError, HTTPError } from "../utils/errors.js"; +import { parseTaskError, serializeTaskError } from "./errors.js" + + +describe("Tasks errors", function(){ + describe("serializeTaskError()", function(){ + it("serializes a normal error", function(){ + const outputString = serializeTaskError(new Error("The message")); + const output = JSON.parse(outputString); + //Don't inspect stack content... + expect(output).to.have.property("stack").a("string"); + + expect(output).to.have.property("message", "The message"); + }); + + it.skip("serializes a HTTPError", function(){ + const outputString = serializeTaskError(new HTTPError(401, "The message")); + const output = JSON.parse(outputString); + //Don't inspect stack content... 
+ expect(output).to.have.property("stack").a("string"); + expect(output).to.have.property("message", "[401] The message"); + expect(output).to.have.property("code", 401); + expect(output).to.have.property("name", "HTTPError"); + }); + + it.skip("serializes HTTPError subclasses", function(){ + const outputString = serializeTaskError(new BadRequestError("The message")); + const output = JSON.parse(outputString); + //Don't inspect stack content... + expect(output).to.have.property("stack").a("string"); + expect(output).to.have.property("message", "[400] The message"); + expect(output).to.have.property("code", 400); + expect(output).to.have.property("name", "HTTPError"); + }) + }); + + + describe("parseTaskError()", function(){ + describe("handles bad values", function(){ + //Shouldn't happen outside of unexpected bugs + //But we need this path to not be an additional nuisance in this case + [ + null, + undefined, + {} + ].forEach((v)=>{ + it(`parses ${v}`, function(){ + const out = parseTaskError(v); + expect(out).to.be.instanceof(Error); + }); + }); + it(`parses empty strings`, function(){ + const out = parseTaskError(""); + expect(out).to.be.instanceof(Error); + expect(out.message).to.have.length.above(0); + }); + }); + + describe("parses serialized error", function(){ + it("Error", function(){ + const outputString = serializeTaskError(new Error("The message")); + const out = parseTaskError(JSON.parse(outputString)); + expect(out).to.be.instanceOf(Error); + expect(out).to.have.property("message", "The message"); + }); + it.skip("HTTPError", function(){ + const _e = new HTTPError(401, "The message") + const outputString = serializeTaskError(_e); + const out = parseTaskError(JSON.parse(outputString)); + expect(out).to.be.instanceOf(HTTPError); + expect(out).to.have.property("code", 401); + expect(out).to.have.property("message", "[401] The message"); + }); + it.skip("BadRequestError", function(){ + const _e = new BadRequestError("The message"); + const outputString = 
serializeTaskError(_e); + const out = parseTaskError(JSON.parse(outputString)); + expect(out).to.be.instanceOf(HTTPError); + expect(out).to.have.property("code", 400); + expect(out).to.have.property("message", "[400] The message"); + }); + + }); +}) +}) diff --git a/source/server/tasks/errors.ts b/source/server/tasks/errors.ts new file mode 100644 index 000000000..f184dc77d --- /dev/null +++ b/source/server/tasks/errors.ts @@ -0,0 +1,35 @@ + +import { HTTPError } from "../utils/errors.js"; + + +/** + * Constructs an appropriate error from a generic object + * returned from the database + */ +export function parseTaskError(output: any) :Error|HTTPError{ + if(typeof output === "string" && output.length) return new Error(output); + let err :any = (output?.name == "HTTPError" && typeof output.code === "number")?new HTTPError(output.code, "Unknown task error"):new Error("Unknown task error"); + if(! output || typeof output !== "object") return err; + for(let key in output){ + err[key] = output[key]; + } + return err; +} +/** + * Serializes an `Error` or {@link HTTPError}. 
+ * + * The string is guaranteed to be the serialization of an object with at least a "message" property + * Iterates over the `message` and `stack` properties of standard errors, which `JSON.stringify` wouldn't do + * @fixme we should possibly try to handle errors issued from postgresql too + */ +export function serializeTaskError(e: HTTPError|Error|string):string{ + if(typeof e === "string") return JSON.stringify({message: e}); + else if(!e || typeof e !== "object") return JSON.stringify({message: `Error: ${e}`}); + else if(!e.message) return JSON.stringify({message: `Error: ${JSON.stringify(e)}`}); + let obj :any = {} + for(let key of Object.getOwnPropertyNames(e)){ + obj[key] = (e as any)[key]; + } + obj["name"] = e.name; + return JSON.stringify(obj); +} diff --git a/source/server/tasks/handlers/createDocumentFromFiles.test.ts b/source/server/tasks/handlers/createDocumentFromFiles.test.ts new file mode 100644 index 000000000..3f05986d5 --- /dev/null +++ b/source/server/tasks/handlers/createDocumentFromFiles.test.ts @@ -0,0 +1,94 @@ +import { TaskScheduler } from "../scheduler.js"; +import { DocumentModel } from "./createDocumentFromFiles.js"; +import { createDocumentFromFiles } from "./createDocumentFromFiles.js"; + + +const makeModel = (opts: Partial = {}) => { + return { + uri: "foo.glb", + byteSize: 100, + numFaces: 0, + imageSize: 0, + bounds: null, + quality: "High", + usage: "Web3D", + ...opts, + } satisfies DocumentModel; +} + + +describe("Task createDocumentFromFiles", function () { + let taskScheduler: TaskScheduler; + this.beforeAll(async function () { + await createIntegrationContext(this); + taskScheduler = this.services.taskScheduler; + }); + this.afterAll(async function () { + await cleanIntegrationContext(this); + }) + it("initializes a task scheduler", function () { + expect(taskScheduler).to.be.instanceOf(TaskScheduler); + }); + + it("Creates a valid document", async function () { + const outputDoc = await taskScheduler.run({ + handler: 
createDocumentFromFiles, + data: { + scene: "foo", + language: "FR", + models: [makeModel({ byteSize: 100, imageSize: 10, numFaces: 10, bounds: { min: [-1, -1, -1], max: [1, 1, 1] } })] + } + }); + expect(outputDoc.models).to.have.length(1); + const model = outputDoc.models![0]; + console.log(JSON.stringify(model, null, 2)); + expect(model).to.have.property("boundingBox").to.deep.equal({ min: [-1, -1, -1], max: [1, 1, 1] }); + expect(model).to.have.property("derivatives").to.deep.equal([{ + "usage": "Web3D", + "quality": "High", + "assets": [ + { + "uri": "foo.glb", + "type": "Model", + "byteSize": 100, + "numFaces": 10, + "imageSize": 10 + } + ] + }]); + }); + + it("Initializes the scene's name", async function () { + const outputDoc = await taskScheduler.run({ + handler: createDocumentFromFiles, + data: { + scene: "foo", + language: "FR", + models: [] + } + }); + expect(outputDoc).to.have.property("metas").to.have.length(1); + }); + + it("removes optional properties when necessary", async function () { + //Handles a known exception where we report "no texture" as imageSize=0, while voyager requires imageSize >= 1 if present + // Same thing with numFaces and bounds + const outputDoc = await taskScheduler.run({ + handler: createDocumentFromFiles, + data: { + scene: "foo", + language: "FR", + models: [makeModel({ imageSize: 0 })] + } + }); + expect(outputDoc.models).to.have.length(1); + const model = outputDoc.models![0]; + expect(model).not.to.have.property("boundingBox"); + expect(model).to.have.property("derivatives").to.have.length(1); + expect(model.derivatives[0]).to.have.property("assets").to.have.length(1); + const asset = model.derivatives[0].assets[0] + expect(asset).to.have.property("type"); + expect(asset).not.to.have.property("imageSize"); + expect(asset).not.to.have.property("numFaces"); + }); +}); \ No newline at end of file diff --git a/source/server/tasks/handlers/createDocumentFromFiles.ts b/source/server/tasks/handlers/createDocumentFromFiles.ts 
new file mode 100644 index 000000000..659ec929d --- /dev/null +++ b/source/server/tasks/handlers/createDocumentFromFiles.ts @@ -0,0 +1,97 @@ +import getDefaultDocument from "../../utils/schema/default.js"; +import { IDocument } from "../../utils/schema/document.js"; +import { IAsset, IModel, TDerivativeQuality, TDerivativeUsage } from "../../utils/schema/model.js"; +import { BoundingBox } from "../../utils/gltf/inspect.js"; +import uid from "../../utils/uid.js"; +import { TaskHandlerParams } from "../types.js"; +import { SceneLanguage } from "../../utils/languages.js"; + + + +export interface DocumentModel { + name?: string; + /** + * Uri is slightly misleading as it's "relative to scene root" + * `uri` **WILL** be urlencoded by {@link createDocumentFromFiles} so it should be given in clear text + */ + uri: string; + byteSize: number; + numFaces: number; + imageSize: number; + bounds: BoundingBox | null; + quality: TDerivativeQuality; + usage: TDerivativeUsage; +}; + + + + + +export interface GetDocumentParams { + scene: string; + models: Array; + language: SceneLanguage | undefined; +} + + + + +export async function createDocumentFromFiles( + { + task: { + data: { scene, models, language = "EN" } + }, + }: TaskHandlerParams): Promise { + + let document = getDefaultDocument(); + //dumb inefficient Deep copy because we want to mutate the doc in-place + document.models ??= []; + for (let model of models) { + const asset: IAsset = { + "uri": encodeURIComponent(model.uri), + "type": "Model", + }; + for (const k of ["byteSize", "numFaces", "imageSize"] as const) { + //Ignore values that do not match the schema for those properties + if (!Number.isInteger(model[k]) || model[k] < 1) continue; + asset[k] = model[k]; + } + const _m: IModel = { + "units": "m", //glTF specification says it's always meters. It's what Blender does. 
+ "derivatives": [{ + "usage": model.usage, + "quality": model.quality, + "assets": [asset] + }], + "annotations": [], + }; + if (model.bounds + && Array.isArray(model.bounds.min) + && Array.isArray(model.bounds.max) + && model.bounds.min.every(n => Number.isFinite(n)) + && model.bounds.max.every(n => Number.isFinite(n))) { + _m.boundingBox = model.bounds; + } + const index = document.models.push(_m) - 1; + const nodeIndex = document.nodes.push({ + "id": uid(), + "name": model.name ?? scene, + "model": index, + } as any) - 1; + document.scenes[0].nodes!.push(nodeIndex); + } + + + document.setups[0].language = { language: language }; + document.metas ??= []; + const meta_index = document.metas.push({ + "collection": { + "titles": { + [language]: scene, + } + }, + }) - 1; + document.scenes[document.scene].meta = meta_index; + + return document; +} diff --git a/source/server/tasks/handlers/extractZip.ts b/source/server/tasks/handlers/extractZip.ts new file mode 100644 index 000000000..18672a401 --- /dev/null +++ b/source/server/tasks/handlers/extractZip.ts @@ -0,0 +1,207 @@ +import { Readable } from "node:stream"; +import { once } from "node:events"; +import { FileArtifact, TaskHandlerParams } from "../types.js"; +import yauzl, { Entry, ZipFile } from "yauzl"; +import path from "node:path"; +import { isUserAtLeast } from "../../auth/User.js"; +import { BadRequestError, HTTPError, InternalError, UnauthorizedError } from "../../utils/errors.js"; +import { parseFilepath } from "../../utils/archives.js"; +import { toAccessLevel } from "../../auth/UserManager.js"; +import { getMimeType } from "../../utils/filetypes.js"; +import { text } from "node:stream/consumers"; +import { finished } from "node:stream/promises"; +import { UploadHandlerParams, ParsedUserUpload, UploadedArchive, UploadedFile } from "./uploads.js"; + + + +export interface ImportSuccessResult{ + name: string; + action: "create"|"update"; +} +interface ImportErrorResult{ + name: string; + action: "error"; + 
error: HTTPError|Error; +} +type ImportSceneResult = ImportSuccessResult|ImportErrorResult; + + +/** + * Analyze an uploaded file and create child tasks accordingly + */ +export async function extractScenesArchive({task: {scene_id: scene_id, user_id: user_id, data: {fileLocation}}, context:{vfs, logger, userManager, tasks}}:TaskHandlerParams):Promise{ + if(!fileLocation || typeof fileLocation !== "string") throw new Error(`invalid fileLocation provided`); + const filepath = vfs.absolute(fileLocation); + if(!user_id) throw new Error(`This task requires an authenticated user`); + const requester = await userManager.getUserById(user_id); + + let zipError: Error; + logger.debug("Open Zip file"); + const zip = await new Promise((resolve,reject)=>yauzl.open(filepath, {lazyEntries: true, autoClose: true}, (err, zip)=>(err?reject(err): resolve(zip)))); + const openZipEntry = (record:Entry)=> new Promise((resolve, reject)=>zip.openReadStream(record, (err, rs)=>(err?reject(err): resolve(rs)))); + + logger.debug("Open database transaction"); + const ts = Date.now(); + const results = await vfs.isolate(async (vfs)=>{ + //Directory entries are optional in a zip file so we should handle their absence + //We do this by maintaining a Map of scenes, and for each scene a Set of files + let scenes = new Map}>(); + // As soon as we have one or more errors, we skip trying to copy files + // but we continue processing entries to get a list of all unauthorized scenes + let has_errors = false; + /** + * Handles a zip entry. 
+ */ + const onEntry = async (record :Entry) =>{ + const {scene, name, isDirectory} = parseFilepath(record.fileName); + if(!scene ) return; + if(!scenes.has(scene)){ + let result:ImportSceneResult; + //Create the scene + try{ + const rights = await userManager.getAccessRights(scene, requester.uid) + if(toAccessLevel("write") <= toAccessLevel(rights)){ + result = {name: scene, action: "update"}; + logger.log(`Scene ${scene} will be updated`); + }else{ + logger.warn(`Scene ${scene} can't be updated: User only has access level "${toAccessLevel(rights)}"`); + result = {name: scene, action: "error", error: new UnauthorizedError(`User doesn't have write permissions on scene "${scene}"`)}; + has_errors = true; + } + }catch(e){ + if((e as HTTPError).code != 404) throw e; + //404 == Not Found. Check if user can create the scene + if (isUserAtLeast(requester, "create")) { + await vfs.createScene(scene, requester.uid); + result = {name: scene, action: "create"}; + logger.log(`Scene ${scene} will be created`); + }else{ + result = {name: scene, action: "error", error: new UnauthorizedError(`User doesn't have write permissions on scene "${scene}"`)}; + has_errors = true; + } + } + scenes.set(scene, {folders: new Set(), ...result}); + } + //Don't create the files if any scene is rejected: The transaction will be reverted anyways + if(has_errors) return; + + const _s = scenes.get(scene); //having a scene not registered in this map is not supposed to happen. Something would be seriously wrong + if(!_s || _s.action === "error") throw new Error(`Scene ${scene} wasn't properly checked for permissions`); + + + + // Proceed with content creation. 
Start with folders
+ // We have to manually re-create the structure because folder entries are optional in zip archives
+ const { folders } = _s;
+ if (!name) return;
+ let dirpath = "";
+ let pathParts = name.split("/");
+ if(!isDirectory) pathParts.pop(); //Remove last segment except for directories
+ while(pathParts.length){
+ dirpath = path.join(dirpath, pathParts.shift()!);
+ if(folders.has(dirpath)) continue;
+ folders.add(dirpath);
+ try{
+ await vfs.createFolder({scene, name: dirpath, user_id: requester.uid});
+ }catch(e){
+ if((e as HTTPError).code != 409) throw e;
+ //409 == Folder already exists, it's OK.
+ }
+ }
+
+ if(isDirectory){
+ // Is a directory. Do nothing, handled above.
+ }else if(name.endsWith(".svx.json")){
+ let data = Buffer.alloc(record.uncompressedSize), size = 0;
+ let rs = await openZipEntry(record);
+ rs.on("data", (chunk)=>{
+ chunk.copy(data, size);
+ size += chunk.length;
+ });
+ await finished(rs);
+ await vfs.writeDoc(data, {scene, user_id: requester.uid, name, mime: "application/si-dpo-3d.document+json"});
+ }else{
+ //Add the file
+ let rs = await openZipEntry(record);
+ let mime = getMimeType(name);
+ if (mime.startsWith('text/')){
+ await vfs.writeDoc(await text(rs), {user_id: requester.uid, scene, name, mime});
+ } else {
+ await vfs.writeFile(rs, {user_id: requester.uid, scene, name, mime});
+ }
+ }
+ };
+
+ zip.on("entry", (record)=>{
+ onEntry(record).then(()=>{
+ zip.readEntry()
+ }, (e)=>{
+ zip.close();
+ zipError=e;
+ });
+ });
+ zip.readEntry();
+ logger.debug("Start extracting zip entries");
+ await once(zip, "close");
+ if(zipError){
+ logger.error("Zip extraction encountered an error. 
This is most probably due to an invalid zip"); + throw zipError; + } + const results = [...scenes.values()].map(({folders, ...r})=> r as any); + if(has_errors){ + let errors = results.filter(function(r):r is ImportErrorResult {return r.action == "error"}); + if(errors.length == 1){ + throw errors[0].error; + }else { + let unauthorized = errors.filter(r=>r.error instanceof UnauthorizedError); + if(unauthorized.length === errors.length){ + throw new UnauthorizedError( + `Multiple unauthorized scenes : ${errors.map(r=>r.name).join(",")}` + ); + } else { + throw new InternalError(`Mixed errors : ${errors.map(r=>r.error.message).join(", ")}`) + } + } + }else{ + logger.debug("zip file closed successfully. Running database triggers."); + return results as ImportSuccessResult[]; + } + }); + + logger.log("Database transaction took %dms", Date.now()- ts); + return results; +}; + +export function isUploadedArchive(t: UploadedFile): t is UploadedArchive { + return t.mime == "application/zip" && (t as any).scenes?.length; +} + +export async function extractScenesArchives({ context: { tasks, logger }, task: { data: { tasks: source_ids } } }: TaskHandlerParams<{ tasks: number[]; }>): Promise { + if (!source_ids.length) throw new BadRequestError(`This task requires at least one source file`); + + for (const task_id of source_ids) { + if (!Number.isInteger(task_id)) throw new BadRequestError(`Invalid source task id: ${task_id}`); + } + const source_tasks = await Promise.all(source_ids.map(id => tasks.getTask(id))); + const failed_tasks = source_tasks.filter(t => t.status !== "success"); + const invalid_outputs = source_tasks.filter(t => !isUploadedArchive(t.output)); + if (failed_tasks.length) throw new BadRequestError(`Source task${1 < failed_tasks.length ? "s" : ""} ${failed_tasks.map(t => t.task_id).join(", ")} has not completed successfully`); + if (invalid_outputs.length) throw new BadRequestError(`Source task${1 < invalid_outputs.length ? 
"s" : ""} ${invalid_outputs.map(t => t.task_id).join(", ")} did not output a zip file`); + + const archives = source_tasks.map(t => t.output as UploadedArchive); + // Unfortunately here it's possible to have partial failures if we have more than one file. + // it could be prevented with async-context support for database transactions + if(1 < archives.length) logger.warn(`Importing more than once archive file. Partial failures are possible`); + + let results = []; + for (let archive of archives) { + results.push(...await tasks.run({ + handler: extractScenesArchive, + data: { + fileLocation: archive.fileLocation, + } + })); + } + return results; +} + diff --git a/source/server/tasks/handlers/index.ts b/source/server/tasks/handlers/index.ts new file mode 100644 index 000000000..32c1f6887 --- /dev/null +++ b/source/server/tasks/handlers/index.ts @@ -0,0 +1,4 @@ + +export {createSceneFromFiles, parseUserUpload} from "./uploads.js"; + +export {extractScenesArchives} from "./extractZip.js"; \ No newline at end of file diff --git a/source/server/tasks/handlers/inspectGlb.ts b/source/server/tasks/handlers/inspectGlb.ts new file mode 100644 index 000000000..1c742b9cc --- /dev/null +++ b/source/server/tasks/handlers/inspectGlb.ts @@ -0,0 +1,17 @@ + +import type { FileArtifact, TaskHandlerParams } from '../types.js'; + + + +/** + * Parse a glb file to gather its main attributes + * @param param0 + * @returns + */ +export async function inspectGlb({context: {vfs}, task: {data: {fileLocation}}}:TaskHandlerParams){ + const {io} = await import("../../utils/gltf/io.js"); + const {inspectDocument} = await import("../../utils/gltf/inspect.js"); + const document = await io.read(vfs.absolute(fileLocation)); // → Document + return inspectDocument(document); +} + diff --git a/source/server/tasks/handlers/optimizeGlb.ts b/source/server/tasks/handlers/optimizeGlb.ts new file mode 100644 index 000000000..7ade07a4e --- /dev/null +++ b/source/server/tasks/handlers/optimizeGlb.ts @@ -0,0 
+1,139 @@ +import path from "node:path"; +import { dedup, flatten, getSceneVertexCount, join, meshopt, prune, resample, simplify, sparse, VertexCountMethod, weld } from '@gltf-transform/functions'; + +import { toktx } from '../../utils/gltf/toktx.js'; + +import {type ProcessFileParams, type TaskHandlerParams, type ITaskLogger, FileArtifact} from "../types.js"; + +import {io} from '../../utils/gltf/io.js'; +import { MeshoptEncoder, MeshoptSimplifier } from 'meshoptimizer'; +import { TDerivativeQuality } from "../../utils/schema/model.js"; + +export interface TransformGlbParams{ + logger: ITaskLogger; + preset: TDerivativeQuality; + tmpdir: string; + /** Whether to resize textures to match maxSize */ + resize: boolean; +} + + +interface PresetSettings{ + ratio: number, + error: number, + etc1s_quality: number, + maxSize: number, +} + +function getPreset(quality: TDerivativeQuality): PresetSettings { + return { + "Thumb": {ratio: 0, error: 0.01, etc1s_quality: 128, maxSize: 512}, + "Low": {ratio: 1/4, error: 0.005, etc1s_quality: 180, maxSize: 1024}, + "Medium": {ratio: 1/2, error: 0.001, etc1s_quality: 220, maxSize: 2048}, + "High": {ratio: 1, error: 0.0001, etc1s_quality: 240, maxSize: 4096}, + "Highest": {ratio: 1, error: 0, etc1s_quality: 250, maxSize: 8192}, + "AR": {ratio: 0, error: 0.001, etc1s_quality: 180, maxSize: 2048}, + }[quality]; +} + + +export async function optimizeGlb({task: {task_id, data:{fileLocation, preset: presetName}}, context:{ vfs, logger, config }}:TaskHandlerParams):Promise{ + if(!fileLocation) throw new Error(`A file is required for this task to run`); + + const resize = true; //Might make it configurable + + //Takes a glb file as input, outputs an optimized file + //It's not yet clear if the output file's path is determined beforehand or generated as an output + let tmpdir = await vfs.createTaskWorkspace(task_id); + const inputFile = vfs.absolute(fileLocation); + let outputFile = path.join(tmpdir, path.basename(inputFile, 
".glb")+".glb"); + + logger.log("Optimize with preset %s using gltf-transform", presetName); + logger.debug("Input file:", inputFile); + + + const document = await io.read(inputFile); // → Document + + document.setLogger({...logger, info: logger.log}); + + const root = document.getRoot(); + const scene = root.getDefaultScene(); + if(!scene) throw new Error("Empty glb (no root scene)"); + + async function time(name: string, p: Promise):Promise{ + const t = Date.now(); + let res = await p; + logger.debug(`${name.padEnd(27, ' ')} ${Date.now() - t}ms`); + return res; + } + + /** + * Preset and heuristics + */ + let preset = getPreset(presetName); + + //FIXME : Don't touch geometry if it's already compressed using draco or meshopt? + logger.log("Optimize geometry"); + + + await time("Flatten", document.transform(flatten())); + + await time("Join", document.transform(join())); + + await time("Weld", document.transform(weld())); + + let vertexCount = getSceneVertexCount(root.getDefaultScene()!, VertexCountMethod.UPLOAD); + + await time("Simplify", document.transform(simplify({ + error:preset.error, + ratio: Math.min(1, preset.ratio, (preset.ratio*1000000/vertexCount)), + lockBorder: true, + simplifier: MeshoptSimplifier, + }))); + + await time("Resample", document.transform(resample())); + + await time("Sparse", document.transform(sparse())); + + await time("Compress meshs", document.transform(meshopt({ + ...preset, + encoder: MeshoptEncoder, + level: "medium", + }))); + + + //Remove draco extension as it is now unused + let ext_draco = root.listExtensionsUsed().find(e=> e.extensionName === 'KHR_draco_mesh_compression'); + ext_draco?.dispose(); + + logger.log(("Optimize textures")) + /// Textures + /** @fixme we should handle textures resize fallback when toktx is not available */ + try{ + await time("Compress ORM textures",document.transform(toktx({ + mode: "uastc", + slots: /^(normal|occlusion|metallicRoughness)/, + tmpdir, + maxSize: resize? 
preset.maxSize: undefined, + }))); + + await time("Compress Color textures",document.transform(toktx({ + mode: "etc1s", + quality: preset.etc1s_quality, + slots: /^baseColor/, + tmpdir, + maxSize: resize? preset.maxSize: undefined, + }))); + //Remove webp extension as it is now unused + let ext_webp = root.listExtensionsUsed().find(e=> e.extensionName === 'EXT_texture_webp'); + ext_webp?.dispose(); + }catch(e){ + logger.warn("Couldn't compress textures to KTX: ", e); + } + + logger.log("Output file uses extensions:", root.listExtensionsUsed().map(e=>e.extensionName)); + await io.write(outputFile, document); + + //Resize textures + return {fileLocation: vfs.relative(outputFile)}; +}; diff --git a/source/server/tasks/handlers/toGlb.test.ts b/source/server/tasks/handlers/toGlb.test.ts new file mode 100644 index 000000000..291ad67ed --- /dev/null +++ b/source/server/tasks/handlers/toGlb.test.ts @@ -0,0 +1,49 @@ +import fs from "node:fs/promises"; +import { TaskScheduler } from "../scheduler.js"; +import { toGlb } from "./toGlb.js"; +import path from "node:path"; +import Vfs from "../../vfs/index.js"; +import { randomBytes } from "node:crypto"; +import { inspectGlb } from "./inspectGlb.js"; + + + +describe("Task: toGlb", function () { + let taskScheduler: TaskScheduler, vfs: Vfs; + this.beforeAll(async function () { + const locals = await createIntegrationContext(this); + taskScheduler = locals.taskScheduler; + vfs = locals.vfs; + }); + this.afterAll(async function () { + await cleanIntegrationContext(this); + }); + + describe("Wavefront OBJ", function () { + it("converts simple OBJ file ", async function () { + const name = `${randomBytes(2).toString("hex")}-dummy`; + const fileLocation = vfs.relative(name + ".obj"); + await fs.writeFile(vfs.absolute(fileLocation), `v 0 0 0\nv 1 0 0\nv 0 1 0\nf 1 2 3\n`); + const out = await taskScheduler.run({ + handler: toGlb, + data: { fileLocation } + }); + expect(out).to.have.property("fileLocation").a("string"); + 
expect(path.basename(out.fileLocation)).to.equal(name + ".glb"); + + const meta = await taskScheduler.run({ handler: inspectGlb, data: out }); + console.log("Meta: ", meta); + expect(meta).to.have.property("numFaces", 1); + }); + + it.skip("fails on missing mtl file", async function () { + const name = `${randomBytes(2).toString("hex")}-dummy`; + const fileLocation = vfs.relative(name + ".obj"); + await fs.writeFile(vfs.absolute(fileLocation), `mtllib cube.mtl\no Cube\nv 0 0 0\nv 1 0 0\nv 0 1 0\nf 1 2 3\n`); + await expect(taskScheduler.run({ + handler: toGlb, + data: { fileLocation } + })).to.be.rejectedWith("ENOENT"); + }); + }); +}); diff --git a/source/server/tasks/handlers/toGlb.ts b/source/server/tasks/handlers/toGlb.ts new file mode 100644 index 000000000..894fd069c --- /dev/null +++ b/source/server/tasks/handlers/toGlb.ts @@ -0,0 +1,36 @@ +import path from "node:path"; +import fs from "node:fs/promises"; +import obj2gltf from "obj2gltf"; +import { FileArtifact, TaskHandlerParams } from "../types.js"; +import { BadRequestError } from "../../utils/errors.js"; +import { getMimeType } from "../../utils/filetypes.js"; + + +export async function toGlb({context: {tasks, vfs, logger}, task:{task_id, data:{fileLocation}}}:TaskHandlerParams){ + const ext = path.extname(fileLocation).toLowerCase(); + const mime = getMimeType(fileLocation); + if(mime === "model/obj"){ + return await tasks.run({ + handler: objToGlb, + data: {fileLocation}, + }); + }else{ + throw new BadRequestError(`Unsupported file extension: ${ext}`); + } +} + +export async function objToGlb({context: {vfs}, task: {task_id, data:{fileLocation}}}:TaskHandlerParams):Promise{ + + const inputFilename = path.basename(fileLocation); + const destFilename = (/\.obj$/i.test(inputFilename)? 
inputFilename.slice(0, -4): inputFilename) + ".glb" + const dir = await vfs.createTaskWorkspace(task_id); + const destPath = path.join(dir, destFilename); + const gltfBuffer = await obj2gltf(vfs.absolute(fileLocation), { + binary: true, + secure: true, //won't read outside of the source file's directory + }); + await fs.writeFile(vfs.absolute(destPath), gltfBuffer); + return { + fileLocation: vfs.relative(destPath), + } +} diff --git a/source/server/tasks/handlers/uploads.test.ts b/source/server/tasks/handlers/uploads.test.ts new file mode 100644 index 000000000..e69de29bb diff --git a/source/server/tasks/handlers/uploads.ts b/source/server/tasks/handlers/uploads.ts new file mode 100644 index 000000000..3c2ed1868 --- /dev/null +++ b/source/server/tasks/handlers/uploads.ts @@ -0,0 +1,323 @@ +import { once } from "node:events"; +import fs from "node:fs/promises"; +import path from "node:path"; + +import yauzl, { ZipFile } from "yauzl"; +import { isUserAtLeast } from "../../auth/User.js"; +import { toAccessLevel } from "../../auth/UserManager.js"; +import { parseFilepath, isMainSceneFile } from "../../utils/archives.js"; +import { FileArtifact, TaskHandlerParams } from "../types.js"; +import { BadRequestError, InternalError } from "../../utils/errors.js"; +import { toGlb } from "./toGlb.js"; +import { getMimeType, isModelType, readMagicBytes } from "../../utils/filetypes.js"; +import { optimizeGlb } from "./optimizeGlb.js"; +import { inspectGlb } from "./inspectGlb.js"; +import { BoundingBox } from "../../utils/gltf/inspect.js"; +import { createDocumentFromFiles, DocumentModel } from "./createDocumentFromFiles.js"; +import { isSceneLanguage } from "../../utils/languages.js"; + + + +export interface ImportSceneResult { + name: string; + action: "create" | "update" | "error"; + error?: string; +} + +export interface UploadHandlerParams { + filename: string; + size: number; +} + +export interface UploadedFile extends FileArtifact { + mime: string; +} + +export interface 
UploadedArchive extends UploadedFile { + mime: "application/zip"; + files: string[]; + scenes: ImportSceneResult[]; +} + +export interface UploadedBinaryModel extends UploadedFile { + mime: "model/gltf-binary"; + isModel: boolean; + name?: string; + byteSize: number; + numFaces: number; + imageSize: number; + bounds: BoundingBox | null; +} + +export interface UploadedUsdModel extends UploadedFile { + mime: "model/vnd.usdz+zip"; +} + +export interface UploadedSource extends UploadedFile { + isModel: boolean; +} + +export type ParsedUserUpload = UploadedArchive | UploadedBinaryModel | UploadedSource; + + +function isUploadedFile(output: any): output is UploadedFile { + return typeof output === "object" && typeof output.fileLocation === "string" && typeof output.mime === "string"; +} + +function isUploadedBinaryModel(output: UploadedFile): output is UploadedBinaryModel { + return isUploadedFile(output) && output.mime == "model/gltf-binary" +} + +async function parseUploadedArchive({ task: { task_id, user_id, data: { fileLocation } }, context: { vfs, userManager, logger } }: TaskHandlerParams): Promise { + if(typeof user_id !== "number") throw new Error("File upload require an owner. No user_id provided"); + const requester = await userManager.getUserById(user_id); + const filename = path.basename(fileLocation); + let files: string[] = []; + logger.debug(`Open ${fileLocation} to list entries`); + let zip = await new Promise((resolve, reject) => yauzl.open(vfs.absolute(fileLocation), { lazyEntries: false, autoClose: true }, (err, zip) => (err ? 
reject(err) : resolve(zip)))); + zip.on("entry", (record) => { + files.push(record.fileName); + }); + await once(zip, "close"); + + let scenes: ImportSceneResult[] = []; + for (let file of files) { + const { scene, name } = parseFilepath(file); + if (!scene || !name || !isMainSceneFile(file)) continue; + let action: "create" | "update" | "error"; + let error: string; + try { + const level = toAccessLevel(await userManager.getAccessRights(scene, user_id)); + if (level < toAccessLevel("write")) { + action = "error"; + error = `User doesn't have write permissions on scene ${scene}`; + } else { + action = "update"; + } + } catch (e: any) { + if (e.code !== 404) throw e; + if (isUserAtLeast(requester, "create")) { + action = "create" + } else { + action = "error"; + error = "User doesn't have permission to create a new scene"; + } + } + scenes.push({ name: scene, action, error: error! }); + } + return { + mime: "application/zip", + fileLocation, + files: files, + scenes + }; +} + + +async function parseUploadedModel({ task: { data: { fileLocation } }, context: { logger, tasks, vfs } }: TaskHandlerParams): Promise { + const filepath = vfs.absolute(fileLocation); + logger.debug("Check mime type of " + fileLocation); + const mime = await readMagicBytes(filepath); + if (mime !== "model/gltf-binary") { + throw new InternalError("This does not look like a GLB file"); + } + const meta = await tasks.run({ + handler: inspectGlb, + data: { fileLocation } + }); + + const stats = await fs.stat(filepath); + + logger.log(`Parsed glb file ${fileLocation}`); + return { + fileLocation, + mime, + isModel: true, + name: meta.name, + bounds: meta.bounds, + imageSize: meta.imageSize, + numFaces: meta.numFaces, + byteSize: stats.size, + } +} + +/** + * Inspect a user-uploaded file to detect its contents + * @param param0 + * @returns + */ +export async function parseUserUpload({ task: { task_id, user_id, data: { filename, size } }, context: { vfs, tasks, userManager, logger } }: 
TaskHandlerParams): Promise { + const fileLocation = vfs.relative(path.join(vfs.getTaskWorkspace(task_id), filename)); + + logger.debug(`Checking size of uploaded file ${fileLocation}`); + let diskSize: number; + try { + const stats = await fs.stat(vfs.absolute(fileLocation)); + diskSize = stats.size; + } catch (e: any) { + if (e.code === "ENOENT") { + logger.error(`File ${fileLocation} does not exist. Maybe it wasn't uploaded properly?`); + } + throw e; + } + if (diskSize != size) { + throw new Error(`Expected a file of size ${size}, found ${diskSize}`); + } + + + const mime = getMimeType(filename); + + if (mime === "application/zip") { + return await tasks.run({ + data: { fileLocation }, + handler: parseUploadedArchive, + }); + } else if (mime == "model/gltf-binary") { + return await tasks.run({ + data: { fileLocation }, + handler: parseUploadedModel, + }); + } else { + return { + mime, + fileLocation, + isModel: isModelType(filename), + } + } + + // @FIXME maybe we should already delete the file if it has errors? + // It depends on the behaviour we expect of a "partial success" zip upload. +} + + + +export interface ProcessUploadedFilesParams { + tasks: number[]; + name: string; + language: string; + options: { + optimize?: boolean; + } +} + + +/** + * Process file(s) that have been uploaded through `userUploads` task(s). + * The file(s) are expected to come from previous tasks + */ +export async function createSceneFromFiles({ context: { tasks, vfs, logger }, task: { user_id, task_id, data: { tasks: source_ids, name, language, options } } }: TaskHandlerParams): Promise { + if (!user_id) throw new InternalError(`Can't create an anonymous scene. 
Provide a user`); + if (!name) throw new BadRequestError(`Can't create a scene without a name`); + if (!language) throw new BadRequestError(`Default language is required for scene creation`); + if (!isSceneLanguage(language)) throw new BadRequestError(`Unsupported scene language ${language}`); + if (!source_ids.length) throw new BadRequestError(`This task requires at least one source file`); + + for (const task_id of source_ids) { + if (!Number.isInteger(task_id)) throw new BadRequestError(`Invalid source task id: ${task_id}`); + } + const source_tasks = await Promise.all(source_ids.map(id => tasks.getTask(id))); + const failed_tasks = source_tasks.filter(t => t.status !== "success"); + const invalid_outputs = source_tasks.filter(t => !isUploadedFile(t.output)); + if (failed_tasks.length) throw new BadRequestError(`Source task${1 < failed_tasks.length ? "s" : ""} #${failed_tasks.map(t => t.task_id).join(", #")} has not completed successfully`); + if (invalid_outputs.length) { + for (let t of invalid_outputs) { + console.log(`Task ${t.type}#${t.task_id} can't be used as a scene source: Output is ${JSON.stringify(t.output)}`); + } + throw new BadRequestError(`Source task${1 < invalid_outputs.length ? 
"s" : ""} ${invalid_outputs.map(t => `${t.type}#${t.task_id}`).join(", ")} did not output a file`); + } + //If some source models are present, copy all files to the task's workspace + let sources: Array = source_tasks.map(t => t.output); + if (source_tasks.some(t => (t.output as UploadedSource).isModel)) { + const dir = await vfs.createTaskWorkspace(task_id); + sources = await Promise.all(source_tasks.map(async ({ output }) => { + const filepath = vfs.absolute(output.fileLocation); + const dest = path.join(dir, path.basename(filepath)); + await fs.link(filepath, dest); + return { + ...output, + fileLocation: vfs.relative(dest) + } satisfies ParsedUserUpload; + })); + } + + const scene_id = await vfs.createScene(name, user_id); + + /** + * Optimize the model if requested and perform the final move to its destination path + */ + async function moveModel(source: UploadedBinaryModel) { + let filepath = vfs.absolute(source.fileLocation); + let filename = path.basename(filepath); + let mime = source.mime; + if (options.optimize) { + const output = await tasks.run({ + data: { + fileLocation: source.fileLocation, + preset: "High", + }, + handler: optimizeGlb, + }); + if (typeof output.fileLocation !== "string") logger.warn("Model optimization output is unreadable : ", output); + else { + filepath = vfs.absolute(output.fileLocation); + filename = path.basename(output.fileLocation); + mime = "model/gltf-binary"; + logger.debug("Optimized model %s to %s", source.fileLocation, output.fileLocation); + } + } + logger.debug("Copy %s to %s", filepath, filename); + await vfs.copyFile(filepath, { scene: scene_id, name: filename, user_id, mime }); + models.push({ + ...(source as UploadedBinaryModel), + uri: filename, + quality: "High", + usage: "Web3D" + }); + } + + + // @TODO: reparent everything to this task and this task to the created scene for better discoverability + const models: Array = []; + // We could probably return the scene ID from here and let this all be out-of-band + 
for (let source of sources) { + let filepath = vfs.absolute(source.fileLocation); + const filename = path.basename(filepath); + if (source.mime === "application/zip") { + logger.warn("in-scene Zip extraction is not yet implemented. Skipped."); + continue; + } else if (isUploadedBinaryModel(source)) { + await moveModel(source); + } else if ((source as UploadedSource).isModel) { + logger.log("Convert source model %s to GLB", source.fileLocation); + const dest = await tasks.run({ + data: { fileLocation: vfs.relative(filepath) }, + handler: toGlb, + }); + logger.debug("Copy Converted source file to %s", dest.fileLocation); + + const meta = await tasks.run({ + handler: parseUploadedModel, + data: { + fileLocation: dest.fileLocation, + } + }); + logger.debug("Parsed converted file :", meta); + await moveModel(meta); + } else { + logger.debug("Copy source file %s (%s)", filepath, source.mime); + const file = await vfs.copyFile(filepath, { scene: scene_id, name: filename, user_id, mime: source.mime }); + if (!file.hash) throw new BadRequestError(`File ${source.filepath} is empty`); + } + } + + logger.debug(`Create a new document for ${models.length} model${1 < models.length ? 
"s" : ""}`); + const doc = await tasks.run({ + scene_id: scene_id, + data: { scene: name, models, language }, + handler: createDocumentFromFiles, + }); + await vfs.writeDoc(JSON.stringify(doc), { scene: scene_id, user_id: user_id, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json" }); + logger.debug(`Scene ${scene_id} initialized`); + //TODO cleanup: unlink tasks artifacts + return scene_id; +} diff --git a/source/server/tasks/logger.test.ts b/source/server/tasks/logger.test.ts new file mode 100644 index 000000000..114c29fca --- /dev/null +++ b/source/server/tasks/logger.test.ts @@ -0,0 +1,297 @@ +import { Writable, PassThrough } from "node:stream"; +import timers from "node:timers/promises"; +import { createBatcher, createInserter, createLogger } from "./logger.js"; +import openDatabase, { Database, DatabaseHandle } from "../vfs/helpers/db.js"; +import { LogSeverity } from "./types.js"; +import { randomBytes } from "node:crypto"; +import Vfs from "../vfs/index.js"; +import UserManager from "../auth/UserManager.js"; + +/** + * Helper: collect all batches emitted by a batcher into an array + */ +function collectBatches(batcher: ReturnType) { + const batches: Array> = []; + const sink = new Writable({ + objectMode: true, + write(chunk, _, cb) { + batches.push(chunk); + cb(); + } + }); + batcher.pipe(sink); + return { batches, sink }; +} + +describe("createBatcher", function () { + + it("flushes when batch reaches batchSize", async function () { + const batcher = createBatcher(3, 5000); // large debounce so only count triggers + const { batches, sink } = collectBatches(batcher); + + for (let i = 0; i < 3; i++) { + batcher.write({ severity: "log" as LogSeverity, message: `msg${i}` }); + } + + // Batch should have been pushed synchronously + expect(batches).to.have.length(1); + expect(batches[0]).to.have.length(3); + + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + }); + + it("flushes on debounce timeout", async function () { + 
const batcher = createBatcher(100, 30); // small debounce + const { batches, sink } = collectBatches(batcher); + + batcher.write({ severity: "log" as LogSeverity, message: "hello" }); + expect(batches, "should not have flushed synchronously").to.have.length(0); + + await timers.setTimeout(60); // wait for debounce + expect(batches, "should have flushed after debounce").to.have.length(1); + expect(batches[0]).to.have.length(1); + + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + }); + + it("end() flushes remaining buffer even when timer has not fired", async function () { + const batcher = createBatcher(100, 5000); // neither count nor timer should trigger naturally + const { batches, sink } = collectBatches(batcher); + + batcher.write({ severity: "log" as LogSeverity, message: "a" }); + batcher.write({ severity: "log" as LogSeverity, message: "b" }); + expect(batches).to.have.length(0); + + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + expect(batches).to.have.length(1); + expect(batches[0]).to.have.length(2); + }); + + it("end() with empty buffer still completes the pipeline", async function () { + const batcher = createBatcher(10, 100); + const { sink } = collectBatches(batcher); + + // No writes at all + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + }); + + it("end() after timer has already fired completes cleanly", async function () { + const batcher = createBatcher(100, 20); + const { batches, sink } = collectBatches(batcher); + + batcher.write({ severity: "log" as LogSeverity, message: "early" }); + + // Wait for timer to fire + await timers.setTimeout(50); + expect(batches).to.have.length(1); + + // Now end with empty buffer + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + // No extra batch should have been pushed + expect(batches).to.have.length(1); + }); + + it("end() right after write completes without hanging", async function () { + // This tests the exact 
scenario in the scheduler: write a message, then + // immediately end the batcher before the timer fires. + this.timeout(500); + const batcher = createBatcher(10, 100); + const { batches, sink } = collectBatches(batcher); + + batcher.write({ severity: "debug" as LogSeverity, message: "schedule child" }); + batcher.end(); + + await new Promise((res) => sink.on("finish", res)); + expect(batches).to.have.length(1); + expect(batches[0]).to.have.length(1); + }); + + it("multiple rapid writes followed by end() don't lose data", async function () { + const batcher = createBatcher(5, 100); + const { batches, sink } = collectBatches(batcher); + + for (let i = 0; i < 12; i++) { + batcher.write({ severity: "log" as LogSeverity, message: `msg${i}` }); + } + batcher.end(); + await new Promise((res) => sink.on("finish", res)); + + // 5 + 5 full batches + 2 remaining = 3 batches + const totalMessages = batches.reduce((sum, b) => sum + b.length, 0); + expect(totalMessages).to.equal(12); + }); + it("completes end() even if downstream is slow", async function () { + this.timeout(2000); + + const batcher = createBatcher(10, 50); + const batches: any[] = []; + + const slowSink = new Writable({ + objectMode: true, + write(chunk, _, cb) { + batches.push(chunk); + // Simulate slow processing (like a DB write) + setTimeout(cb, 100); + } + }); + batcher.pipe(slowSink); + + // Write some data, triggering a timer + batcher.write({ severity: "log" as LogSeverity, message: "slow1" }); + + // Wait for the timer to fire and the slow write to start + await timers.setTimeout(80); + + // Now end (while possibly a write is in progress on the sink) + batcher.end(); + + await new Promise((resolve, reject) => { + slowSink.on("finish", resolve); + slowSink.on("error", reject); + }); + + const totalMessages = batches.reduce((sum: number, b: any[]) => sum + b.length, 0); + expect(totalMessages).to.equal(1); + }); +}); + + +describe("createLogger (integration)", function () { + let db_uri: string, handle: 
Database, task_id: number; + + this.beforeAll(async function () { + db_uri = await getUniqueDb("logger_test"); + handle = await openDatabase({ uri: db_uri }); + const vfs = new Vfs("/dev/null", handle); + const userManager = new UserManager(handle); + const user = await userManager.addUser("logger_tester", "12345678", "admin"); + const scene_id = await vfs.createScene(randomBytes(8).toString("base64url"), user.uid); + }); + + this.afterAll(async function () { + await handle?.end(); + await dropDb(db_uri); + }); + + this.beforeEach(async function () { + await handle.run(`DELETE FROM tasks_logs`); + await handle.run(`DELETE FROM tasks`); + // Create a fresh task for each test + const result = await handle.all<{ task_id: number }>( + `INSERT INTO tasks(type, status) VALUES ('test', 'running') RETURNING task_id` + ); + task_id = result[0].task_id; + }); + + it("dispose with no logs resolves quickly", async function () { + this.timeout(500); + const logger = createLogger(handle, task_id); + await (logger as any)[Symbol.asyncDispose](); + }); + + it("dispose with a few logs resolves", async function () { + this.timeout(1000); + const logger = createLogger(handle, task_id); + + logger.log("msg1"); + logger.log("msg2"); + logger.debug("msg3"); + + await (logger as any)[Symbol.asyncDispose](); + + // Verify logs were actually inserted + const logs = await handle.all( + `SELECT severity, message FROM tasks_logs WHERE fk_task_id = $1 ORDER BY log_id`, + [task_id] + ); + expect(logs).to.have.length(3); + expect(logs[0].message).to.equal("msg1"); + }); + + + it("dispose right after write completes without hanging", async function () { + // This mimics the scheduler scenario: write a log, then immediately dispose + this.timeout(500); + const logger = createLogger(handle, task_id); + logger.debug("schedule child task"); + + await (logger as any)[Symbol.asyncDispose](); + + const logs = await handle.all( + `SELECT message FROM tasks_logs WHERE fk_task_id = $1`, + [task_id] + ); + 
expect(logs).to.have.length(1); + }); + + it("many rapid log entries don't lose data", async function () { + this.timeout(2000); + const logger = createLogger(handle, task_id); + + for (let i = 0; i < 25; i++) { + logger.log(`message ${i}`); + } + + await (logger as any)[Symbol.asyncDispose](); + + const logs = await handle.all( + `SELECT message FROM tasks_logs WHERE fk_task_id = $1`, + [task_id] + ); + expect(logs).to.have.length(25); + }); + + it("multiple loggers for different tasks dispose independently", async function () { + this.timeout(1000); + + // Create a second task + const result = await handle.all<{ task_id: number }>( + `INSERT INTO tasks(type, status) VALUES ('test2', 'running') RETURNING task_id` + ); + const task_id2 = result[0].task_id; + + const logger1 = createLogger(handle, task_id); + const logger2 = createLogger(handle, task_id2); + + logger1.log("from logger1"); + logger2.log("from logger2"); + + await (logger1 as any)[Symbol.asyncDispose](); + await (logger2 as any)[Symbol.asyncDispose](); + + const [logs1, logs2] = await Promise.all([ + handle.all(`SELECT message FROM tasks_logs WHERE fk_task_id = $1`, [task_id]), + handle.all(`SELECT message FROM tasks_logs WHERE fk_task_id = $1`, [task_id2]), + ]); + expect(logs1).to.have.length(1); + expect(logs2).to.have.length(1); + }); + + it("multiple loggers disposing concurrently don't deadlock", async function () { + this.timeout(2000); + + const taskIds: number[] = [task_id]; + for (let i = 0; i < 4; i++) { + const result = await handle.all<{ task_id: number }>( + `INSERT INTO tasks(type, status) VALUES ('test_concurrent', 'running') RETURNING task_id` + ); + taskIds.push(result[0].task_id); + } + + const loggers = taskIds.map(id => createLogger(handle, id)); + + // Write to all loggers + for (let i = 0; i < loggers.length; i++) { + loggers[i].log(`log from ${i}`); + loggers[i].debug(`debug from ${i}`); + } + + // Dispose all concurrently + await Promise.all(loggers.map(l => (l as 
any)[Symbol.asyncDispose]())); + }); +}); diff --git a/source/server/tasks/logger.ts b/source/server/tasks/logger.ts new file mode 100644 index 000000000..39a74f0b0 --- /dev/null +++ b/source/server/tasks/logger.ts @@ -0,0 +1,109 @@ +import { Writable, Transform } from "node:stream"; +import { ITaskLogger, LogSeverity } from "./types.js"; +import { DatabaseHandle } from "../vfs/helpers/db.js"; +import { debuglog, format } from "node:util"; + +const debug = debuglog("tasks:logs"); + +/** + * Creates a Transform stream that batches logs by count or time + * @param batchSize max number of writes to batch + * @param debounceMs max time to wait before flushing writes + */ +export function createBatcher(batchSize: number, debounceMs: number) { + let buffer: Array<{ severity: LogSeverity; message: string }> = []; + let timer: NodeJS.Timeout | null = null; + + return new Transform({ + objectMode: true, + highWaterMark: batchSize, + transform(chunk, _, cb) { + buffer.push(chunk); + + // Flush immediately if batch is full + if (buffer.length >= batchSize) { + if (timer) clearTimeout(timer); + timer = null; + this.push(buffer); + buffer = []; + } else if (!timer) { + // Start timer only if one isn't already running + timer = setTimeout(() => { + if (buffer.length) { + this.push(buffer); + buffer = []; + } + timer = null; + }, debounceMs); + } + cb(); + }, + flush(cb) { + // Ensure remaining data is sent when the stream ends + if (timer) clearTimeout(timer); + if (buffer.length) this.push(buffer); + cb(); + } + }); +} + +/** + * Create a stream that batch-inserts logs into the database + * @param db + * @param task_id + * @returns + */ +export function createInserter(db: DatabaseHandle, task_id: number) { + return new Writable({ + objectMode: true, + write(batch: Array<{ severity: LogSeverity; message: string, timestamp: Date }>, _, cb) { + const severities: LogSeverity[] = []; + const messages: string[] = []; + const timestamps: Date[] = []; + for (let log of batch) { + 
severities.push(log.severity); + messages.push(log.message); + timestamps.push(log.timestamp); + } + + db.run(` + INSERT INTO tasks_logs(fk_task_id, severity, message) + SELECT $1, * + FROM UNNEST($2::log_severity[], $3::text[]) AS t(severity, message)`, + [task_id, severities, messages] + ).then(() => cb(), cb); + } + }); +} + +/** + * Disposable logger that batches log inserts using Transform streams + * Reduces database lock contention by grouping multiple inserts + */ +export function createLogger(db: DatabaseHandle, task_id: number) { + const batcher = createBatcher(10, 100); + + const inserter = createInserter(db, task_id); + + batcher.pipe(inserter); + + function log(severity: LogSeverity, message: string) { + debug(`[${severity.toUpperCase()}] ${message}`); + batcher.write({ severity, message, timestamp: new Date() }); + } + + return { + debug: (...args: any[]) => log('debug', format(...args)), + log: (...args: any[]) => log('log', format(...args)), + warn: (...args: any[]) => log('warn', format(...args)), + error: (...args: any[]) => log('error', format(...args)), + [Symbol.asyncDispose]: async function (): Promise { + // Close both streams and wait for them to finish + batcher.end(); //We expect batcher.end to be effective immediately and never throw, so we don't wait for it + await new Promise((resolve, reject) => { + inserter.on('finish', resolve); + inserter.on('error', reject); + }); + } + } satisfies ITaskLogger & AsyncDisposable; +} \ No newline at end of file diff --git a/source/server/tasks/manager.test.ts b/source/server/tasks/manager.test.ts new file mode 100644 index 000000000..5cecf6956 --- /dev/null +++ b/source/server/tasks/manager.test.ts @@ -0,0 +1,432 @@ + + +// Tests for tasks management +// The system is a bit intricate and hard to test in isolation + +import { Client } from "pg"; +import openDatabase, { Database } from "../vfs/helpers/db.js"; + +import { Uid } from "../utils/uid.js"; +import { randomBytes } from "node:crypto"; +import 
{ TaskManager } from "./manager.js"; +import Vfs from "../vfs/index.js"; +import UserManager from "../auth/UserManager.js"; +import { BadRequestError, NotFoundError } from "../utils/errors.js"; + +describe("TaskManager", function(){ + let db_uri: string, scene_id: number, user_id: number, handle: Database; + + let listener :TaskManager; + this.beforeAll(async function(){ + db_uri = await getUniqueDb(this.currentTest?.title.replace(/[^\w]/g, "_")); + handle = await openDatabase({uri: db_uri}); + const vfs = new Vfs("/dev/null", handle); + scene_id = await vfs.createScene(randomBytes(8).toString("base64url")); + const userManager = new UserManager(handle); + user_id = (await userManager.addUser(randomBytes(8).toString("base64url"), randomBytes(8).toString("base64url"))).uid; + }); + + this.afterAll(async function(){ + await handle?.end(); + await dropDb(db_uri); + }); + + this.beforeEach(async function(){ + await Promise.all([ + handle.run(`DELETE FROM tasks`), + handle.run(`DELETE FROM tasks_logs`), + ]); + + listener = new TaskManager(handle); + }) + + + //Non-connected functions that should work well in isolation + it("create tasks", async function(){ + let task = await listener.create({ + scene_id: null, + user_id: null, + type: "delayTask", + data: {time: 0} + }); + expect(task.task_id).to.be.a("number"); + expect(await handle.all("SELECT * FROM tasks")).to.have.length(1); + }); + + it("create a task attached to a scene", async function(){ + let task = await listener.create({ + scene_id, + user_id: null, + type: "delayTask", + data: {time: 0} + }); + expect(task.task_id).to.be.a("number"); + expect(task.scene_id).to.equal(scene_id); + }) + + it("create a task attached to a user", async function(){ + let task = await listener.create({ + scene_id: null, + user_id, + type: "delayTask", + data: {time: 0} + }); + expect(task.task_id).to.be.a("number"); + expect(task.user_id).to.equal(user_id); + }); + + it("update task status", async function(){ + let t = await 
listener.create({ + scene_id, + user_id: null, + type: "delayTask", + data: {time: 0} + }); + expect(await handle.get("SELECT * FROM tasks")).to.have.property("status", "pending"); + await listener.setTaskStatus(t.task_id, "success"); + expect(await handle.get("SELECT * FROM tasks")).to.have.property("status", "success"); + }); + + it("get a task", async function(){ + //Create a task + let t = await listener.create({ + scene_id, + user_id: null, + type: "delayTask", + data: {time: 0} + }); + + let resolved = await listener.getTask(t.task_id); + expect(resolved).to.deep.equal(t); + expect(resolved.data).to.deep.equal({time: 0}); + }); + + + it("creates a task with a parent", async function(){ + let parent = await listener.create({ + scene_id, + user_id: null, + type: "delayTask", + data: {time: 0} + }); + + let child = await listener.create({ + scene_id, + user_id: null, + type: "delayTask", + data: {time: 0}, + parent: parent.task_id, + }); + + expect(child).to.have.property("parent", parent.task_id); + }); + + it("raises NotFoundError when getting non-existent task", async function(){ + await expect(listener.getTask(99999)).to.be.rejectedWith("No task found"); + }); + + it("raises NotFoundError calling setTaskStatus on non-existent task", async function(){ + // the scheduler shouldn't assume the update succeeded. + await expect(listener.setTaskStatus(99999, "running")).to.be.rejectedWith(NotFoundError); + }); + + it("raises NotFoundError calling takeTask on non-existent task", async function(){ + // the scheduler shouldn't assume the update succeeded. + await expect(listener.takeTask(99999)).to.be.rejectedWith(NotFoundError); + }); + + it("raises NotFoundError calling releaseTask on non-existent task", async function(){ + // the scheduler shouldn't assume the update succeeded. 
+ await expect(listener.releaseTask(99999)).to.be.rejectedWith(NotFoundError); + }); + + it("raises NotFoundError calling errorTask on non-existent task", async function(){ + // the scheduler shouldn't assume the update succeeded. + await expect(listener.errorTask(99999, new Error("some error"))).to.be.rejectedWith(NotFoundError); + }); + + it("handles database errors during task creation", async function(){ + await expect(listener.create({ + scene_id: -1, // Invalid ID + user_id: null, + type: "delayTask", + data: {}, + })).to.be.rejected; + }); + + + it("close() prevents further database operations", async function(){ + // PURPOSE: Verify that calling close() properly invalidates the manager + // and prevents accidental use after close, which could cause connection leaks. + const manager = new TaskManager(handle); + await manager.close(); + + await expect(manager.create({ + scene_id: null, + user_id: null, + type: "test", + data: {}, + })).to.be.rejectedWith("TaskManager has been closed"); + }); + + + it("deleteTask properly cleans up cascade deletions", async function(){ + // PURPOSE: Verify that deleteTask with cascading deletes doesn't leave + // orphaned records or cause constraint violations on subsequent operations. 
+ const parent = await listener.create({ + scene_id: null, + user_id: null, + type: "parent", + data: {}, + }); + + const child = await listener.create({ + scene_id: null, + user_id: null, + type: "child", + data: {}, + parent: parent.task_id, + }); + + const deleted = await listener.deleteTask(parent.task_id); + expect(deleted).to.be.true; + + // Child should be cascaded deleted + const rows = await handle.all("SELECT * FROM tasks"); + expect(rows).to.have.length(0); + }); + + it("takeTask only transitions from pending or initializing status", async function(){ + let task = await listener.create({ + scene_id: null, + user_id: null, + type: "test", + data: {}, + status: "pending", + }); + + await listener.takeTask(task.task_id); + let updated = await listener.getTask(task.task_id); + expect(updated.status).to.equal("running"); + + // Try to take again - should throw. + await expect(listener.takeTask(task.task_id)).to.be.rejectedWith(BadRequestError); + }); + + it("setTaskStatus validates status values", async function(){ + // PURPOSE: Verify that invalid status transitions are handled correctly. + // The type system prevents "success" and "error" here, but "running"->pending + // should be allowed even if not semantically correct. Tests the type boundaries. + const task = await listener.create({ + scene_id: null, + user_id: null, + type: "test", + data: {}, + }); + + await listener.setTaskStatus(task.task_id, "running"); + let updated = await listener.getTask(task.task_id); + expect(updated.status).to.equal("running"); + + // Setting to initializing should also work + await listener.setTaskStatus(task.task_id, "initializing"); + updated = await listener.getTask(task.task_id); + expect(updated.status).to.equal("initializing"); + }); + + it("task output is properly serialized and deserialized", async function(){ + // PURPOSE: Verify round-trip serialization of various data types. + // Ensures JSON serialization doesn't lose precision or corrupt data. 
+ const task = await listener.create({ + scene_id: null, + user_id: null, + type: "test", + data: {}, + }); + + const testOutput = { + string: "test", + number: 42, + float: 3.14159, + boolean: true, + null: null, + array: [1, 2, 3], + nested: { + deep: { + value: "found", + } + } + }; + + await listener.releaseTask(task.task_id, testOutput); + const retrieved = await listener.getTask(task.task_id); + + // Output is stored as JSON string, so we need to parse it if comparing + expect(retrieved.output).to.be.an("object"); + expect(retrieved.output).to.deep.equal(testOutput); + }); + + describe("getTaskTree()", function(){ + it("returns the root task with empty logs when there are none", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + const {root: rootNode, logs} = await listener.getTaskTree(root.task_id); + + expect(logs).to.deep.equal([]); + expect(rootNode.task_id).to.equal(root.task_id); + expect(rootNode.children).to.deep.equal([]); + }); + + it("throws NotFoundError for a non-existent task id", async function(){ + await expect(listener.getTaskTree(99999)).to.be.rejectedWith(NotFoundError); + }); + + it("returns logs for a single task, ordered by log_id ASC", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + + await handle.run( + `INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'first'), ($1, 'warn', 'second')`, + [root.task_id] + ); + + const {root: rootNode, logs} = await listener.getTaskTree(root.task_id); + + expect(rootNode.children).to.deep.equal([]); + expect(logs).to.have.length(2); + expect(logs[0].message).to.equal("first"); + expect(logs[1].message).to.equal("second"); + expect(logs[0].log_id).to.be.lessThan(logs[1].log_id); + expect(logs[0].task_id).to.equal(root.task_id); + expect(logs[1].task_id).to.equal(root.task_id); + }); + + it("returns parent and direct children with their respective 
logs", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + const child1 = await listener.create({scene_id: null, user_id: null, type: "child", data: {}, parent: root.task_id}); + const child2 = await listener.create({scene_id: null, user_id: null, type: "child", data: {}, parent: root.task_id}); + + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'root-log')`, [root.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'child1-log')`, [child1.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'error', 'child2-log')`, [child2.task_id]); + + const {root: rootNode, logs} = await listener.getTaskTree(root.task_id); + + // All logs present, ordered by log_id + expect(logs).to.have.length(3); + const logIds = logs.map(l => l.log_id); + expect(logIds).to.deep.equal([...logIds].sort((a, b) => a - b)); + + // Root node carries both children + expect(rootNode.children.map(c => c.task_id).sort()).to.deep.equal([child1.task_id, child2.task_id].sort()); + + // Children carry the parent id and no children of their own + const childNode1 = rootNode.children.find(c => c.task_id === child1.task_id)!; + expect(childNode1.parent).to.equal(root.task_id); + expect(childNode1.children).to.deep.equal([]); + }); + + it("fetches deeply nested (grandchild) tasks recursively", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + const child = await listener.create({scene_id: null, user_id: null, type: "child", data: {}, parent: root.task_id}); + const grand = await listener.create({scene_id: null, user_id: null, type: "grandchild", data: {}, parent: child.task_id}); + + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'grand-log')`, [grand.task_id]); + + const {root: rootNode, logs} = await 
listener.getTaskTree(root.task_id); + + // Log comes from the grandchild + expect(logs).to.have.length(1); + expect(logs[0].task_id).to.equal(grand.task_id); + + // Graph is properly linked: root -> child -> grandchild + expect(rootNode.children).to.have.length(1); + const childNode = rootNode.children[0]; + expect(childNode.task_id).to.equal(child.task_id); + expect(childNode.children).to.have.length(1); + expect(childNode.children[0].task_id).to.equal(grand.task_id); + }); + + it("does not include tasks outside the requested subtree", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + const child = await listener.create({scene_id: null, user_id: null, type: "child", data: {}, parent: root.task_id}); + const unrelated = await listener.create({scene_id: null, user_id: null, type: "other", data: {}}); + + // Fetching only the child subtree should not include root or unrelated + const {root: subtreeRoot} = await listener.getTaskTree(child.task_id); + expect(subtreeRoot.task_id).to.equal(child.task_id); + expect(subtreeRoot.parent).to.equal(root.task_id); // parent id is preserved + expect(subtreeRoot.children).to.deep.equal([]); // but parent node is not included + }); + + it("logs from all tasks in the tree are merged and sorted by log_id", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + const child = await listener.create({scene_id: null, user_id: null, type: "child", data: {}, parent: root.task_id}); + + // Interleave inserts from different tasks to test global ordering + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'a')`, [root.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'b')`, [child.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'c')`, [root.task_id]); + + const {logs} = 
await listener.getTaskTree(root.task_id); + + expect(logs).to.have.length(3); + const logIds = logs.map(l => l.log_id); + expect(logIds).to.deep.equal([...logIds].sort((a, b) => a - b)); + expect(logs.map(l => l.message)).to.deep.equal(['a', 'b', 'c']); + }); + + it("filters logs by minimum severity level", async function(){ + const root = await listener.create({scene_id: null, user_id: null, type: "root", data: {}}); + + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'debug', 'msg-debug')`, [root.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'log', 'msg-log')`, [root.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'warn', 'msg-warn')`, [root.task_id]); + await handle.run(`INSERT INTO tasks_logs(fk_task_id, severity, message) VALUES ($1, 'error', 'msg-error')`, [root.task_id]); + + // no filter → all four lines + const {logs: all} = await listener.getTaskTree(root.task_id); + expect(all.map(l => l.message)).to.deep.equal(['msg-debug', 'msg-log', 'msg-warn', 'msg-error']); + + // level: 'log' → excludes 'debug' + const {logs: fromLog} = await listener.getTaskTree(root.task_id, {level: 'log'}); + expect(fromLog.map(l => l.message)).to.deep.equal(['msg-log', 'msg-warn', 'msg-error']); + + // level: 'warn' → only warn and error + const {logs: fromWarn} = await listener.getTaskTree(root.task_id, {level: 'warn'}); + expect(fromWarn.map(l => l.message)).to.deep.equal(['msg-warn', 'msg-error']); + + // level: 'error' → only errors + const {logs: fromError} = await listener.getTaskTree(root.task_id, {level: 'error'}); + expect(fromError.map(l => l.message)).to.deep.equal(['msg-error']); + }); + }); + + describe("getTasks()", function(){ + it("returns only root tasks by default when filtering by user", async function(){ + const root = await listener.create({scene_id: null, user_id, type: "root", data: {}}); + const child = await 
listener.create({scene_id: null, user_id, type: "child", data: {}, parent: root.task_id}); + + const tasks = await listener.getTasks({ user_id }); + expect(tasks).to.have.length(1); + expect(tasks[0].task_id).to.equal(root.task_id); + }); + + it("returns child tasks as well when rootOnly is false", async function(){ + const root = await listener.create({scene_id: null, user_id, type: "root", data: {}}); + const child = await listener.create({scene_id: null, user_id, type: "child", data: {}, parent: root.task_id}); + + const tasks = await listener.getTasks({ user_id, rootOnly: false }); + // both root and child should be returned + const ids = tasks.map(t => t.task_id).sort((a,b)=>a-b); + expect(ids).to.deep.equal([root.task_id, child.task_id].sort((a,b)=>a-b)); + }); + + it("applies exact type matching", async function(){ + const a = await listener.create({scene_id: null, user_id, type: "delayTask", data: {}}); + const b = await listener.create({scene_id: null, user_id, type: "other", data: {}}); + + const tasks = await listener.getTasks({ user_id, type: 'delayTask' }); + expect(tasks).to.have.length(1); + expect(tasks[0].type).to.equal('delayTask'); + }); + }); +}); diff --git a/source/server/tasks/manager.ts b/source/server/tasks/manager.ts new file mode 100644 index 000000000..0ca628366 --- /dev/null +++ b/source/server/tasks/manager.ts @@ -0,0 +1,358 @@ +import { debuglog } from "node:util"; +import { BadRequestError, HTTPError, NotFoundError } from "../utils/errors.js"; +import { DatabaseHandle } from "../vfs/helpers/db.js"; +import { serializeTaskError } from "./errors.js"; +import { TaskStatus, TaskDataPayload, TaskDefinition, CreateTaskParams, TaskNode, TaskLogEntry, TaskTreeResult, LogSeverity } from "./types.js"; + +const debug_status = debuglog("tasks:status"); +const debug_logs = debuglog("tasks:logs"); + +/** + * Contains base interface to manage tasks: creation, status changes, etc... 
+ * To be used through a {@link TaskScheduler} or directly for externally-managed tasks + */ +export class TaskManager{ + public get db(){ + if(!this._db) throw new Error(`TaskManager has been closed`); + return this._db; + } + + constructor(private _db: DatabaseHandle){ + if(!this._db) throw new Error("A valid database handle is required to instanciate a TaskManager"); + } + + + close(){ + this._db = null as any; + } + /** + * Internal method to adjust task status + * @param id + * @param status + * @warning it's almost always better to use the narrowed-down versions : + * {@link TaskManager.takeTask}, {@link TaskManager.releaseTask} and {@link TaskManager.errorTask} + * Because they contain more robust assertions about the task's current sate that might prevent a number of race conditions + * eg. {@link TaskManager.takeTask} will only work on tasks that have not already started + */ + public async setTaskStatus(id: number, status:Omit): Promise{ + debug_status("Set task %d to status %s", id, status); + let r = await this.db.run(`UPDATE tasks SET status = $2 WHERE task_id = $1`, [id, status]); + if(!r.changes) throw new NotFoundError(`No task found with id ${id}`); + } + + /** + * "take" a task that has status "pending" or "initializing" and switch it to status "running" + * @param id + * @throws {NotFoundError} if task doesn't exist + * @throws {BadRequestError} if task status doesn't match 'pending' or 'initializing' + */ + public async takeTask(id: number): Promise{ + debug_status("Take task %d", id); + const r = await this.db.run(`UPDATE tasks SET status = 'running' WHERE task_id = $1 AND status IN ('initializing', 'pending')`, [id]); + if(!r.changes){ + const t = await this.getTask(id); //will throw NotFoundError if task doesn't exist + throw new BadRequestError(`Can't take task #${id} with status ${t.status}`); + } + } + + /** + * Marks a task as completed + * Output is serialized using `JSON.stringify()` + * @throws {NotFoundError} if task doesn't exist + 
*/ + async releaseTask(id: number, output: any = null){ + if(debug_logs.enabled) debug_logs(`Release task #${id}`, output); + else debug_status(`Release task #${id}`); + + const result = await this.db.run(`UPDATE tasks SET status = 'success', output = $2 WHERE task_id = $1`, [id, JSON.stringify(output)]); + if(!result.changes){ + throw new NotFoundError(`No task found with id ${id}`); + } + } + + /** + * Marks a task as failed with an error message + * @throws {NotFoundError} if task doesn't exist + * @throws {Error} if error serialization or database update fails + */ + async errorTask(id: number, reason: HTTPError|Error|string){ + try{ + debug_status(`Task #${id} Error : `, reason); + const serialized = serializeTaskError(reason) + const result = await this.db.run(`UPDATE tasks SET status = 'error', output = $2 WHERE task_id = $1`, [id, serialized]); + + if(!result.changes){ + throw new NotFoundError(`No task found with id ${id}`); + } + }catch(e:any){ + console.error("While trying to set task status:", e); + throw e; + } + } + + static #taskColumns = ` + fk_scene_id AS scene_id, + fk_user_id AS user_id, + task_id, + ctime, + type, + parent, + data, + output, + status + `; + + // Same as #taskColumns but with every column qualified by the tasks table alias, + // needed inside recursive CTEs where tasks is joined against a CTE that also + // exposes task_id (causing an "ambiguous column" error in PostgreSQL). + static #qualifiedTaskColumns = ` + tasks.fk_scene_id AS scene_id, + tasks.fk_user_id AS user_id, + tasks.task_id, + tasks.ctime, + tasks.type, + tasks.parent, + tasks.data, + tasks.output, + tasks.status + `; + + public async create({scene_id, user_id, type, data, status='pending', parent=null}: CreateTaskParams): Promise>{ + let args = [scene_id, type, data ?? 
{}, status, user_id, parent]; + let task = await this.db.all>(` + INSERT INTO tasks(fk_scene_id, type, data, status, fk_user_id, parent) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING ${TaskManager.#taskColumns} + `, args); + return task[0]; + } + + public async getTask(id: number):Promise>{ + let task = await this.db.all>(` + SELECT + ${TaskManager.#taskColumns} + FROM tasks + WHERE task_id = $1 + LIMIT 1 + `, [id]); + if(!task.length) throw new NotFoundError(`No task found with id ${id}`); + return task[0]; + } + + /** + * Returns all log lines for a single task, ordered by `log_id ASC`. + * @throws {NotFoundError} if no task with `id` exists + */ + public async getLogs(id: number): Promise { + // Verify task existence first so callers get a proper NotFoundError + await this.getTask(id); + return this.db.all(` + SELECT + log_id, + fk_task_id AS task_id, + timestamp, + severity, + message + FROM tasks_logs + WHERE fk_task_id = $1 + ORDER BY log_id ASC + `, [id]); + } + + /** + * Fetches a task by id together with **all descendants** (tasks whose `parent` + * chain leads back to `id`), and every log line produced by any of those tasks. + * + * The query is issued as a **single atomic statement** using a recursive CTE so + * the result is a consistent snapshot even under concurrent writes. + * + * Returned structure: + * - `root` – the requested {@link TaskNode} with descendants nested under `children`. + * - `logs` – flat array of {@link TaskLogEntry} ordered by `log_id ASC`, + * optionally filtered to lines at or above `options.level`. + * + * @param options.level Minimum severity to include. Defaults to `'debug'` (all lines). + * Severity order: `debug` < `log` < `warn` < `error`. + * @throws {NotFoundError} when no task with `id` exists + */ + public async getTaskTree( + id: number, + options?: { level?: LogSeverity } + ): Promise> { + /* + * One atomic query: + * 1. Recursive CTE `tree` walks the task graph depth-first. + * 2. 
Left-join with tasks_logs to pull every log line in the same pass, + * filtered by minimum severity using PostgreSQL's ENUM ordering. + * 3. The outer SELECT returns every (task, log?) pair; rows with no + * matching logs still appear once thanks to LEFT JOIN. + * + * Post-processing in JS is O(n) and avoids a second round-trip. + */ + const level: LogSeverity = options?.level ?? 'debug'; + const rows = await this.db.all<{ + // task columns + scene_id: number; + user_id: number; + task_id: number; + ctime: Date; + type: string; + parent: number | null; + data: TData extends undefined ? {} : TData; + output: TReturn; + status: string; + // log columns (nullable – task may have no logs) + log_id: number | null; + log_task_id: number | null; + timestamp: Date | null; + severity: string | null; + message: string | null; + }>(` + WITH RECURSIVE tree AS ( + SELECT ${TaskManager.#taskColumns} + FROM tasks + WHERE task_id = $1 + + UNION ALL + + SELECT ${TaskManager.#qualifiedTaskColumns} + FROM tasks + INNER JOIN tree ON tasks.parent = tree.task_id + ) + SELECT + tree.*, + tl.log_id, + tl.fk_task_id AS log_task_id, + tl.timestamp, + tl.severity, + tl.message + FROM tree + LEFT JOIN tasks_logs tl ON tl.fk_task_id = tree.task_id + AND tl.severity >= $2::log_severity + ORDER BY tl.log_id ASC NULLS FIRST + `, [id, level]); + + if (!rows.length) { + throw new NotFoundError(`No task found with id ${id}`); + } + + // --- assemble tasks (deduplicate by task_id) and logs (deduplicate by log_id) --- + const taskMap = new Map>(); + const logs: TaskLogEntry[] = []; + const seenLogIds = new Set(); + + for (const row of rows) { + if (!taskMap.has(row.task_id)) { + taskMap.set(row.task_id, { + scene_id: row.scene_id, + user_id: row.user_id, + task_id: row.task_id, + ctime: row.ctime, + type: row.type, + parent: row.parent, + after: [], + data: row.data, + output: row.output, + status: row.status as any, + children: [], + }); + } + + if (row.log_id !== null && !seenLogIds.has(row.log_id)) 
{ + seenLogIds.add(row.log_id); + logs.push({ + log_id: row.log_id, + task_id: row.log_task_id!, + timestamp: row.timestamp!, + severity: row.severity as any, + message: row.message!, + }); + } + } + + // Wire up children and find the root (the node whose parent is not in the tree) + let root: TaskNode | undefined; + for (const node of taskMap.values()) { + if (node.parent !== null && taskMap.has(node.parent)) { + taskMap.get(node.parent)!.children.push(node); + } else { + root = node; + } + } + + return { root: root!, logs }; + } + + /** + * Returns all **root** tasks (tasks without a parent) created by a given user, + * ordered by creation time descending (newest first). + * + * Only root tasks are returned because child tasks are accessible via + * {@link getTaskTree} from their parent. Returning the full flat list would + * duplicate every child in the summary and make the view noisy. + * + * @param userId The `uid` of the user whose tasks to fetch. + */ + /** + * Generic task listing with optional filters. + * + * Options: + * - `user_id` : filter by `fk_user_id` + * - `type` : regular expression match against `type` column + * - `scene_id`: filter by `fk_scene_id` + * + * Returns tasks ordered by `ctime DESC` (newest first). + */ + public async getTasks(options: { user_id?: number; type?: string; scene_id?: number; rootOnly?: boolean; status?: 'success'|'error'|'all' } = {}): Promise { + const where: string[] = []; + const params: any[] = []; + + if (options.user_id != null) { + where.push(`fk_user_id = $${params.push(options.user_id)}`); + } + + if (options.scene_id != null) { + where.push(`fk_scene_id = $${params.push(options.scene_id)}`); + } + + + if (options.type) { + // Exact match on `type` (previously supported regex). Use equality to + // avoid surprising regex semantics and to keep behavior predictable. + where.push(`type = $${params.push(options.type)}`); + } + + // By default, return only root tasks (no parent). 
Caller can set `rootOnly: false` + // to include child tasks as well. This preserves previous summary behaviour. + if (options.rootOnly !== false) { + where.push(`parent IS NULL`); + } + + // Status filter: 'success' or 'error' or 'all' (no filter) + if (options.status && options.status !== 'all') { + where.push(`status = $${params.push(options.status)}`); + } + + const whereClause = where.length ? `WHERE ${where.join(" AND ")}` : ""; + + return this.db.all(` + SELECT ${TaskManager.#qualifiedTaskColumns}, scenes.scene_name AS scene, users.username AS owner + FROM tasks + LEFT JOIN scenes ON scenes.scene_id = tasks.fk_scene_id + LEFT JOIN users ON users.user_id = tasks.fk_user_id + ${whereClause} + ORDER BY tasks.ctime DESC + `, params); + } + + /** + * Deletes a task from the database + * Deletion will cascade to any dependents. + */ + async deleteTask(id: number): Promise{ + let r = await this.db.run(`DELETE FROM tasks WHERE task_id = $1`, [id]); + if(r.changes !== 1) return false; + return true; + } + +} \ No newline at end of file diff --git a/source/server/tasks/queue.test.ts b/source/server/tasks/queue.test.ts new file mode 100644 index 000000000..aa1d6ff07 --- /dev/null +++ b/source/server/tasks/queue.test.ts @@ -0,0 +1,194 @@ +import timers from "node:timers/promises"; +import { Queue } from "./queue.js"; + + + +describe("Queue", function () { + let q: Queue; + this.beforeEach(function () { + q = new Queue(); + }); + describe("close()", function () { + it("close enmpty queue", async function () { + await q.close(); + }); + it("can't close twice", async function () { + await q.close(); + await expect(q.close()).to.be.rejectedWith("already closed"); + }); + it("can't add jobs once closed", async function () { + await q.close(); + //add() throws synchronously + expect(() => q.add(() => Promise.resolve())).to.throw("Can't add new tasks"); + }); + + it("cancels running jobs", async function () { + let result: any; + let _op = q.add(async ({ signal }) => { + try { 
+ await timers.setTimeout(1000, null, { signal }) + result = "ok"; + } catch (e: any) { + result = e.name; + throw e; + } + }).catch(e => e); + + //Shouldn't throw despite the task throwing an AbortError + await q.close(100); + expect(result).to.equal("AbortError"); + expect(await _op).to.have.property("name", "AbortError"); + }) + + it("force quit jobs after a timeout", async function () { + let result: any; + let _op = q.add(async ({ signal }) => timers.setTimeout(100, null, /*no signal support */)); + + //Shouldn't throw despite the task throwing an AbortError + await q.close(1); + await expect(_op).to.be.rejectedWith("Queue close timeout") + }) + }); + + describe("add()", function () { + it("can process a task", async function () { + const result = await q.add(() => Promise.resolve("Hello")); + expect(result).to.equal("Hello"); + }); + + it("respects concurrency limit", async function () { + q = new Queue(2); + let running = 0; + let maxRunning = 0; + const tasks = []; + for (let i = 0; i < 6; i++) { + tasks.push(q.add(async () => { + running++; + maxRunning = Math.max(maxRunning, running); + await timers.setTimeout(10); + running--; + return i; + })); + } + const results = await Promise.all(tasks); + expect(results).to.deep.equal([0, 1, 2, 3, 4, 5]); + expect(maxRunning).to.equal(2); + }); + }); + + describe("unshift()", function () { + it("bypasses concurrency limit", async function () { + q = new Queue(1); + const order: number[] = []; + // Fill the queue with a slow task + const t1 = q.add(async () => { + await timers.setTimeout(20); + order.push(1); + }); + // unshift should start immediately even though concurrency is 1 + const t2 = q.unshift(async () => { + order.push(2); + }); + await t2; + expect(order).to.include(2); + await t1; + }); + }); + + describe("nested queues (simulating scheduler nesting)", function () { + it("nested add() calls complete without deadlock", async function () { + this.timeout(2000); + const root = new Queue(2, "root"); + const 
result = await root.add(async () => { + const child = new Queue(1, "child"); + try { + return await child.add(async () => { + const grandchild = new Queue(1, "grandchild"); + try { + return await grandchild.add(async () => "deep result"); + } finally { + await grandchild.close(); + } + }); + } finally { + await child.close(); + } + }); + expect(result).to.equal("deep result"); + await root.close(); + }); + + it("deeply nested queues with awaits between levels don't deadlock", async function () { + this.timeout(2000); + const root = new Queue(2, "root"); + const executed: number[] = []; + + await root.add(async () => { + executed.push(1); + const q1 = new Queue(1, "q1"); + try { + await q1.add(async () => { + executed.push(2); + // Yield to event loop (mimics timers.setTimeout(0) in the scheduler test) + await timers.setTimeout(0); + const q2 = new Queue(1, "q2"); + try { + await q2.add(async () => { + executed.push(3); + }); + } finally { + await q2.close(); + } + }); + } finally { + await q1.close(); + } + }); + + expect(executed).to.deep.equal([1, 2, 3]); + await root.close(); + }); + + it("queue close() after all tasks complete returns immediately", async function () { + const child = new Queue(1, "child"); + await child.add(() => Promise.resolve("done")); + // All tasks done, close should not hang + await child.close(); + }); + + it("multiple concurrent tasks with nested queues don't deadlock", async function () { + this.timeout(3000); + const root = new Queue(2, "root"); + const results = await Promise.all( + Array.from({ length: 4 }, (_, i) => + root.add(async () => { + const nested = new Queue(1, `nested-${i}`); + try { + return await nested.add(async () => { + await timers.setTimeout(1); + return i; + }); + } finally { + await nested.close(); + } + }) + ) + ); + expect(results).to.deep.equal([0, 1, 2, 3]); + await root.close(); + }); + + it("nested queue close() with pending tasks rejects them", async function () { + const child = new Queue(1, "child"); + 
const t1 = child.add(async ({ signal }) => { + await timers.setTimeout(500, null, { signal }); + }); + const t2 = child.add(() => Promise.resolve("pending")); + + await child.close(50); + // t1 should abort, t2 should be rejected (never started) + await expect(t1).to.be.rejected; + await expect(t2).to.be.rejected; + }); + }); +}) \ No newline at end of file diff --git a/source/server/tasks/queue.ts b/source/server/tasks/queue.ts new file mode 100644 index 000000000..0f278ec11 --- /dev/null +++ b/source/server/tasks/queue.ts @@ -0,0 +1,140 @@ +import EventEmitter from "node:events"; +import timers from "node:timers/promises"; +import { TaskDataPayload, TaskHandler, TaskPackage } from "./types.js"; + + + +interface WorkPackage{ + work: TaskPackage; + resolve: (value: ReturnType) => void; + reject: (err: Error)=>void; +} + + +export class Queue{ + #queue: WorkPackage[] = []; + /** + * Pointer to the jobs currently running. Allows force-stop in close() + */ + #current = new Set(); + #c = new AbortController(); + + /** + * Callback defined when queue is closing + */ + #settleResolve: (() => void) | null = null; + + constructor(public limit = Infinity, public name?:string) { + } + + toString(){ + return `Queue(${this.limit}, ${this.name || "anonymous"})` + } + + + /** + * Adds a task to the queue. + * The task will wait for an open slot before starting + */ + add(work: TaskPackage):Promise { + if(this.#c.signal.aborted){ + throw new Error(`Queue is closed. Can't add new tasks`); + } + return new Promise((resolve, reject) => { + this.#queue.push({ work, resolve, reject }); + this.#processNext(); + }); + } + + /** + * Jumps the queue and start processing a job immediately + * + * It is still counted in {@link Queue.activeCount}, but will ignore the queue's concurrency limit. + */ + unshift(work: TaskPackage):Promise { + if(this.#c.signal.aborted){ + throw new Error(`Queue is closed. 
Can't add new tasks`); + } + return new Promise((resolve, reject) => { + this.#run({ work, resolve, reject }); + }); + } + + #run(job: WorkPackage){ + this.#current.add(job); + + //Execute the job. When it settles, check if it was interrupted before calling the resolvers + Promise.resolve(job.work({signal: this.#c.signal})) + .then(result => { + if(this.#current.delete(job)) job.resolve(result); + }, error =>{ + if(this.#current.delete(job)) job.reject(error); + }) + .finally(()=>{ + // Notify waiters if all active tasks have completed + if(this.#current.size === 0 && this.#settleResolve){ + this.#settleResolve(); + this.#settleResolve = null; + } + this.#processNext(); + }); + } + + #processNext(){ + // Stop if we are busy or if the queue is empty + if (this.#current.size >= this.limit || this.#queue.length === 0 ) { + return; + } + // 3. Dequeue the next task + this.#run(this.#queue.shift()!); + } + + /** + * Close the queue gracefully + * Waits for active tasks to complete before aborting pending ones + * @param timeoutMs Maximum time to wait for active tasks. Defaults to 1 second. + */ + async close(timeoutMs: number = 1000){ + if(this.#c.signal.aborted){ + throw new Error(`Queue is already closed.`); + } + // Now abort and reject pending tasks + this.#c.abort(); + //Empty the queue (work not yet started) + for (let item of this.#queue){ + item.reject(this.#c.signal.reason ?? 
new Error("Queue closed")); + } + this.#queue = []; + //Return immediately if no jobs are currently processed + if(this.#current.size == 0) return; + //Otherwise wait for timeout to let jobs resolve properly + await new Promise((resolve, reject)=>{ + const _t = setTimeout(()=>{ + this.#settleResolve = null; + //Force-reject any running jobs + for(let job of this.#current){ + job.reject(new Error(`Queue close timeout: task did not stop within ${timeoutMs}ms`)); + } + this.#current.clear(); + resolve(); + }, timeoutMs); + + this.#settleResolve = ()=>{ + clearTimeout(_t); + resolve(); + }; + + }); + } + /** + * Number of jobs waiting for an execution slot + */ + get pendingCount() { return this.#queue.length; } + /** + * Number of jobs currently being executed + * It's possible to have `limit < activeCount` if {@link Queue.unshift} is used. + */ + get activeCount() { return this.#current.size; } + /** true if Queue has been closed */ + get closed(){ return this.#c.signal.aborted; } +} \ No newline at end of file diff --git a/source/server/tasks/scheduler.test.ts b/source/server/tasks/scheduler.test.ts new file mode 100644 index 000000000..e316ab945 --- /dev/null +++ b/source/server/tasks/scheduler.test.ts @@ -0,0 +1,459 @@ + + +// Tests for tasks management +// The system is a bit intricate and hard to test in isolation +import timers from "timers/promises"; +import { Client } from "pg"; +import openDatabase, { Database, DatabaseHandle } from "../vfs/helpers/db.js"; + +import { Uid } from "../utils/uid.js"; +import { randomBytes } from "node:crypto"; +import { TaskScheduler } from "./scheduler.js"; +import { CreateRunTaskParams, TaskDefinition } from "./types.js"; +import Vfs from "../vfs/index.js"; +import UserManager from "../auth/UserManager.js"; +import EventEmitter, { once } from "node:events"; + + +const makeTask = (props: Partial> = {}) => ({ + scene_id: null, user_id: null, data: {}, + handler: () => Promise.resolve(), + ...props, +}) + 
+describe("TaskScheduler", function () { + let db_uri: string, scene_id: number, user_id: number, handle: Database, client: Client; + + //Create a taskScheduler with minimal context + let scheduler: TaskScheduler<{ db: DatabaseHandle }>; + this.beforeAll(async function () { + db_uri = await getUniqueDb(this.currentTest?.title.replace(/[^\w]/g, "_")); + handle = await openDatabase({ uri: db_uri }); + + const userManager = new UserManager(handle); + const user = await userManager.addUser("alice", "12345678", "admin"); + user_id = user.uid; + const vfs = new Vfs("/dev/null", handle); + scene_id = await vfs.createScene(randomBytes(8).toString("base64url"), user_id); + }); + + this.afterAll(async function () { + await handle?.end(); + await dropDb(db_uri); + }); + + this.beforeEach(async function () { + scheduler = new TaskScheduler({ db: handle }); + }); + + this.afterEach(async function () { + await handle.run(`DELETE FROM tasks_logs`); + await handle.run(`DELETE FROM tasks`); + if (!scheduler.closed) await scheduler.close(); + }); + + it("creates an immediately-executed task", async function () { + const result = await scheduler.run({ + scene_id: null, + user_id: null, + type: "testTask", + data: {}, + handler: async ({ task }) => { + return task.task_id; + } + }); + expect(result).to.be.a("number"); + + const task = await scheduler.getTask(result); + expect(task).to.have.property("status", "success"); + expect(task).to.have.property("output", result); + }); + + it("initialize a task for later execution", async function () { + let task = await scheduler.create({ + scene_id: null, + user_id: null, + type: "testTask", + data: {}, + }); + + const output = await scheduler.runTask({ task, handler: async ({ task }) => task.task_id }); + expect(output).to.equal(task.task_id); + + task = await scheduler.getTask(output); + expect(task).to.have.property("status", "success"); + expect(task).to.have.property("output", output); + }); + + it("handles async tasks errors", async 
function () { + let id_ref: number; + await expect(scheduler.run({ + scene_id: null, + user_id: null, + type: "testTask", + data: {}, + handler: async ({ task }) => { + id_ref = task.task_id; + await Promise.reject(new Error("Some message")); + } + })).to.be.rejectedWith("Some message"); + + expect(id_ref!).to.be.a("number"); + + const task = await scheduler.getTask(id_ref!); + expect(task).to.have.property("status", "error"); + expect(task).to.have.property("output").ok; + }); + + + it("handles synchronous errors in handlers", async function () { + // PURPOSE: Verify that synchronous errors (not returned from async) are caught. + // This catches programming errors like accessing undefined properties without returning a rejected promise. + let id_ref: number; + + await expect(scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: ({ task }) => { + id_ref = task.task_id; + throw new Error("Sync error"); + } + })).to.be.rejectedWith("Sync error"); + + const task = await scheduler.getTask(id_ref!); + expect(task.status).to.equal("error"); + }); + + it("use a named function for task type", async function () { + const result = await scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async function testTask({ task }) { + return task.task_id; + } + }); + expect(result).to.be.a("number"); + + const task = await scheduler.getTask(result); + expect(task).to.have.property("status", "success"); + expect(task).to.have.property("type", "testTask"); + }); + + it("data is never null inside a task", async function () { + let ok = false; + await scheduler.run(makeTask({ + handler: ({ task }) => { + expect(task).to.have.property("data").an("object"); + ok = true; + } + })); + expect(ok, `Task seems to not have been run`).to.be.true; + }); + + it("propagates abort signals", async function () { + let progress = new EventEmitter(); + let c = new AbortController(); + const runningTask = scheduler.run({ + scene_id: null, + user_id: null, + data: 
{}, + signal: c.signal, + handler: async ({ context: { signal } }) => { + progress.emit("start"); + await timers.setTimeout(1000, null, { signal }); // Slow task, with abort + return "task1"; + } + }); + await once(progress, "start"); + c.abort(); + await expect(runningTask).to.be.rejected; + }); + + it("won't deadlock itself", async function () { + scheduler.concurrency = 2; + + let tasks = []; + for (let i = 0; i < 4; i++) { + tasks.push(scheduler.run(makeTask({ + handler: async () => { + await timers.setTimeout(1); + await scheduler.run(makeTask()); + } + }))); + } + await Promise.all(tasks); + }); + + it("nested tasks with limited concurrency don't deadlock (deep nesting)", async function () { + scheduler.concurrency = 2; + + const executed: number[] = []; + + const result = await scheduler.run(makeTask({ + handler: async () => { + executed.push(1); + await scheduler.run(makeTask({ + handler: async () => { + executed.push(2); + await timers.setTimeout(0); + await scheduler.run(makeTask({ + handler: async () => { + executed.push(3); + } + })); + } + })); + } + })); + + expect(executed).to.deep.equal([1, 2, 3]); + }); + + it("nested tasks get their parent's attributes", async function () { + let children: Array = []; + + const parent_id = await scheduler.run(makeTask({ + scene_id, + user_id, + handler: async function ({ task, context: { tasks } }) { + await tasks.run(makeTask({ + handler: async ({ task }) => { + children.push(task); + } + })); + await scheduler.run(makeTask({ + handler: async ({ task }) => { + children.push(task); + } + })); + return task.task_id; + } + })); + + expect(parent_id).to.be.a("number"); + expect(children).to.have.length(2); + for (let i = 0; i < children.length; i++) { + const child = children[i]; + expect(child).to.have.property("parent", parent_id); + expect(child).to.have.property("scene_id", scene_id); + expect(child).to.have.property("user_id", user_id); + } + }); + + it("group runs function-items inside nest and preserves order 
(Promise.all-like)", async function () { + //Also, checks for deadlocks + function* gen() { + for (let i = 0; i < 100; i++) { + yield timers.setTimeout(1, i); + } + } + + const res = await scheduler.group(() => gen()); + expect(res).to.have.length(100); + for (let i = 0; i < 100; i++) { + expect(res[i]).to.equal(i); + } + }); + + it("Can group promises", async function () { + const calls: string[] = []; + + const res = await scheduler.group(() => ([ + timers.setTimeout(5, 1), + timers.setTimeout(5, 2), + ])); + expect(res).to.deep.equal([1, 2]); + }); + + it("accepts a generator function (callable) and preserves results", async function () { + function* gen() { + yield timers.setTimeout(1, 1); + yield timers.setTimeout(1, 2); + } + const res = await scheduler.group(gen); + expect(res).to.deep.equal([1, 2]); + }); + + it("generator function runs inside nest and shares context name", async function () { + const ctxNames: string[] = []; + function* gen() { + yield (async () => { ctxNames.push(String(scheduler.context().queue.name)); return 1; })(); + yield (async () => { ctxNames.push(String(scheduler.context().queue.name)); return 2; })(); + } + const res = await scheduler.group(gen); + expect(res).to.deep.equal([1, 2]); + expect(ctxNames).to.have.length(2); + for (const n of ctxNames) expect(n).to.match(/\[GROUP\]/); + expect(ctxNames[0], `Expected contexts to all have the same name`).to.equal(ctxNames[1]); + }); + + + it("handles errors in nested tasks without deadlocking", async function () { + // PURPOSE: Verify that if a nested task throws an error, the parent task + // receives that error and can handle it appropriately without deadlocking. + // This is critical because async context nesting could easily cause deadlocks + // if error propagation isn't handled correctly. 
+ let parentTaskId: number; + let childTaskId: number; + + await expect(scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async ({ task }) => { + parentTaskId = task.task_id; + await expect(scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async ({ task }) => { + childTaskId = task.task_id; + throw new Error("Child failed"); + } + })).to.be.rejectedWith("Child failed"); + throw new Error("Parent failed"); + } + })).to.be.rejectedWith("Parent failed"); + + const parent = await scheduler.getTask(parentTaskId!); + const child = await scheduler.getTask(childTaskId!); + expect(parent.status).to.equal("error"); + expect(child.status).to.equal("error"); + }); + + + + it("rejects new tasks after close()", async function () { + // PURPOSE: Verify that Queue.close() properly transitions to a closed state + // and prevents new tasks from being added. This prevents memory leaks from + // accumulated tasks that will never execute. + await scheduler.close(); + + // Attempting to add a task to closed scheduler should fail + await expect(scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async () => "test" + })).to.be.rejected; + }); + + it("many sequential tasks don't leak memory or resources", async function () { + // PURPOSE: Run many tasks in sequence to verify that AsyncLocalStorage + // contexts and queue workers are properly cleaned up after each task, + // not accumulating in memory. This tests for classic event listener leaks, + // promise chain leaks, etc. 
+ this.timeout(10000); + + const count = 50; + const results = []; + + for (let i = 0; i < count; i++) { + const result = await scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async ({ task }) => task.task_id + }); + results.push(result); + } + + expect(results).to.have.length(count); + // Verify all tasks completed successfully + for (let id of results) { + const task = await scheduler.getTask(id); + expect(task.status).to.equal("success"); + } + }); + + it("many nested tasks don't cause stack overflow or memory issues", async function () { + // PURPOSE: Test that deeply nested async contexts don't cause stack overflow + // or accumulate memory. AsyncLocalStorage should clean up properly as + // contexts exit. This is a regression test for context chain leaks. + this.timeout(10000); + + const depth = 10; + let maxDepth = 0; + + const runNested = async (d: number): Promise => { + if (d === depth) return d; + maxDepth = Math.max(maxDepth, d); + + return await scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async () => runNested(d + 1) + }); + }; + + const result = await runNested(0); + expect(result).to.equal(depth); + expect(maxDepth).to.be.greaterThan(0); + }); + + it("group can run tasks concurrently", async function () { + this.timeout(500) + const results = await scheduler.group(function* () { + for (let i = 0; i < 60; i++) { + yield timers.setTimeout(10, i); + } + }); + expect(results).to.have.length(60); + }); + + it("callback-based error handling doesn't cause unhandled rejections", async function () { + // PURPOSE: When using the optional callback parameter on run(), + // ensure errors are passed to the callback, not left as unhandled rejections. + // Unhandled rejections can crash the process in strict environments. 
+ let callbackError: any = null; + + const promise = scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async () => { + throw new Error("Error in handler"); + } + }, (err) => { + callbackError = err; + }); + + // The promise should also reject + await expect(promise).to.be.rejected; + // The callback should have received the error + await timers.setTimeout(10); // Give callback time to execute + expect(callbackError).to.be.instanceof(Error); + expect(callbackError.message).to.include("Error in handler"); + }); + + it("closing scheduler with pending tasks cleans up properly", async function () { + this.timeout(5000); + + scheduler.concurrency = 1; + + let task1Executed = new EventEmitter(); + + // Task 1: Will be running when we close + const runningTask = scheduler.run({ + scene_id: null, + user_id: null, + data: {}, + handler: async ({ context: { signal } }) => { + task1Executed.emit("start"); + await timers.setTimeout(1000, null, { signal }); // Slow task, with abort + return "task1"; + } + }); + // Give task1 time to start executing + await once(task1Executed, "start"); + + // Close scheduler + await scheduler.close(100); + + // The running task should abort + await expect(runningTask).to.be.rejectedWith("aborted"); + }); +}); \ No newline at end of file diff --git a/source/server/tasks/scheduler.ts b/source/server/tasks/scheduler.ts new file mode 100644 index 000000000..973c4844c --- /dev/null +++ b/source/server/tasks/scheduler.ts @@ -0,0 +1,254 @@ +import { debuglog } from "node:util"; +import { AsyncLocalStorage } from 'node:async_hooks'; +import { DatabaseHandle } from "../vfs/helpers/db.js"; +import { Queue } from "./queue.js"; +import { CreateRunTaskParams, CreateTaskParams, ITaskLogger, RunOptions, RunTaskParams, TaskDataPayload, TaskDefinition, TaskHandler, TaskHandlerContext, TaskPackage, TaskSchedulerContext, TaskSettledCallback, TaskStatus, } from "./types.js"; +import { createLogger } from "./logger.js"; +import { TaskManager 
} from "./manager.js"; +import { BadRequestError, InternalError } from "../utils/errors.js"; + +// Note: previously used stream/once for generator bridge; no longer required for Promise.all-style group + + +const debug = debuglog("tasks:scheduler"); + + +interface AsyncContext { + queue: Queue; + parent: { + task_id: number; + user_id: number | null; + scene_id: number | null; + logger: ITaskLogger; + } | null; +} + +type NestContextProps = { + name: string; + parent: AsyncContext["parent"]; + /** + * Tasks concurrency for this context. Default is 1 (except for the root context). + * Infinity can be provided if we don't care. + */ + concurrency: number; +} + + +export class TaskScheduler extends TaskManager { + //Do not use "real" private members here because they would be missed by Object.create + /** + * Work queue. used internally by {@link TaskScheduler.run} to run jobs + * */ + private readonly rootQueue = new Queue(4, "root"); + + + public get taskContext() { + return this._context; + } + + public get concurrency() { + return this.rootQueue.limit; + } + public set concurrency(value) { + this.rootQueue.limit = value; + } + + public get closed() { + return this.rootQueue.closed; + } + + constructor(protected _context: TContext) { + super(_context.db); + + //AsyncLocalStorage is here instead of as a class member + // because we want to only use it through the interfaces provided below + const asyncStore = new AsyncLocalStorage(); + this.context = () => { + return (asyncStore.getStore() as any ?? { queue: this.rootQueue, parent: null }) satisfies AsyncContext; + } + this.nest = async ({ parent, name, concurrency = 1 }, work, ...args) => { + const q = new Queue(concurrency, name); + try { + return await asyncStore.run({ queue: q, parent } satisfies AsyncContext, work, ...args); + } finally { + await q.close(); + } + } + } + + /** + * Close the scheduler gracefully + * @param timeoutMs Maximum time to wait for active tasks to complete. Defaults to 30 seconds. 
+ */ + async close(timeoutMs?: number) { + await this.rootQueue.close(timeoutMs); + super.close(); + } + /** + * Retrieve the current async context + * + * Async contexts allow nesting calls to {@link TaskScheduler.run()} without risking a deadlock: + * Each nesting level gets its own concurrency context with a default concurrency of one. + */ + public readonly context: () => AsyncContext; + /** + * Run `work` inside a new async context with the given name and concurrency settings + * Calls to {@link TaskScheduler._run} within this async context will resolve to the nested Queue + */ + public readonly nest: (props: NestContextProps, work: (...args: T) => U, ...args: T) => Promise; + + + /** + * Internal task running handler. + * + * Builds a context and actually schedule the task, handling the status change from `"pending"` to `"running"` and to `"complete"|"error"` + * + * Unless there is a problem with the database connection, the task is guaranteed to end up with `status = "complete"|"error"` + * @param handler Handler functionthat will perform the job + * @param task Task definition + * @param param2 Additional options for the task runner + * @param param2.immediate jumps the queue and run the task immediately regardless of concurrency settings + * @returns + */ + private async _run( + handler: TaskHandler, + task: TaskDefinition, + { signal: taskSignal, immediate }: RunOptions = {} + ) { + // Create a wrapper function around the handler to provide the task's execution context + // and set its status + const work: TaskPackage = async ({ signal: queueSignal }) => { + await using logger = createLogger(this.db, task.task_id); + const context: TaskHandlerContext = { + ...this.taskContext, + tasks: Object.create(this), + logger, + signal: taskSignal ? 
(AbortSignal as any).any([taskSignal, queueSignal]) : queueSignal, + }; + + const thisContext: AsyncContext["parent"] = { + task_id: task.task_id, + scene_id: task.scene_id, + user_id: task.user_id, + logger, + }; + + await this.takeTask(task.task_id); + try { + const output = await this.nest({ concurrency: 1, name: `${task.type}#${task.task_id.toString()}`, parent: thisContext }, handler.bind(context), { context, task }) + await this.releaseTask(task.task_id, output); + return output; + } catch (e: any) { + //Here we might make an exception if e.name === "AbortError" and the database is closed + await this.errorTask(task.task_id, e).catch(e => console.error("Failed to set task error : ", e)); + throw e; + } + } + + const async_ctx = this.context(); + //Custom name for work to be shown in stack traces + Object.defineProperty(work, 'name', { value: `TaskScheduler.payload<${task.type}>(${task.task_id})@${async_ctx.queue.name}` }); + if (async_ctx.parent?.logger) { + async_ctx.parent.logger.debug(`Schedule child task ${task.type}#${task.task_id}`); + } + debug("Schedule work for task #%d on Queue(%s)", task.task_id, async_ctx.queue.name); + return await (immediate ? async_ctx.queue.unshift(work) : async_ctx.queue.add(work)); + } + + /** + * Registers a task to run as soon as possible and wait for its completion. + * + * It's OK to ignore the returned promise if a callback is provided to at least properly log the error + * + * `TaskScheduler.run()` uses async context tracking to inherit **scene_id**, **user_id** and **parent** from it's context + * However those can still be forced to another value if deemed necessary. + * Whether or not this override is desirable is yet unclear. 
+ */ + async run( + params: CreateRunTaskParams, + callback?: TaskSettledCallback + ): Promise { + //We use context to inherit parent, user_id and scene_id + //But if different values are explicitly specified it's possible to "break out" + //Whether or not this is + const { parent } = this.context(); + const task: TaskDefinition = await this.create({ + ...params, + data: params.data as any, + type: (params.type ?? params.handler.name) as string, + status: "pending" as TaskStatus, + parent: parent?.task_id ?? null, + }); + const _p = this._run(params.handler, task, { signal: params.signal, immediate: params.immediate }); + + if (typeof callback === "function") { + _p.then((value) => callback(null, value), (err) => callback(err)); + } + return _p; + } + + + /** + * Run a handler on an externally-created task definition + * + * This is less safe than {@link TaskScheduler.run} because we _trust_ the task definition to be up to date. It's an error to call it with a stale task definition. + * @param param0 + * @param callback + */ + async runTask({ task, signal, handler }: RunTaskParams, callback?: TaskSettledCallback): Promise { + const _p = this._run(handler, task, { signal: signal, immediate: false }); + if (typeof callback === "function") { + _p.then((value) => callback(null, value), (err) => callback(err)); + } + return _p; + } + + + /** + * Create a task with async-context awareness + * This is exposed in case it is ever needed but it's probably always better to call {@link TaskScheduler.run} + * {@link TaskManager.create} for the base method + */ + override async create(params: CreateTaskParams): Promise> { + const { parent } = this.context(); + if (parent) debug(`Inherit values from Parent task #${parent.task_id}: ${parent.scene_id ? "Scene: " + parent.scene_id : ""} ${parent.user_id ? 
"User: " + parent.user_id : ""}`); + if (!params.scene_id && parent?.scene_id) params.scene_id = parent.scene_id; + if (!params.user_id && parent?.user_id) params.user_id = parent.user_id; + if (!params.parent && parent?.task_id) params.parent = parent.task_id; + return await super.create(params); + } + + /** + * Join a task that has been created through {@link queue} + */ + async join(task_id: number) { + //It's yet unclear if this is really needed + throw new Error("Unimplemented"); + } + + /** + * Sometimes we want the concurrency settings to be ignored. + * This creates an internal context with infinite concurrency that allows everything to run in parallel + * @TODO : allow generators here + */ + // Accept either a generator function or a factory that returns an iterable of promises. + group(work: () => Generator, any, any>): Promise; + group(work: () => Iterable>): Promise; + group(work: () => Generator, any, any> | Iterable>): Promise { + const async_ctx = this.context(); + if (async_ctx.parent?.logger) { + async_ctx.parent.logger.debug(`Create tasks group`); + } + + if (typeof work !== 'function') throw new TypeError('group expects a function (factory or generator function)'); + + // Run once inside nest so the iterable is created and consumed in the nested async context + return this.nest({ name: `${async_ctx.queue.name}[GROUP]`, parent: async_ctx.parent, concurrency: Infinity }, async () => { + const iterable = (work as () => Iterable>)(); + // materialize inside nested context so iterator.next() runs under nest + const arr = [...iterable]; + return await Promise.all(arr); + }) as unknown as Promise; + } + +} \ No newline at end of file diff --git a/source/server/tasks/types.ts b/source/server/tasks/types.ts new file mode 100644 index 000000000..1df32e6de --- /dev/null +++ b/source/server/tasks/types.ts @@ -0,0 +1,206 @@ +import type UserManager from "../auth/UserManager.js"; +import { Config } from "../utils/config.js"; +import { TDerivativeQuality } from 
"../utils/schema/model.js"; +import { RootRelativePath } from "../vfs/Base.js"; +import { DatabaseHandle } from "../vfs/helpers/db.js"; +import type Vfs from "../vfs/index.js"; +import { TaskScheduler } from "./scheduler.js"; + + +export type TaskData = Record +export type TaskDataPayload = undefined|TaskData; + +export type TaskStatus = 'initializing'|'pending'|'aborting'|'running'|'success'|'error'; +export enum ETaskStatus{ + 'aborting' = -2, + 'error' = -1, + 'initializing', + 'running', + 'pending', + 'success', +} + + + +/** + * Task Creation parameters + */ +export interface TaskDefinition{ + scene_id: number|null; + user_id: number|null; + task_id: number; + ctime: Date; + type :string; + parent: number|null; + /** **Unordered** list of task requirements */ + after: number[]; + data: TData extends undefined? {}: TData; + output: TReturn; + status: TaskStatus; +}; + + + +export interface TaskSchedulerContext{ + vfs: Vfs, + db: DatabaseHandle, + userManager: UserManager, + config: Config, +} + +export type TaskHandlerContext = T & { + tasks: TaskScheduler, + logger: ITaskLogger, + signal: AbortSignal, +}; + +/** + * Parameters passed to a task handler when it is invoked + */ +export interface TaskHandlerParams{ + task: TaskDefinition; + context: TaskHandlerContext; +} + +/** + * In the future we might want to support tasks that yield sub-tasks using return value `AsyncGenerator` + */ +export type TaskHandler = (this: TaskHandlerContext, params:TaskHandlerParams)=> TReturn|Promise; + +/** + * Bound TaskHandler work package + */ +export type TaskPackage = (params: {signal: AbortSignal})=>Promise; + + +type TaskDataRequirement = T extends undefined + ? 
{ data?: never } + : { data: T }; + + +type TaskCreateCommonParameters = { + scene_id?: number|null; + user_id?: number|null; + parent?: number|null; +} + +export interface RunOptions{ + signal?:AbortSignal; + immediate?:boolean; +} + + +/** + * Parameters to create a task + */ +export type CreateTaskParams = TaskCreateCommonParameters &{ + type: string; + status?: TaskStatus; + data: TData; +}; + + +export type CreateRunTaskParams = + TaskCreateCommonParameters & TaskDataRequirement & { + handler: TaskHandler; + type?: string; + status?:"pending"; + immediate?: boolean; + signal?: AbortSignal; + /** Can't create an immediately-running task with a status other than pending */ +}; + +/** + * Run a task that was previously created + */ +export interface RunTaskParams{ + task: TaskDefinition; + handler: TaskHandler; + immediate?: boolean; + signal?: AbortSignal; +} + +export interface TaskSettledCallback { + (err:null, value:T):unknown + (err:any):unknown +} + +export interface ITaskLogger{ + debug: (message?: any, ...optionalParams: any[]) => void, + log: (message?: any, ...optionalParams: any[]) => void, + warn: (message?: any, ...optionalParams: any[]) => void, + error: (message?: any, ...optionalParams: any[]) => void, +} + +export type LogSeverity = keyof ITaskLogger; + +/** + * A single log line produced by a task + */ +export interface TaskLogEntry{ + log_id: number; + task_id: number; + timestamp: Date; + severity: LogSeverity; + message: string; +} + +/** + * A task node in the tree, carrying its direct children. + * The `parent` field allows reconstructing the graph from a flat list if needed. + */ +export interface TaskNode + extends TaskDefinition { + children: TaskNode[]; +} + +/** + * Task list item returned by `getTasks()` in summary views. + * + * Includes nullable `scene` and `owner` fields which map to the referenced + * `scenes.scene_name` and `users.username` respectively. These fields may be + * `null` when the task has no linked scene or user. 
+ */ +export interface TaskListItem + extends TaskDefinition { + scene: string | null; + owner: string | null; +} + +/** + * Result returned by {@link TaskManager.getTaskTree}. + * + * - `root` is the requested task as a {@link TaskNode}; its `children` array + * contains the direct children, each of which recursively carries their own + * `children`, forming a proper tree. The `parent` field on every node is + * preserved so the tree can also be flattened back to a list if needed. + * - `logs` is a **flat, ordered array** of every log line produced by any task in + * the tree, sorted by `log_id ASC` (i.e. insertion order). + */ +export interface TaskTreeResult{ + /** The requested task, with descendants nested under `children` recursively */ + root: TaskNode; + /** All log lines from every task in the tree, ordered by log_id ASC */ + logs: TaskLogEntry[]; +} + +export interface TaskHandlerDefinition{ + readonly type: string; + handle: TaskHandler; +}; + + +// Common task data types +export interface FileArtifact extends TaskData{ + fileLocation: RootRelativePath +} + +export function isArtifactTask(output:TaskData):output is FileArtifact{ + return typeof output?.fileLocation == "string"; +} + + + +export interface ProcessFileParams extends FileArtifact{ + preset: TDerivativeQuality; +} diff --git a/source/server/templates/locales/en.yml b/source/server/templates/locales/en.yml index 4546bbedd..1314954c8 100644 --- a/source/server/templates/locales/en.yml +++ b/source/server/templates/locales/en.yml @@ -57,10 +57,21 @@ labels: batchTags: Change selection's tags batchAddTag: Add this tag to selected scenes batchRmTag: Remove this tag from selected scenes + sceneName: Scene name + mine: Mine + all: All + success: Success + error: Error + allTasks: All tasks + rootOnly: Root tasks only + noTasks: No tasks found. + noLogs: No logs. 
+ optimizeModel: Optimize models buttons: searchScene: search scenes - upload: create a scene + uploadScene: create a scene + uploadArchive: extract useStandalone: use standalone mode logout: Disconnect next: "Next Page" @@ -103,6 +114,12 @@ fields: create: Creator manage: Editor admin: Administrator + owner: Owner + parent: Parent + scene: Scene + type: Type + status: Status + minLevel: Min level nav: tags: collections login: connection @@ -150,6 +167,11 @@ titles: modifiedToday: Scenes modified today buildRef: Current version previewEmail: Preview email templates + createdScenes: Created scenes + myTasks: My tasks + taskTree: Task tree + logs: Logs + advancedConfiguration: Advanced configuration tooltips: showPassword: Show the password's text @@ -214,9 +236,23 @@ leads: noArchives: No archived scenes noGroups: This user has not been added to any group poweredByECorpus: Powered by eCorpus under the Apache-2.0 license + uploadFiles: Drop your files here + uploadMixedContent: > + Uploaded files are a mix of scenes archive and regular files. Mixed content can't be handled in a single operation. + uploadName: > + This will be the URL name of the created scene. + It is language-independent. You will be able to create a title for each language later. + uploadLanguage: The scene's default language + uploadOptimize: Optimize 3D models for better performance. This may take a bit more time. + uploadErrors: > + Some errors were encountered while processing the uploaded files. Please check errors above. 
+ + errors: generic: Unknown Error "Username not provided": Username not provided "Password not provided": Password not provided "Bad password": Invalid password "Username not found": Invalid Username + "requireCreate": "{{what}} requires \"create\" rights" + "requireUser": "Authentication is required for {{what}}" diff --git a/source/server/templates/locales/fr.yml b/source/server/templates/locales/fr.yml index 81ae95b77..af9498681 100644 --- a/source/server/templates/locales/fr.yml +++ b/source/server/templates/locales/fr.yml @@ -57,10 +57,20 @@ labels: batchTags: Change selection's tags batchAddTag: Ajouter ce tag aux scènes sélectionnées batchRmTag: Retirer ce tag des scènes sélectionnées + mine: Mes tâches + all: Tous + success: Succès + error: Erreur + allTasks: Toutes les tâches + rootOnly: Parents uniquement + noTasks: Aucune tâche trouvée. + noLogs: Aucun journal. + optimizeModel: Optimiser les modèles buttons: searchScene: chercher une scène - upload: créer une scène + uploadScene: créer une scène + uploadArchive: extraire download: Télécharger la scène downloadSelection: Télécharger la sélection useStandalone: utiliser le mode Standalone @@ -103,6 +113,12 @@ fields: create: Créateur manage: Editeur admin: Administrateur + owner: Propriétaire + parent: Parent + scene: Scène + type: Type + status: Statut + minLevel: Niveau min nav: tags: collections login: connexion @@ -150,6 +166,10 @@ titles: modifiedToday: Scènes modifiées aujourd'hui buildRef: Version actuelle previewEmail: Voir les templates d'emails + myTasks: Mes tâches + taskTree: Arbre des tâches + logs: Journaux + advancedConfiguration: Configuration avancée tooltips: showPassword: Montrer le texte du mot de passe @@ -219,6 +239,19 @@ leads: noArchives: Pas de scènes archivées noGroups: Cet utilisateur n'est ajouté à aucun groupe poweredByECorpus: Propulsé par eCorpus sous license Apache-2.0 + uploadFiles: Déposez vos fichiers ici + uploadMixedContent: > + Impossible de traiter cet ensemble: Les 
fichiers déposés sont un mélange d'archives de scènes et de fichiers individuels. + uploadName: > + Ce nom sera utilisé dans l'URL de la scène créée. + Il est indépendant de la langue utilisée. + Il sera possible de créer un titre par langue plus tard. + uploadLanguage: La langue principale de la scène. + uploadOptimize: Optimiser les modèles pour une meilleure performance. Cela peut prendre un peu plus de temps. + uploadErrors: > + Des erreurs sont survenues pendant le transfert des fichiers. + Veuillez vérifier les messages ci-dessus et corriger les problèmes avant de continuer. + errors: generic: Erreur inconnue "Username not provided": Nom d'utilisateur non fourni diff --git a/source/server/templates/task.hbs b/source/server/templates/task.hbs new file mode 100644 index 000000000..9d074ff93 --- /dev/null +++ b/source/server/templates/task.hbs @@ -0,0 +1,88 @@ + +
+ +

+ Task #{{root.task_id}} — {{root.type}} + + ‹ + +

+ + {{#if root.scene_id}} +

Scene #{{root.scene_id}}

+ {{/if}} + +
+
+

{{i18n "titles.taskTree"}}

+ {{#if root.parent}}Child of #{{root.parent}}{{/if}} +
+
+ {{> taskNode root}} +
+
+ +
+
+

{{i18n "titles.logs" default="Logs"}} ({{logs.length}})

+
+
+ +
+
+
+ {{#if logs.length}} +
+ + + + + + + + + + + {{#each logs}} + + + + + + + {{/each}} + +
timetasklevelmessage
{{dateString timestamp}}#{{task_id}}{{severity}}{{message}}
+
+ {{else}} +

{{i18n "labels.noLogs" default="No logs."}}

+ {{/if}} +
+ +
+ +{{!-- Recursive partial for task nodes --}} +{{#*inline "taskNode"}} +
+
+ #{{task_id}} + {{type}} + {{status}} +
+ {{#if children.length}} +
+ {{#each children}} + {{> taskNode this}} + {{/each}} +
+ {{/if}} +
+{{/inline}} diff --git a/source/server/templates/tasks.hbs b/source/server/templates/tasks.hbs new file mode 100644 index 000000000..88b35c329 --- /dev/null +++ b/source/server/templates/tasks.hbs @@ -0,0 +1,93 @@ + +
+ +

{{i18n "titles.myTasks" default="My tasks"}}

+ +
+
+ {{#if (test user.level "==" "admin")}} + + {{/if}} + + + + + + +
+ {{#if tasks.length}} + + + + + {{#if (test params.owner "==" "all") }}{{/if}} + {{#unless params.rootOnly }}{{/unless}} + + + + + + + {{#each tasks}} + + + {{#if (test @root.params.owner "==" "all") }}{{/if}} + {{#unless @root.params.rootOnly }} + + {{/unless}} + + + + + + {{/each}} + +
{{i18n "fields.ctime"}}{{i18n "fields.owner"}}{{i18n "fields.parent"}}{{i18n "fields.scene"}}{{i18n "fields.type"}}{{i18n "fields.status"}}
{{dateString ctime}}{{owner}} + {{#if parent}} + #{{ parent }} + {{else}} + — + {{/if}} + + {{#if scene}} + {{ scene }} + {{else}} + — + {{/if}} + + + {{type}}#{{task_id}} + + + + {{status}} + +
+ {{else}} +

{{i18n "labels.noTasks" default="No tasks found."}}

+ {{/if}} +
+ +
diff --git a/source/server/templates/upload.hbs b/source/server/templates/upload.hbs index 9e9109a5f..f590db705 100644 --- a/source/server/templates/upload.hbs +++ b/source/server/templates/upload.hbs @@ -1,42 +1,123 @@ -

{{i18n "titles.upload"}}

- -
-
-

{{i18n "titles.createOrUpdateScene"}} - {{#> popover id="upload-tooltip" type="button" }}{{i18n "tooltips.upload"}}{{/popover}} -

-
-
- - -
-
-
-
- - -
- -
- - -
-
- -
+
+ +

{{i18n "titles.upload"}}

+
+ {{#if scenes.length }} +
+

{{i18n "titles.createdScenes" }}

+ 🗙 + + +
+ {{#each scenes}} + {{#if error }} + {{ error }} + {{else}} + + + {{/if}} + {{/each}}
- - + + {{/if}} +
+ + +

{{i18n "titles.createOrUpdateScene"}}

+ + {{i18n "labels.selectFile_s"}} + +

{{i18n "leads.uploadFiles"}}

+ +

{{i18n "leads.uploadMixedContent"}}

+ +
+
+ +
+
+ + +
+
+ +
+
+ + +
+
+ +
+ +
+

{{i18n "titles.advancedConfiguration"}}

+
+ + +
+
+ + + +
+ + + + {{i18n "leads.uploadErrors"}} +
+ +
+
+
diff --git a/source/server/tests-common.ts b/source/server/tests-common.ts index 914dc4d85..72a94806b 100644 --- a/source/server/tests-common.ts +++ b/source/server/tests-common.ts @@ -69,7 +69,7 @@ global.dropDb = async function(uri: string){ } global.createIntegrationContext = async function(c :Mocha.Context, config_override :Partial={}){ - let {default:createServer} = await import("./routes/index.js"); + let {default:createService} = await import("./create.js"); let titleSlug = "t_"+ (c.currentTest?.title.replace(/[^\w]/g, "_") ?? `eCorpus_integration`)+"_"+randomBytes(4).toString("hex"); c.db_uri = await getUniqueDb(titleSlug); c.dir = await fs.mkdtemp(path.join(tmpdir(), titleSlug)); @@ -83,12 +83,13 @@ global.createIntegrationContext = async function(c :Mocha.Context, config_overri //Options we might want to customize config_override ); - c.server = await createServer( c.config ); + c.services = await createService( c.config ); + c.server = c.services.app; return c.server.locals; } global.cleanIntegrationContext = async function(c :Mocha.Context){ - await c.server.locals.vfs.close(); + await c.services?.close(); await dropDb(c.db_uri); if(c.dir) await fs.rm(c.dir, {recursive: true}); } \ No newline at end of file diff --git a/source/server/tsconfig.json b/source/server/tsconfig.json index 1b5a0baaa..a4bb305c6 100644 --- a/source/server/tsconfig.json +++ b/source/server/tsconfig.json @@ -1,6 +1,8 @@ { "compilerOptions": { - "lib": ["ES2021"], + "lib": [ + "ESNext" + ], "module": "NodeNext", "target": "ES2021", "strict": true, @@ -8,17 +10,27 @@ "skipLibCheck": true, "forceConsistentCasingInFileNames": true, "sourceMap": true, - "types": ["mocha", "chai", "chai-as-promised"], + "types": [ + "mocha", + "chai", + "chai-as-promised" + ], "outDir": "./dist", "rootDir": "." 
}, "files": [ "index.ts", - "healthcheck.ts" + "healthcheck.ts", + "./tasks/handlers/createDocumentFromFiles.ts" + ], + "include": [ + "./**/*.ts" ], - "include": ["./**/*.ts"], "exclude": [ "node_modules" ], - "ts-node": { "esm": true, "files": true } -} + "ts-node": { + "esm": true, + "files": true + } +} \ No newline at end of file diff --git a/source/server/utils/archives.test.ts b/source/server/utils/archives.test.ts new file mode 100644 index 000000000..e2336281d --- /dev/null +++ b/source/server/utils/archives.test.ts @@ -0,0 +1,22 @@ +import { parseFilepath } from "./archives.js" + + + +describe("parseFilepath()", function(){ + it("ignores base `scenes` directory", function(){ + expect(parseFilepath(`/scenes/`)).to.deep.equal({isDirectory: true}); + }); + it("finds scene name and relative file path", function(){ + expect(parseFilepath(`/scenes/foo/scene.svx.json`)).to.deep.equal({scene: "foo", name:"scene.svx.json", isDirectory: false}); + }); + it("finds nested paths", function(){ + expect(parseFilepath(`/scenes/foo/articles/hello.html`)).to.deep.equal({scene: "foo", name:"articles/hello.html", isDirectory: false}); + }); + it("find nested folders", function(){ + //Also, strips trailing slash. + expect(parseFilepath(`/scenes/foo/articles/`)).to.deep.equal({scene: "foo", name:"articles", isDirectory: true}); + }) + it("find scene folder", function(){ + expect(parseFilepath(`/scenes/foo/`)).to.deep.equal({scene: "foo", name: undefined, isDirectory: true}); + }) +}) \ No newline at end of file diff --git a/source/server/utils/archives.ts b/source/server/utils/archives.ts new file mode 100644 index 000000000..660d9960d --- /dev/null +++ b/source/server/utils/archives.ts @@ -0,0 +1,36 @@ + +export interface ParsedFileEntry{ + scene?: string; + /** + * full path to the file from inside the scene's scope + * eg: `articles/foo-xyz.html` + */ + name?: string; + + /**True if file is a directory. 
*/ + isDirectory: boolean; +} + +/** + * Parse an archive entry's name to extract its scene name + * @param filepath + * @returns Undefined if a file is definitely not scoped to a scene + */ +export function parseFilepath(filepath: string): ParsedFileEntry{ + const isDirectory = filepath.endsWith("/"); + const pathParts = filepath.split("/").filter(p=>!!p); + if(pathParts[0] == "scenes") pathParts.shift(); + if(pathParts.length === 0) return {isDirectory}; + const scene = pathParts.shift()!; + const name = pathParts.join("/"); + return { + scene, + name: name.length? name: undefined, + isDirectory, + } +} + +export function isMainSceneFile(filename: string){ + const name = filename.toLowerCase(); + return name.endsWith(".svx.json") || name.endsWith("index.html"); +} \ No newline at end of file diff --git a/source/server/utils/config.ts b/source/server/utils/config.ts index 97f7cf34f..891a15436 100644 --- a/source/server/utils/config.ts +++ b/source/server/utils/config.ts @@ -12,6 +12,7 @@ const values = { root_dir: [ process.cwd(), toPath], migrations_dir: [path.join(process.cwd(),"migrations"), toPath], templates_dir: [path.join(process.cwd(),"templates"), toPath], + scripts_dir: [path.join(process.cwd(),"scripts"), toPath], files_dir: [({root_dir}:{root_dir:string})=> path.resolve(root_dir,"files"), toPath], dist_dir: [({root_dir}:{root_dir:string})=> path.resolve(root_dir,"dist"), toPath], assets_dir: [undefined, toPath], diff --git a/source/server/utils/errors.ts b/source/server/utils/errors.ts index 9edcf76fd..753b15066 100644 --- a/source/server/utils/errors.ts +++ b/source/server/utils/errors.ts @@ -1,6 +1,3 @@ -import util from "node:util"; -import { NextFunction, Request, Response } from "express"; -import { useTemplateProperties } from "./locals.js"; export class HTTPError extends Error{ @@ -32,13 +29,25 @@ export class NotFoundError extends HTTPError { } } +export class MethodNotAllowedError extends HTTPError{ + constructor(reason :string = "Method Not 
Allowed"){ + super(405, reason); + } +} + export class ConflictError extends HTTPError { constructor(reason :string="Conflict"){ super(409, reason); } } -export class RangeNotSatisfiable extends HTTPError { +export class LengthRequiredError extends HTTPError{ + constructor(reason: string = "Length Required"){ + super(411, reason); + } +} + +export class RangeNotSatisfiableError extends HTTPError { constructor(reason :string="Range Not Satisfiable"){ super(416, reason); } @@ -49,3 +58,9 @@ export class InternalError extends HTTPError { super(500, reason); } } + +export class NotImplementedError extends HTTPError{ + constructor(reason :string="Not Implemented"){ + super(501, reason); + } +} \ No newline at end of file diff --git a/source/server/utils/exec.test.ts b/source/server/utils/exec.test.ts new file mode 100644 index 000000000..022ec8abc --- /dev/null +++ b/source/server/utils/exec.test.ts @@ -0,0 +1,28 @@ + +import {run} from "./exec.js"; + + +describe("run()", function(){ + it("runs a command", async function(){ + let {code, stdout, stderr} = await run("echo", ["Hello World"]); + expect(code).to.equal(0); + expect(stdout).to.equal("Hello World\n"); + expect(stderr).to.equal(""); + }); + + it("throw if interrupted by an AbortSignal", async function(){ + let c = new AbortController(); + setTimeout(()=> c.abort(), 10); + await expect(run("sleep", ["1"], {signal: c.signal})).to.be.rejectedWith("The operation was aborted"); + }); + + it("throw if interrupted by a signal", async function(){ + await expect(run("bash", ["-c", "kill -TERM $$"])).to.be.rejectedWith("Command bash was interrupted by signal SIGTERM"); + }); + + it("can return code != 0", async function(){ + let {code,} = await run("bash", ["-c", "exit 1"]); + expect(code).to.equal(1); + }); + +}) \ No newline at end of file diff --git a/source/server/utils/exec.ts b/source/server/utils/exec.ts new file mode 100644 index 000000000..a0f1f3374 --- /dev/null +++ b/source/server/utils/exec.ts @@ -0,0 +1,84 @@ 
+import { spawn, SpawnOptionsWithoutStdio } from "child_process"; +import { once } from "events"; +import { ITaskLogger } from "../tasks/types.js"; + + +/** + * Run a command with a {@link ITaskLogger} instrumentation + * stdout will be piped to `logger.debug` and stderr to `logger.warn` + * The promise will then be rejected only if exit code != 0 + */ + +export async function taskRun(cmd: string, args: string[], { logger, ...opts }: RunCommandOpts): Promise { + let child = spawn(cmd, args, opts); + + child.stdout.setEncoding("utf-8"); + child.stdout.on("data", (chunk) => logger.debug(chunk)); + child.stderr.setEncoding("utf-8"); + child.stderr.on("data", (chunk) => logger.warn(chunk)); + + try { + let [code, signal] = await once(child, "close"); + if (typeof code !== "number") { + let e: any = new Error(`Command ${cmd} was interrupted by signal ${signal}`); + throw e; + } else if (code != 0) { + throw new Error(`Command ${cmd} exitted with non-zero error code: ${code}`); + } + } finally { + child.stdout.removeAllListeners(); + child.stderr.removeAllListeners(); + } +} + + +/** + * Wrapper around `spawn` that gathers stdout/stderr to a string + */ +export async function run(cmd: string, args: string[], opts?: SpawnOptionsWithoutStdio): Promise<{ code: number; stdout: string; stderr: string; }> { + let child = spawn(cmd, args, opts); + + let stdout: string = ""; + let stderr: string = ""; + child.stdout.setEncoding("utf-8"); + child.stdout.on("data", (chunk) => stdout += chunk); + child.stderr.setEncoding("utf-8"); + child.stderr.on("data", (chunk) => stderr += chunk); + + try { + let [code, signal] = await once(child, "close"); + if (typeof code !== "number") { + let e: any = new Error(`Command ${cmd} was interrupted by signal ${signal}`); + e.stdout = stdout; + e.stderr = stderr; + throw e; + } + return { code, stdout, stderr }; + } finally { + child.stdout.removeAllListeners(); + child.stderr.removeAllListeners(); + } +} +export interface RunCommandOpts extends 
SpawnOptionsWithoutStdio { + logger: ITaskLogger; +} + +/** + * spawn the ktx utility to get its version string and parse it + */ +export async function getKtxVersion(): Promise { + + const { stdout, stderr } = await run('ktx', ['--version']); + + const version = ((stdout || stderr) as string) + .replace(/ktx version:\s+/, '') + .replace(/~\d+/, '') + .trim(); + + if (!version) { + throw new Error( + `Unable to find "ktx" version. Confirm KTX-Software is installed.` + ); + } + return version; +} diff --git a/source/server/utils/filetypes.test.ts b/source/server/utils/filetypes.test.ts index c00a7c468..28474f2bd 100644 --- a/source/server/utils/filetypes.test.ts +++ b/source/server/utils/filetypes.test.ts @@ -1,96 +1,239 @@ +import path from "node:path"; import express from "express"; -import { compressedMime, getContentType, getMimeType } from "./filetypes.js"; +import { compressedMime, extFromType, getContentType, getFilename, getMimeType, parseMagicBytes, readMagicBytes } from "./filetypes.js"; import request from "supertest"; -describe("getMimeType",function(){ +import { fixturesDir } from "../__test_fixtures/fixtures.js"; + +describe("getFilename", function () { + it("returns filename from content-disposition", function () { + expect(getFilename({ + "content-disposition": 'attachment; filename="report.pdf"', + } as any)).to.equal("report.pdf"); + }); + + it("falls back to the content-type extension", function () { + expect(getFilename({ + "content-type": "image/jpeg", + } as any)).to.match(/^[A-Za-z0-9_-]{12}\.jpeg$/); + }); + + it("falls back to the content-type extension when content-disposition is unsafe", function () { + expect(getFilename({ + "content-disposition": 'attachment; filename="../secret.txt"', + "content-type": "image/jpeg", + } as any)).to.match(/^[A-Za-z0-9_-]{12}\.jpeg$/); + }); + + [ + { + name: "relative parent traversal", + header: 'attachment; filename="../secret.txt"', + }, + { + name: "nested relative traversal", + header: 'attachment; 
filename="../../var/data/archive.zip"', + }, + { + name: "unix absolute path", + header: 'attachment; filename="/etc/passwd"', + }, + { + name: "windows absolute path", + header: 'attachment; filename="C:\\Windows\\System32\\drivers\\etc\\hosts"', + }, + { + name: "windows parent traversal", + header: 'attachment; filename="..\\..\\AppData\\secret.txt"', + }, + ].forEach(({ name, header }) => { + it(`rejects malicious ${name}`, function () { + expect(getFilename({ + "content-disposition": header, + } as any)).to.equal(undefined); + }); + }); +}); + +describe("getMimeType", function () { //Check proper operation of mimetype guessing function //This is mostly out of our hands, but if types we tend to rely on do change, we want to know! - it("lookups svx files", function(){ + it("lookups svx files", function () { expect(getMimeType("foo.svx.json")).to.equal("application/si-dpo-3d.document+json"); expect(getMimeType("/path/to/scene.svx.json")).to.equal("application/si-dpo-3d.document+json"); }); - it("model/gltf-binary", function(){ + it("model/gltf-binary", function () { expect(getMimeType("foo.glb")).to.equal("model/gltf-binary"); }); - it("text/html", function(){ + it("model/obj", function () { + // Not widely used, but recognized + // by IANA See: https://www.iana.org/assignments/media-types/media-types.xhtml#model + expect(getMimeType("foo.obj")).to.equal("model/obj"); + expect(getMimeType("foo.mtl")).to.equal("model/mtl") + }); + + it("model/stl", function () { + expect(getMimeType("foo.stl")).to.equal("model/stl"); + }); + + it("model/vnd.usdz+zip", function () { + expect(getMimeType("foo.usdz")).to.equal("model/vnd.usdz+zip"); + }) + + it("text/html", function () { expect(getMimeType("foo.html")).to.equal("text/html"); }); - it("defaults to application/octet-stream", function(){ + it("defaults to application/octet-stream", function () { expect(getMimeType("foo.bar")).to.equal("application/octet-stream"); + //Test expectations for known file types + 
expect(getMimeType("foo.ply")).to.equal("application/octet-stream"); + expect(getMimeType("foo.blend")).to.equal("application/octet-stream"); }) }); -describe("getContentType", function(){ +describe("getContentType", function () { //Wraps around getMimeType to try to force the content type from request's headers. let app = express(); - app.put("/:file", (req, res)=>{ + app.put("/:file", (req, res) => { res.set("Content-Type", getContentType(req)).status(204).end(); }); - it("infer .svx.json", async function(){ + it("infer .svx.json", async function () { await request(app).put("/foo.svx.json") - .expect(204) - .expect("Content-Type", "application/si-dpo-3d.document+json"); + .expect(204) + .expect("Content-Type", "application/si-dpo-3d.document+json"); }); - it("infer model/gltf-binary", async function(){ + it("infer model/gltf-binary", async function () { await request(app).put("/foo.glb") - .expect(204) - .expect("Content-Type", "model/gltf-binary"); + .expect(204) + .expect("Content-Type", "model/gltf-binary"); }); - it("infer html", async function(){ + it("infer html", async function () { await request(app).put("/foo.html") - .expect(204) - .expect("Content-Type", "text/html; charset=utf-8"); + .expect(204) + .expect("Content-Type", "text/html; charset=utf-8"); }); - it("ignores Content-Type header if it can", async function(){ + it("ignores Content-Type header if it can", async function () { //DPO-Voyager sets article's content-type to text/plain, we don't want that. 
await request(app).put("/foo.html") - .set("Content-Type", "text/plain") - .expect(204) - .expect("Content-Type", "text/html; charset=utf-8"); + .set("Content-Type", "text/plain") + .expect(204) + .expect("Content-Type", "text/html; charset=utf-8"); }); - it("can be hinted", async function(){ + it("can be hinted", async function () { await request(app).put("/foo") - .set("Content-Type", "text/html") - .expect(204) - .expect("Content-Type", "text/html; charset=utf-8"); + .set("Content-Type", "text/html") + .expect(204) + .expect("Content-Type", "text/html; charset=utf-8"); }); - it("defaults to application/octet-stream", async function(){ + it("defaults to application/octet-stream", async function () { await request(app).put("/foo") - .expect(204) - .expect("Content-Type", "application/octet-stream"); + .expect(204) + .expect("Content-Type", "application/octet-stream"); }); }); -describe("compressedMime", function(){ +describe("compressedMime", function () { [ "image/jpeg", "image/png", "video/mp4", "application/zip", - ].forEach((t)=>{ - it(`${t}: false`, function(){ + ].forEach((t) => { + it(`${t}: false`, function () { expect(compressedMime(t)).to.be.false; }); }); - + [ "image/tiff", "image/svg+xml", "application/xml+svg", "text/html", - ].forEach((t)=>{ - it(`${t}: true`, function(){ + ].forEach((t) => { + it(`${t}: true`, function () { expect(compressedMime(t)).to.be.true; }); }); +}); + + +describe("extFromType()", function () { + + Object.entries({ + "model/gltf-binary": ".glb", + "image/jpeg": ".jpeg", + "application/zip": ".zip", + "application/si-dpo-3d.document+json": ".svx.json", + }).forEach(function ([type, extname]) { + it(`${type} => ${extname}`, function () { + expect(extFromType(type)).to.equal(extname); + }); + }); + + it("returns an empty string for extensionless files", function () { + expect(extFromType("application/octet-stream")).to.equal(""); + }); +}); + +describe("parseMagicBytes()", function () { + //Header of a 1156x420 png + //Header 
of a 1067x800 jpeg + it("image/png", function () { + expect(parseMagicBytes(Buffer.from('89504e470d0a1a0a', "hex"))).to.equal("image/png"); + }); + + it("image/jpeg", function () { + [ + "FFD8FFDB", //Raw jpeg + "FFD8FFE000104A4649460001", //JFIF + "FFD8FFEE", //Also jpeg + "FFD8FFE0", // still jpeg + ].forEach((str) => { + expect(parseMagicBytes(Buffer.from(str, "hex")), `0x${str} should be a valid jpeg header`).to.equal("image/jpeg") + }) + }); + it("image/webp", function () { + //Header with a size of 0 + expect(parseMagicBytes(Buffer.from('524946460000000057454250', 'hex'))).to.equal("image/webp"); + //header with a real size + expect(parseMagicBytes(Buffer.from('52494646b254000057454250', 'hex'))).to.equal("image/webp"); + //header from another RIFF container + expect(parseMagicBytes(Buffer.from('524946460000000057415645', 'hex'))).to.equal("application/octet-stream"); + }); + + it("model/gltf-binary", function () { + // See https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#binary-header + // maps to glTF ascii string, but backwards, because it's a little-endian format + const b = Buffer.alloc(4); + b.writeUint32LE(0x46546C67); + expect(parseMagicBytes(b)).to.equal("model/gltf-binary"); + }); + + it("application/zip", function () { + // See https://en.wikipedia.org/wiki/ZIP_(file_format)#File_headers + const b = Buffer.alloc(4); + b.writeUint32LE(0x04034b50); + expect(parseMagicBytes(b)).to.equal("application/zip"); + }); +}); + +describe("readMagicBytes()", function () { + it("reads first bytes of a glb file", async function () { + let type = await readMagicBytes(path.join(fixturesDir, "cube.glb")); + expect(type).to.equal("model/gltf-binary"); + }); + + it("throws if file doesn't exist", async function () { + await expect(readMagicBytes(path.join(fixturesDir, "not_cube.glb"))).to.be.rejectedWith({ code: "ENOENT" } as any); + }); }); \ No newline at end of file diff --git a/source/server/utils/filetypes.ts b/source/server/utils/filetypes.ts index 
c9808a4b6..8457124bb 100644 --- a/source/server/utils/filetypes.ts +++ b/source/server/utils/filetypes.ts @@ -1,45 +1,123 @@ -import path from 'path'; -import {Request} from "express"; -import {lookup, types} from "mime-types"; - -const mimeMap :Record = { - ".jpg": "image.jpeg", - ".jpeg": "image.jpeg", - ".png": "image/png", - ".webp": "image/webp", - ".mp4": "video/mp4", - ".glb": "model/gltf-binary", - ".htm": "text/html", - ".html": "text/html", - ".txt": "text/plain", - ".json": "application/json", - ".svx.json":"application/si-dpo-3d.document+json", -} -const mimeList = Object.values(mimeMap); - -export function getMimeType(name:string){ +import fs from 'fs/promises'; + +import { Request } from "express"; +import { lookup, types, extension, extensions } from "mime-types"; +import contentDisposition from 'content-disposition'; +import uid from './uid.js'; + +//Add non-standard extension +extensions["application/si-dpo-3d.document+json"] = ["svx.json"]; + +export function getMimeType(name: string) { //Special case because extname doesn't recognize double-extensions - if(/\.svx\.json$/i.test(name)) return "application/si-dpo-3d.document+json"; + if (/\.svx\.json$/i.test(name)) return "application/si-dpo-3d.document+json"; return lookup(name) || "application/octet-stream"; } /** * Infer a mime type from a request's Content-Type if possible * Defaults to application/octet-stream if Content-Type is none of the known values */ -export function getContentType(req :Request){ +export function getContentType(req: Request) { const inferred = getMimeType(req.originalUrl); - if(inferred != "application/octet-stream") return inferred; + if (inferred != "application/octet-stream") return inferred; return req.get("content-type") || "application/octet-stream"; } +function sanitizeFilename(filename: string): string | undefined { + const trimmed = filename.trim(); + if (!trimmed || trimmed === "." 
|| trimmed === "..") return undefined; + if (/[\\/\0]/.test(trimmed)) return undefined; + if (trimmed.includes("..")) return undefined; + if (/^[A-Za-z]:/.test(trimmed)) return undefined; + return trimmed; +} + + +export function getFilename(headers: Request["headers"]): string | undefined { + const dispositionHeader = typeof headers["content-disposition"] === "string" ? headers["content-disposition"] : undefined; + const disposition = dispositionHeader ? contentDisposition.parse(dispositionHeader) : null; + const filename = disposition?.parameters?.filename; + if (typeof filename === "string") { + const safeFilename = sanitizeFilename(filename); + if (safeFilename) return safeFilename; + } + + const mimeType = typeof headers["content-type"] === "string" ? headers["content-type"] : "application/octet-stream"; + const ext = extFromType(mimeType); + if (ext) { + return uid(12) + ext; + } + return undefined; +} + + + /** * Checks whether a MIME type should be compressed using gzip */ -export function compressedMime(mime :string) :boolean{ - if( /^(?:image|video)\//.test(mime)){ +export function compressedMime(mime: string): boolean { + if (/^(?:image|video)\//.test(mime)) { return ["image/tiff", "image/svg+xml", "image/bmp"].indexOf(mime) !== -1; - }else if(mime.startsWith("application")){ - if(mime.endsWith("zip")) return false; //A whole lot of things are application/**+zip + } else if (mime.startsWith("application")) { + if (mime.endsWith("zip")) return false; //A whole lot of things are application/**+zip } return true; -} \ No newline at end of file +} + +/** + * Returns standard extension with a leading dot for a given mime type + * Returns an empty string for extensionless types (eg: application/octet-stream) + */ +export function extFromType(type: string) { + let ext = extension(type); + if (ext === "bin") return ""; + return `.${ext}`; +} + + +export async function readMagicBytes(filepath: string): Promise { + let handle = await fs.open(filepath, 
fs.constants.O_RDONLY); + try { + const b = Buffer.allocUnsafe(12); + const { bytesRead } = await handle.read({ buffer: b }); + return parseMagicBytes(b.subarray(0, bytesRead)); + } finally { + await handle.close(); + } +} + +/** + * Seek the first bytes of a file to find type signatures. + * Most files have 4 bytes signatures. + * RIFF containers (eg: webp) needs at least 12 bytes of content + * @see {@link https://en.wikipedia.org/wiki/List_of_file_signatures File Signatures } + */ +export function parseMagicBytes(src: Buffer | Uint8Array): string { + if (src[0] == 0x89 && src.subarray(0, 8).toString("hex") == "89504e470d0a1a0a") { + return "image/png"; + } + + if (src[0] == 0xFF && src.subarray(0, 3).toString("hex") == "ffd8ff") { + return "image/jpeg"; + } + + if (src[0] == 0x52 && src.subarray(0, 4).toString("ascii") == "RIFF") { + //RIFF files. Next 4 bytes are for the file size. + let sig = src.subarray(8, 12).toString(); + if (sig == "WEBP") return "image/webp"; + } + + if (src[0] == 0x67 && src.subarray(0, 4).toString("ascii") == "glTF") { + return "model/gltf-binary"; + } + + if (src[0] == 0x50 && src.subarray(0, 4).toString("hex") == "504b0304") { + return "application/zip"; + } + + return "application/octet-stream"; +} + +export function isModelType(filename: string): boolean { + return /\.(?:obj|stl|ply|gltf|blend)$/i.test(filename); +} diff --git a/source/server/utils/format.test.ts b/source/server/utils/format.test.ts new file mode 100644 index 000000000..09a3558a7 --- /dev/null +++ b/source/server/utils/format.test.ts @@ -0,0 +1,37 @@ +import { formatBytes, isTimeInterval } from "./format.js"; + + + +describe("formatBytes()", function(){ + it("Format a number of bytes to a human readable string", function(){ + expect(formatBytes(1000)).to.equal("1 kB"); + }); + + it("format mibibytes", function(){ + expect(formatBytes(1024, false)).to.equal("1 KiB"); + expect(formatBytes(1024*1024, false)).to.equal("1 MiB"); + }); +}); + + 
+describe("isTimeInterval()", function(){ + it("Returns false for Date objects", function(){ + expect(isTimeInterval(new Date())).to.be.false; + }); + it("Returns false for timestamps", function(){ + expect(isTimeInterval(Date.now())).to.be.false; + }); + it("Returns false for ISO Date strings", function(){ + expect(isTimeInterval(new Date().toISOString())).to.be.false; + }); + it("Returns true for ISO8601 period strings", function(){ + [ + "P1Y", + "P1M1D", + "PT1M", // 1 minute + "PT1.5S", // Seconds are fractional + ].forEach(function(s){ + expect(isTimeInterval(s), `${s} should be a valid ISO8601 period string`).to.be.true; + }) + }) +}) \ No newline at end of file diff --git a/source/server/utils/format.ts b/source/server/utils/format.ts new file mode 100644 index 000000000..f80a93961 --- /dev/null +++ b/source/server/utils/format.ts @@ -0,0 +1,29 @@ + + + +/** + * Format a byte count into a human readable string + */ +export function formatBytes(bytes:number, si=true){ + const thresh = si ? 1000 : 1024; + if(Math.abs(bytes) < thresh) { + return bytes + ' B'; + } + let units = si + ? ['kB','MB','GB','TB','PB','EB','ZB','YB'] + : ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB']; + let u = -1; + do { + bytes /= thresh; + ++u; + } while(Math.abs(bytes) >= thresh && u < units.length - 1); + return Math.round(bytes*100)/100 + ' '+units[u]; +} + +/** + * Checks if a Date-like parameter might be an interval + * It is not fool-proof: We don't want to match string that would be valid dates. But if it's neither a valid date nor a valid interval, we don't really care about trhe output here. 
+ */ +export function isTimeInterval(t:any){ + return typeof t === "string" && /^[-+]?P?(\d+[YMD]|T[.\d]+[HMS])/.test(t); +} \ No newline at end of file diff --git a/source/server/utils/glTF.test.ts b/source/server/utils/glTF.test.ts deleted file mode 100644 index 4e6bef3bc..000000000 --- a/source/server/utils/glTF.test.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { expect } from "chai"; -import fs from "fs/promises"; -import path from "path"; -import { fileURLToPath } from 'url'; -import { parse_glb, parse_glTF } from "./glTF.js"; - -const thisFile = fileURLToPath(import.meta.url); - -import { fixturesDir } from "../__test_fixtures/fixtures.js"; - -describe("parse_glb()", function(){ - - it("parse a glb file to extract data", async function(){ - let d = await parse_glb(path.resolve(fixturesDir, "cube.glb" )); - expect(d).to.deep.equal({ - meshes:[{ - numFaces: 6*2 /*it takes two triangles to make a square and 6 squares for a cube */, - bounds:{ - min: [-1,-1,-1], - max: [1,1,1.000001] - }, - position: [0,0,0], - name: "Cube", - }], - bounds:{ - min: [-1,-1,-1], - max: [1,1,1.000001] - }, - byteSize: 5500, - }) - }); - it("throw an error for invalid files", async function(){ - await expect(parse_glb( thisFile)).to.be.rejectedWith("bad magic number"); - }) -}); - - -describe("parse_gltf()", function(){ - it("handles morph targets", async function(){ - let data = await fs.readFile(path.resolve(fixturesDir, "morph.gltf" ), {encoding: "utf8"}); - let gltf = JSON.parse(data); - let res = parse_glTF(gltf); - expect(res).to.deep.equal({ - meshes:[{ - numFaces: 64, - position: [0,0,0], - bounds: {min: [-0.5,0,-0.5], max: [0.5,0.20000000298023224,0.5]}, - name: "mesh" - }], - bounds: {min: [-0.5,0,-0.5], max: [0.5,0.20000000298023224,0.5]}, - }); - }) - - it("handles empties", async function(){ - let gltf = { - "asset":{"generator":"Khronos glTF Blender I/O v4.2.69","version":"2.0"}, - "scene":0, - "scenes":[{"name":"Scene","nodes":[0]}], - "nodes":[{"name":"Empty"}] - }; - 
let res = parse_glTF(gltf); - expect(res).to.deep.equal({ - meshes:[], - bounds: {min: [0,0,0], max: [0,0,0]}, - }); - }) - // @todo add more edge-cases from https://github.com/KhronosGroup/glTF-Sample-Models -}) \ No newline at end of file diff --git a/source/server/utils/glTF.ts b/source/server/utils/glTF.ts deleted file mode 100644 index b53e883fe..000000000 --- a/source/server/utils/glTF.ts +++ /dev/null @@ -1,136 +0,0 @@ -import fs from "fs/promises"; -import assert from "assert/strict"; - - - -interface Accessor { - componentType :5120|5121|5122|5123|5125|5126; - type :"SCALAR"|"VEC1"|"VEC2"|"VEC3"|"VEC4"|"MAT2"|"MAT3"|"MAT4"; - count :number; - min?:number[]; - max?:number[]; -} - -interface Primitive { - attributes:{ - NORMAL :number; - POSITION :number; - TANGENT ?:number; - TEXCOORD_0 ?:number; - } - targets ?:[{POSITION :number}]; - indices :number; -} -interface Mesh { - name :string; - primitives: Primitive[]; -} -interface Bounds { - min: [number, number, number], - max: [number, number, number] -} - -interface MeshDescription { - position: [number, number, number]; - bounds :Bounds; - numFaces: number; - name ?:string; -} - - -export interface SceneDescription{ - meshes :MeshDescription[]; - bounds :Bounds; - byteSize ?:number; -} - -interface GlbDescription extends SceneDescription{ - byteSize :number; -} - -export interface JSONglTF extends Record{ - meshes?: Mesh[]; - accessors ?:Accessor[]; -} - -function asBounds(a :Accessor) :Bounds{ - if(!(a.min?.length == 3 && a.max?.length == 3 )) assert.fail("min and max MUST be defined in glTF mesh position"); - return a as any as Bounds; -} - -function mergeBounds(a:Bounds, b:Bounds) :Bounds{ - return { - min: a.min.map((value, index)=>(("min" in b)?Math.min(value, b.min[index]):value)) as any, - max:a.max.map((value, index)=>(("max" in b)?Math.max(value, b.max[index]):value)) as any, - }; -} - -/** - * - * Float values are rounded to single precision. 
- * @see https://github.com/KhronosGroup/glTF/blob/main/specification/2.0/Specification.adoc#3625-accessors-bounds - */ -export function parse_glTF({meshes = [], accessors = []}:JSONglTF) :SceneDescription { - let scene :SceneDescription = { - meshes:[], - bounds: {min:[0,0,0], max:[0,0,0]}, - }; - for(let mesh of meshes){ - let out :MeshDescription = { - position: [0, 0, 0], - bounds: {min:[0,0,0], max:[0,0,0]}, - numFaces: 0, - name: mesh.name - }; - for(let primitive of mesh.primitives){ - let positions = [primitive.attributes.POSITION, ...(primitive.targets ?? []).map(t=>t.POSITION)] - for (let positionIndex of positions){ - let position :Bounds|Accessor = accessors[positionIndex]; - out.bounds = mergeBounds(out.bounds, asBounds(position)); - out.numFaces+= accessors[primitive.indices].count /3; //every 3 indices form a triangle - } - } - scene.meshes.push(out); - scene.bounds = mergeBounds(scene.bounds, out.bounds); - } - return scene; -} - - - - -export async function parse_glb(filePath :string) :Promise{ - let res :MeshDescription = {} as any; - let handle = await fs.open(filePath, "r"); - //https://docs.fileformat.com/3d/glb/ - try{ - let header = Buffer.alloc(3*4); - let {bytesRead} = await handle.read({buffer:header}); - assert.equal(bytesRead, 3*4 as number, `Could not read glb header (file too short ${bytesRead})`); - let magic = header.readUint32LE(0); - assert(magic == 0x46546C67, "bad magic number : 0x"+magic.toString(16)) - let version = header.readUInt32LE(4); - assert(version == 0x2, `gltf files version ${version} not supported. 
Please provide a glTF2.0 file`); - let byteSize = header.readUInt32LE(8); - - let position = header.length; //position is not updated when we skip blocks - let chunkHeader = Buffer.allocUnsafe(4*2); - while( (await handle.read({buffer:chunkHeader, position})).bytesRead == 8){ - position += 8; - let chunkLength = chunkHeader.readUint32LE(0); - let chunkType = chunkHeader.readUInt32LE(4); - if(chunkType != 0x4E4F534A){ - position += chunkLength; - continue; - } - let data = Buffer.allocUnsafe(chunkLength) - let {bytesRead} = await handle.read({buffer:data, position}); - assert(bytesRead == chunkLength, "Reached end of file while trying to get JSON chunk"); - let gltfData :JSONglTF = JSON.parse(data.toString("utf-8")); - return {...parse_glTF(gltfData), byteSize}; - } - }finally{ - await handle.close(); - } - assert.fail("Can't find glTF data "); -} \ No newline at end of file diff --git a/source/server/utils/gltf/inspect.test.ts b/source/server/utils/gltf/inspect.test.ts new file mode 100644 index 000000000..7d1bc49ee --- /dev/null +++ b/source/server/utils/gltf/inspect.test.ts @@ -0,0 +1,87 @@ +import { expect } from "chai"; +import path from "path"; +import { Document } from "@gltf-transform/core"; +import type { Primitive } from "@gltf-transform/core"; + +import { fixturesDir } from "../../__test_fixtures/fixtures.js"; +import { io } from "./io.js"; +import { inspectDocument } from "./inspect.js"; + +describe("inspectDocument()", function(){ + + it("parse etc1s texture size", async function(){ + const document = await io.read(path.join(fixturesDir, "cube_etc1s.glb")); + const desc = inspectDocument(document); + expect(desc).to.be.an("object"); + expect(desc.name).to.equal("Cube"); + expect(desc.numFaces).to.equal(12); + expect(desc.imageSize).to.equal(16); + expect(desc.extensions).to.be.an("array"); + expect(desc.bounds).to.deep.equal({ + min: [-1, -1, -1], + max: [1, 1, 1], + }); + }); + + it("parse webp texture size", async function(){ + const document = await 
io.read(path.join(fixturesDir, "cube_webp.glb")); + const desc = inspectDocument(document); + expect(desc).to.be.an("object"); + expect(desc.name).to.equal("Cube"); + expect(desc.numFaces).to.equal(12); + expect(desc.imageSize).to.equal(16); + expect(desc.extensions).to.be.an("array"); + expect(desc.bounds).to.deep.equal({ + min: [-1, -1, -1], + max: [1, 1, 1], + }); + }); + + it("returns no bounds=null for empty models", async function(){ + const document = new Document(); + const desc = inspectDocument(document); + expect(desc).to.be.an("object"); + expect(desc.name).to.equal(""); + expect(desc.numFaces).to.equal(0); + expect(desc.imageSize).to.equal(0); + expect(desc.bounds).to.be.null; + }); + + it("handles a scene with a mesh that has empty geometry", async function(){ + const document = new Document(); + const scene = document.createScene("Scene"); + scene.setName("Scene"); + document.getRoot().setDefaultScene(scene); + const node = document.createNode("Node"); + scene.addChild(node); + const mesh = document.createMesh("Mesh"); + // no primitives added — mesh with empty geometry + node.setMesh(mesh); + + const desc = inspectDocument(document); + expect(desc.numFaces).to.equal(0); + expect(desc.bounds, `${JSON.stringify(desc.bounds)}`).to.be.null; + }); + + it("handles a scene with a single point", async function(){ + const document = new Document(); + const scene = document.createScene("Scene"); + scene.setName("Scene"); + document.getRoot().setDefaultScene(scene); + const node = document.createNode("Node"); + scene.addChild(node); + const mesh = document.createMesh("Mesh"); + const primitive = document.createPrimitive(); + primitive.setMode(0 as Parameters[0]); // POINTS + const accessor = document.createAccessor().setType("VEC3").setArray(new Float32Array([1, 2, 3])); + primitive.setAttribute("POSITION", accessor); + mesh.addPrimitive(primitive); + node.setMesh(mesh); + + const desc = inspectDocument(document); + // POINTS mode (0) has no faces + 
expect(desc.numFaces).to.equal(0); + expect(desc.bounds).to.deep.equal({ min: [1, 2, 3], max: [1, 2, 3] }); + }); + +}); diff --git a/source/server/utils/gltf/inspect.ts b/source/server/utils/gltf/inspect.ts new file mode 100644 index 000000000..bbf96ae5a --- /dev/null +++ b/source/server/utils/gltf/inspect.ts @@ -0,0 +1,84 @@ +import { getBounds, getPrimitiveVertexCount, VertexCountMethod } from '@gltf-transform/functions'; + +import { Document, ImageUtils, PropertyType } from '@gltf-transform/core'; + +/** + * Model boundaries. Copied from DPO-Voyager's `IBoundingBox` definition + * + * It is generally implied that either min/max are finite or the whole bounding box is null. + */ +export interface BoundingBox { + min: [number, number, number], + max: [number, number, number], +} + +export interface SceneDescription { + name: string, + /** Bounding box. `null` when not finite (ie. the document has no faces to get bounds from) */ + bounds: BoundingBox | null, + imageSize: number, + numFaces: number, + extensions: string[], +} + +export function inspectDocument(document: Document): SceneDescription { + const root = document.getRoot(); + const scene = root.getDefaultScene(); + const name = scene?.listChildren().map((node => node.getName())).find(n => !!n) ?? scene?.getName() ?? ''; + const extensions = root.listExtensionsUsed().map(e => e.extensionName); + const rawBounds = scene ? getBounds(scene) : null; + //We don't expect the possibility of a bounding box where only _some_ coordinates are finite so we test just one. + const bounds = Number.isFinite(rawBounds?.min[0]) ? 
rawBounds : null; + let numFaces = 0; + for (const mesh of root.listMeshes()) { + for (let primitive of mesh.listPrimitives()) { + const mode = primitive.getMode(); + if (mode < 4) continue; // POINTS and LINES* have no faces + if (!primitive.getAttribute("POSITION")) continue; // no geometry data + numFaces += Math.floor(getPrimitiveVertexCount(primitive, VertexCountMethod.RENDER) / 3); + if (4 < mode) numFaces -= 2; //TRIANGLES_STRIP and TRIANGLE_FAN have two shared vertices + } + } + + let imageSize = getMaxDiffuseSize(document); + + return { + name, + bounds, + imageSize, + numFaces, + extensions, + } +} + + +/** + * Get the largest diffuse size in the document + * @param document + * @returns + */ +export function getMaxDiffuseSize(document: Document): number { + const root = document.getRoot(); + let imageSize = 0 + for (const texture of root.listTextures()) { + const slots = document + .getGraph() + .listParentEdges(texture) + .filter((edge) => edge.getParent().propertyType !== PropertyType.ROOT) + .map((edge) => edge.getName()); + + if (slots.indexOf("baseColorTexture") === -1) continue; //Ignore textures not used as baseColor + + const resolution = ImageUtils.getSize(texture.getImage()!, texture.getMimeType()); + if (resolution) { + imageSize = Math.max(imageSize, ...resolution); + } + } + return imageSize; +} + +export function getBaseTextureSizeMultiplier(originalMaxSize: number) { + //How much should we scale down to have High be a 8k texture? 
+ const baseDivider = Math.max(1, originalMaxSize / 8192); + return 1 / baseDivider; +} \ No newline at end of file diff --git a/source/server/utils/gltf/io.ts b/source/server/utils/gltf/io.ts new file mode 100644 index 000000000..f9ac22667 --- /dev/null +++ b/source/server/utils/gltf/io.ts @@ -0,0 +1,19 @@ +import { NodeIO } from '@gltf-transform/core'; +import { EXTMeshoptCompression, KHRDracoMeshCompression, KHRMeshQuantization, KHRTextureBasisu, EXTTextureWebP } from '@gltf-transform/extensions'; +import draco3d from 'draco3dgltf'; +import { MeshoptDecoder, MeshoptEncoder } from 'meshoptimizer'; + +await MeshoptEncoder.ready; +export const io = new NodeIO() + .registerExtensions([ + KHRDracoMeshCompression, + KHRMeshQuantization, + EXTMeshoptCompression, + KHRTextureBasisu, + EXTTextureWebP, + ]) + .registerDependencies({ + 'draco3d.decoder': await draco3d.createDecoderModule(), + 'meshopt.decoder': MeshoptDecoder, + 'meshopt.encoder': MeshoptEncoder, + }); diff --git a/source/server/utils/gltf/obj2gltf.d.ts b/source/server/utils/gltf/obj2gltf.d.ts new file mode 100644 index 000000000..0acfc9004 --- /dev/null +++ b/source/server/utils/gltf/obj2gltf.d.ts @@ -0,0 +1,85 @@ +/** + * Type definitions for the `obj2gltf` package (basic / conservative). + * + * These definitions cover the common options documented in the project's README + * and provide overloads so callers get a `Buffer` when `binary: true`. + */ +declare module 'obj2gltf' { + + /** Input may be a filesystem path, a Buffer, or a parsed object representation. */ + type ObjInput = string | NodeJS.ArrayBufferView | object; + + /** Options exposed by the obj2gltf library (derived from README flags). */ + export interface Obj2GltfOptions { + /** Save as binary glTF (.glb). Default: false. */ + binary?: boolean; + /** Writes out separate buffers and textures instead of embedding them. Default: false. */ + separate?: boolean; + /** Write out separate textures only. Default: false. 
*/ + separateTextures?: boolean; + /** Do a more exhaustive check for texture transparency. Default: false. */ + checkTransparency?: boolean; + /** Prevent reading files outside the input directory. Default: false. */ + secure?: boolean; + /** Pack occlusion into the red channel of metallic-roughness texture. Default: false. */ + packOcclusion?: boolean; + /** Treat .mtl values as metallic-roughness PBR. Default: false. */ + metallicRoughness?: boolean; + /** Treat .mtl values as specular-glossiness PBR. Default: false. */ + specularGlossiness?: boolean; + /** Save with KHR_materials_unlit extension. Default: false. */ + unlit?: boolean; + /** Path to a combined metallic-roughness-occlusion texture that overrides .mtl. */ + metallicRoughnessOcclusionTexture?: string; + /** Path to a specular-glossiness texture that overrides .mtl. */ + specularGlossinessTexture?: string; + /** Path to occlusion texture that overrides .mtl. */ + occlusionTexture?: string; + /** Path to normal texture that overrides .mtl. */ + normalTexture?: string; + /** Path to baseColor / diffuse texture that overrides .mtl. */ + baseColorTexture?: string; + /** Path to emissive texture that overrides .mtl. */ + emissiveTexture?: string; + /** Path to alpha texture that overrides .mtl. */ + alphaTexture?: string; + /** Up axis of the obj input (e.g. 'X', 'Y', 'Z' or string). */ + inputUpAxis?: string; + /** Up axis of the converted glTF output (e.g. 'X', 'Y', 'Z' or string). */ + outputUpAxis?: string; + /** Apply triangle winding order sanitization. Default: false. */ + triangleWindingOrderSanitization?: boolean; + /** Allow material to be double sided. Default: false. */ + doubleSidedMaterial?: boolean; + /** Base path to resolve external resources (MTL, images). */ + resourceDirectory?: string; + /** Embed images into the glTF (default: false). */ + embedImages?: boolean; + /** Use common materials (materialsCommon) compatible with older viewers. 
*/ + materialsCommon?: boolean; + /** Optimize the resulting glTF (where supported). */ + optimize?: boolean; + /** Any additional plugin-specific options. */ + [key: string]: unknown; + } + + /** Convert an OBJ to glTF JSON when called without options. */ + function obj2gltf(input: ObjInput): Promise; + + /** + * Convert an OBJ to glTF JSON when `binary` is false or explicitly omitted. + * Returns a Promise resolving to the glTF JSON object. + */ + function obj2gltf(input: ObjInput, options?: Obj2GltfOptions & { binary?: false }): Promise; + + /** + * Convert an OBJ to GLB when `binary` is true. + * Returns a Promise resolving to a Node `Buffer` containing the .glb binary. + */ + function obj2gltf(input: ObjInput, options: Obj2GltfOptions & { binary: true }): Promise; + + /** Generic fallback signature. */ + function obj2gltf(input: ObjInput, options?: Obj2GltfOptions): Promise; + + export = obj2gltf; +} \ No newline at end of file diff --git a/source/server/utils/gltf/toktx.ts b/source/server/utils/gltf/toktx.ts new file mode 100644 index 000000000..c70cc45fe --- /dev/null +++ b/source/server/utils/gltf/toktx.ts @@ -0,0 +1,297 @@ +import { + BufferUtils, + type Document, + FileUtils, + type ILogger, + ImageUtils, + type Texture, + TextureChannel, + type Transform, +} from '@gltf-transform/core'; + +import { KHRTextureBasisu } from '@gltf-transform/extensions'; +import { + createTransform, + getTextureChannelMask, + getTextureColorSpace, + listTextureSlots, +} from '@gltf-transform/functions'; +import fs from 'fs/promises'; +import os from 'os'; +import path, { join } from 'path'; +import sharp from 'sharp'; + +import { run } from "../exec.js"; +import { getKtxVersion } from "../exec.js"; +import { formatBytes } from '../format.js'; + +const NUM_CPUS = os.cpus().length || 1; + +const { R, G, A } = TextureChannel; + + + +/********************************************************************************************** + * Interfaces. 
+ */ + +export const Mode = { + ETC1S: 'etc1s', + UASTC: 'uastc', +} as const; + + +interface GlobalOptions { + mode: typeof Mode[keyof typeof Mode]; + /** + * Pattern matching the material texture slot(s) to be compressed or converted. + */ + slots: RegExp; + tmpdir: string; + maxSize?: number; +} + +export interface ETC1SOptions extends GlobalOptions { + mode: typeof Mode.ETC1S; + /** [1..255] quality setting. Higher is better. */ + quality?: number; + compression?: number; + rdo?: boolean; +} + +export interface UASTCOptions extends GlobalOptions { + mode: typeof Mode.UASTC; + /** 0 (fastest) to 4 (slowest) */ + level?: number; + zstd?: number; +} + + +/********************************************************************************************** + * KTX conversion Implementation + * + * @todo There is some optimization to be had for the resize step where we may benefit from some experimentation over resize algorithms + */ +export const toktx = function (options: ETC1SOptions | UASTCOptions): Transform { + + return createTransform(options.mode, async (doc: Document): Promise => { + const logger = doc.getLogger(); + + // Confirm recent version of KTX-Software is installed. 
+ logger.debug(`Found ktx version: ${await getKtxVersion()}`) ; + + const tmpdir = options.tmpdir; + const batchPrefix = path.basename(tmpdir); + + const basisuExtension = doc.createExtension(KHRTextureBasisu).setRequired(true); + + const textures = doc.getRoot().listTextures(); + + for (let textureIndex = 0; textureIndex< textures.length; textureIndex++){ + const texture = textures[textureIndex]; + const slots = listTextureSlots(texture); + const channels = getTextureChannelMask(texture); + const textureLabel = + texture.getURI() || + texture.getName() || + `${textureIndex + 1}/${doc.getRoot().listTextures().length}`; + const prefix = `ktx:texture(${textureLabel})`; + logger.debug(`${prefix}: Slots → [${slots.join(', ')}]`); + + // FILTER: Exclude textures that don't match (a) 'slots' or (b) expected formats. + + let srcMimeType = texture.getMimeType(); + + if (options.slots && !slots.find((slot) => slot.match(options.slots))) { + logger.debug(`${prefix}: Skipping, [${slots.join(', ')}] excluded by "slots" parameter.`); + continue; + }else if (srcMimeType === 'image/ktx2') { + logger.debug(`${prefix}: Skipping, already KTX.`); + continue; + } + + let srcImage = texture.getImage()!; + let srcExtension = texture.getURI() + ? FileUtils.extension(texture.getURI()) + : ImageUtils.mimeTypeToExtension(texture.getMimeType()); + const srcSize = texture.getSize(); + const srcBytes = srcImage ? 
srcImage.byteLength : null; + + if (!srcImage || !srcSize || !srcBytes) { + logger.warn(`${prefix}: Skipping, unreadable texture.`); + continue; + } + + //Resize + if(typeof options.maxSize === "number" || !srcSize.every(n=>isMultipleOfFour(n)) || texture.getMimeType() === "image/webp"){ + // Constrain texture size to a multiple of four + // To be conservative with 3D API compatibility + // @see https://github.khronos.org/KTX-Specification/ktxspec.v2.html#dimensions + if(typeof options.maxSize ==="number"){ + logger.info(`Resizing images to fit within a ${options.maxSize}x${options.maxSize} pixels square`); + }else if(texture.getMimeType() === "image/webp"){ + // toktx doesn't support webp images + logger.info("Reencoding webp image to png before basisu conversion") + } + let dstSize:[number, number] = ((typeof options.maxSize === "number")?fitWithin(srcSize, options.maxSize):srcSize); + if(!dstSize.every(n=>isMultipleOfFour(n))){ + logger.warn(`Source image ${texture.getName()} at index ${textureIndex} ${typeof options.maxSize === "number"?"would be":"is"} ${dstSize.join("x")} px. Ceiling to a multiple of four`); + dstSize = dstSize.map(n=> ceilMultipleOfFour(n)) as [number, number]; + } + const encoder = sharp(srcImage, { limitInputPixels: 32768 * 32768 }).toFormat('png'); + + if(!dstSize.every(n=>isPowerofTwo(n))){ + logger.warn(`${prefix}: Resizing ${srcSize.join('x')} → ${dstSize.join('x')}px: This is not a power of two, which might not be optimal for performance`); + }else{ + logger.debug(`${prefix}: Resizing ${srcSize.join('x')} → ${dstSize.join('x')}px`); + } + + encoder.resize(dstSize[0], dstSize[1], { fit: 'fill', kernel: 'lanczos3' }); + + srcImage = BufferUtils.toView((await encoder.toBuffer()) as any); + srcExtension = 'png'; + srcMimeType = 'image/png'; + } + + // PREPARE: Create temporary in/out paths for the 'ktx' CLI tool, and determine + // necessary command-line flags. 
+ + const srcPath = join(tmpdir, `${batchPrefix}_${textureIndex}.${srcExtension}`); + const dstPath = join(tmpdir, `${batchPrefix}_${textureIndex}.ktx2`); + + await fs.writeFile(srcPath, srcImage); + + const params = [ + 'create', + ...createParams(texture, slots, channels, options), + srcPath, + dstPath, + ]; + logger.debug(`${prefix}: Spawning → ktx ${params.join(' ')}`); + + // COMPRESS: Run `ktx create` CLI tool. + const { code, stdout, stderr} = await run('ktx', params as string[]); + + if (code !== 0) { + logger.error(`${prefix}: Failed with code [${code}]:\n\n${stderr.toString()}`); + } else { + // PACK: Replace image data in the glTF asset. + texture.setImage(await fs.readFile(dstPath) as any).setMimeType('image/ktx2'); + if (texture.getURI()) { + texture.setURI(FileUtils.basename(texture.getURI()) + '.ktx2'); + } + } + + const dstBytes = texture.getImage()!.byteLength; + logger.debug(`${prefix}: ${formatBytes(srcBytes)} → ${formatBytes(dstBytes)} bytes`); + }; + + const usesKTX2 = doc + .getRoot() + .listTextures() + .some((t) => t.getMimeType() === 'image/ktx2'); + + if (!usesKTX2) { + basisuExtension.dispose(); + } + }); +}; + +/********************************************************************************************** + * Utilities. + */ + +/** Create CLI parameters from the given options. Attempts to write only non-default options. */ +function createParams( + texture: Texture, + slots: string[], + channels: number, + options: ETC1SOptions | UASTCOptions, +): (string | number)[] { + const colorSpace = getTextureColorSpace(texture); + const params: (string | number)[] = ['--generate-mipmap']; + + + // See: https://github.com/KhronosGroup/KTX-Software/issues/600 + const isNormalMap = slots.find((slot) => /normal/.test(slot)); + + if (options.mode === Mode.UASTC) { + const _options = options as UASTCOptions; + params.push('--encode', 'uastc'); + params.push('--uastc-quality', _options.level ?? 
2); + + if (_options.zstd !== 0) { + params.push('--zstd', _options.zstd ?? 18); + } + } else { + + const _options = options as ETC1SOptions; + params.push('--encode', 'basis-lz'); + + params.push('--qlevel', _options.quality ?? 128); + params.push('--clevel', _options.compression ?? 1); + + if ( _options.rdo === false || isNormalMap) { + params.push('--no-endpoint-rdo', '--no-selector-rdo'); + } + } + + // See: https://github.com/donmccurdy/glTF-Transform/issues/215 + if (colorSpace === 'srgb') { + params.push('--assign-oetf', 'srgb', '--assign-primaries', 'bt709'); + } else if (colorSpace === 'srgb-linear') { + params.push('--assign-oetf', 'linear', '--assign-primaries', 'bt709'); + } else if (slots.length && !colorSpace) { + params.push('--assign-oetf', 'linear', '--assign-primaries', 'none'); + } + + if (channels === R) { + params.push('--format', 'R8_UNORM'); + } else if (channels === G || channels === (R | G)) { + params.push('--format', 'R8G8_UNORM'); + } else if (!(channels & A)) { + params.push('--format', colorSpace === 'srgb' ? 'R8G8B8_SRGB' : 'R8G8B8_UNORM'); + } else { + params.push('--format', colorSpace === 'srgb' ? 'R8G8B8A8_SRGB' : 'R8G8B8A8_UNORM'); + } + + // See: https://github.com/donmccurdy/glTF-Transform/pull/389#issuecomment-1089842185 + const threads = Math.max(2, NUM_CPUS); + params.push('--threads', threads); + + return params; +} + + +function isMultipleOfFour(value: number): boolean { + return value % 4 === 0; +} + +function isPowerofTwo(n:number) { + // Check if n is positive and n & (n-1) is 0 + return (n > 0) && ((n & (n - 1)) === 0); +} + + +function ceilMultipleOfFour(value: number): number { + if (value <= 4) return 4; + return value % 4 ? 
value + 4 - (value % 4) : value; +} +export function fitWithin(size: [number, number], limit: number): [number, number] { + + let [width, height] = size; + + if (width <= limit && height <= limit) return size; + + if (width > limit) { + height = Math.floor(height * (limit / width)); + width = limit; + } + + if (height > limit) { + width = Math.floor(width * (limit / height)); + height = limit; + } + + return [width, height]; +} diff --git a/source/server/utils/languages.ts b/source/server/utils/languages.ts new file mode 100644 index 000000000..8f917d727 --- /dev/null +++ b/source/server/utils/languages.ts @@ -0,0 +1,16 @@ + +export const sceneLanguages = ["EN", "ES", "DE", "NL", "JA", "FR", "HAW"] as const; +export type SceneLanguage = typeof sceneLanguages[number]; + +export const uiLanguages = ["EN", "FR"] as const; +export type UILanguage = typeof uiLanguages[number]; + + +export function isSceneLanguage(l: any): l is SceneLanguage | undefined { + return typeof l === "undefined" || sceneLanguages.indexOf(l) !== -1; +} + + +export function isUILanguage(l: any): l is UILanguage | undefined { + return typeof l === "undefined" || uiLanguages.indexOf(l) !== -1; +} diff --git a/source/server/utils/locals.ts b/source/server/utils/locals.ts index 87e6200d0..63ba7f417 100644 --- a/source/server/utils/locals.ts +++ b/source/server/utils/locals.ts @@ -1,6 +1,6 @@ import e, { NextFunction, Request, RequestHandler, Response } from "express"; -import {basename, dirname} from "path"; +import { basename, dirname } from "path"; import User, { isUserAtLeast, SafeUser } from "../auth/User.js"; import UserManager, { AccessType, AccessTypes, fromAccessLevel, toAccessLevel } from "../auth/UserManager.js"; import Vfs, { GetFileParams, Scene } from "../vfs/index.js"; @@ -8,35 +8,40 @@ import { BadRequestError, ForbiddenError, HTTPError, InternalError, NotFoundErro import Templates, { AcceptedLocales, locales } from "./templates.js"; import { Config } from "./config.js"; import { 
isEmbeddable } from "../routes/services/oembed.js"; +import { TaskScheduler } from "../tasks/scheduler.js"; +import { sceneLanguages, uiLanguages } from "./languages.js"; -export interface AppLocals extends Record{ - fileDir :string; - userManager :UserManager; - vfs :Vfs; - templates :Templates; +export interface AppLocals extends Record { + fileDir: string; + userManager: UserManager; + vfs: Vfs; + taskScheduler: TaskScheduler; + templates: Templates; config: Config; /** Length of a session, in milliseconds since epoch */ sessionMaxAge: number; } -export function getLocals(req :Request){ +export type AppParameters = Omit; + +export function getLocals(req: Request) { return req.app.locals as AppLocals; } -export interface SessionData extends SafeUser{ +export interface SessionData extends SafeUser { /** Expire date, in ms since epoch */ expires?: number; lang?: AcceptedLocales; } -export function getSession(req :Request){ - return req.session as SessionData|null|undefined; +export function getSession(req: Request) { + return req.session as SessionData | null | undefined; } -export function canonical(req :Request) :URL -export function canonical(req :Request, ref :string) :URL -export function canonical(req :Request, ref ?:string) :URL{ +export function canonical(req: Request): URL +export function canonical(req: Request, ref: string): URL +export function canonical(req: Request, ref?: string): URL { let host = getHost(req); return new URL(ref ?? 
req.path, host); } @@ -44,22 +49,22 @@ export function canonical(req :Request, ref ?:string) :URL{ /** * @throws {InternalError} if app.locals.userManager is not defined for this request */ -export function getUserManager(req :Request) :UserManager { - let userManager :UserManager = getLocals(req).userManager; +export function getUserManager(req: Request): UserManager { + let userManager: UserManager = getLocals(req).userManager; //istanbul ignore if - if(!userManager) throw new InternalError("Badly configured app : userManager is not defined in app.locals"); + if (!userManager) throw new InternalError("Badly configured app : userManager is not defined in app.locals"); return userManager } -export function getFileDir(req :Request) :string{ +export function getFileDir(req: Request): string { let fileDir = getLocals(req).fileDir; - if(!fileDir) throw new InternalError("Badly configured app : fileDir is not a valid string"); + if (!fileDir) throw new InternalError("Badly configured app : fileDir is not a valid string"); return fileDir; } -export function isUser(req: Request, res:Response, next :NextFunction){ +export function isUser(req: Request, res: Response, next: NextFunction) { res.append("Cache-Control", "private"); - if((req.session as User).uid ) next(); + if ((req.session as User).uid) next(); else next(new UnauthorizedError()); } @@ -68,25 +73,25 @@ export function isUser(req: Request, res:Response, next :NextFunction){ /** * Special case to allow user creation if no user exists in the database */ -export function isAdministratorOrOpen(req: Request, res:Response, next :NextFunction){ - isAdministrator(req, res, (err)=>{ - if(!err) return next(); - Promise.resolve().then(async ()=>{ +export function isAdministratorOrOpen(req: Request, res: Response, next: NextFunction) { + isAdministrator(req, res, (err) => { + if (!err) return next(); + Promise.resolve().then(async () => { let userManager = getUserManager(req); let users = (await userManager.getUsers()); - 
if(users.length == 0) return; + if (users.length == 0) return; else throw err; - }).then(()=>next(), next); + }).then(() => next(), next); }); } /** * Checks if user.isAdministrator is true * Not the same thing as canAdmin() that checks if the user has admin rights over a scene */ -export function isAdministrator(req: Request, res:Response, next :NextFunction){ +export function isAdministrator(req: Request, res: Response, next: NextFunction) { res.append("Cache-Control", "private"); - - if((req.session as User).level == "admin") next(); + + if ((req.session as User).level == "admin") next(); else next(new UnauthorizedError()); } @@ -94,9 +99,9 @@ export function isAdministrator(req: Request, res:Response, next :NextFunction){ * Checks if user.isCreator is true * Not the same thing as canWrite() that checks if the user has write rights over a scene */ -export function isCreator(req: Request, res:Response, next :NextFunction){ +export function isCreator(req: Request, res: Response, next: NextFunction) { res.append("Cache-Control", "private"); - if ( isUserAtLeast((req.session as User), "create") ) next(); + if (isUserAtLeast((req.session as User), "create")) next(); else next(new UnauthorizedError()); } @@ -104,22 +109,22 @@ export function isCreator(req: Request, res:Response, next :NextFunction){ * Checks if user.isCreator is true * Not the same thing as canWrite() that checks if the user has write rights over a scene */ -export function isManage(req: Request, res:Response, next :NextFunction){ +export function isManage(req: Request, res: Response, next: NextFunction) { res.append("Cache-Control", "private"); - if ( isUserAtLeast((req.session as User), "manage") ) next(); + if (isUserAtLeast((req.session as User), "manage")) next(); else next(new UnauthorizedError()); } /** * Checks if user is a member of a group or is at least Manage */ -export async function isMemberOrManage(req: Request, res:Response, next :NextFunction){ +export async function 
isMemberOrManage(req: Request, res: Response, next: NextFunction) { res.append("Cache-Control", "private"); let userManager = getUserManager(req); let user = getUser(req) - let {group} = req.params; + let { group } = req.params; const canSeeGroup = user && (await userManager.isMemberOfGroup(user.uid, group) || isUserAtLeast(user, "manage")); - if(canSeeGroup) next(); + if (canSeeGroup) next(); else next(new UnauthorizedError()); } @@ -128,12 +133,12 @@ export async function isMemberOrManage(req: Request, res:Response, next :NextFun * Usefull for conditional rate-limiting * @example either(isAdministrator, isUser, rateLimit({...})) */ -export function either(...handlers:Readonly) :RequestHandler{ - return (req, res, next)=>{ +export function either(...handlers: Readonly): RequestHandler { + return (req, res, next) => { let mdw = handlers[0]; - if(!mdw) return next(new UnauthorizedError()); - return mdw(req, res, (err)=>{ - if(!err) return next(); + if (!mdw) return next(new UnauthorizedError()); + return mdw(req, res, (err) => { + if (!err) return next(); else if (err instanceof UnauthorizedError) return either(...handlers.slice(1))(req, res, next); else return next(err); }); @@ -145,29 +150,29 @@ export function either(...handlers:Readonly) :RequestHandler{ * Caches result in `req.locals.access` so it's not a problem to apply a generic perms check * to a group of routes then more specific ACL checks for individual handlers */ -function _perms(check:number,req :Request, res :Response, next :NextFunction){ - let {scene} = req.params; - let {level = "create", uid = null} = (req.session ??{})as SafeUser; - if(!scene) throw new BadRequestError("no scene parameter in this request"); - if(check < 0 || AccessTypes.length <= check) throw new InternalError(`Bad permission level : ${check}`); +function _perms(check: number, req: Request, res: Response, next: NextFunction) { + let { scene } = req.params; + let { level = "create", uid = null } = (req.session ?? 
{}) as SafeUser; + if (!scene) throw new BadRequestError("no scene parameter in this request"); + if (check < 0 || AccessTypes.length <= check) throw new InternalError(`Bad permission level : ${check}`); res.set("Vary", "Cookie, Authorization"); - if(level == "admin"){ + if (level == "admin") { res.locals.access = "admin" as AccessType; return next(); } - + let userManager = getUserManager(req); - (res.locals.access? + (res.locals.access ? Promise.resolve(res.locals.access) : userManager.getAccessRights(scene, uid) - ).then( access => { + ).then(access => { res.locals.access = access; const lvl = toAccessLevel(access); - if(check <= lvl){ + if (check <= lvl) { next(); - } else if(req.method === "GET" || lvl <= toAccessLevel("none")){ + } else if (req.method === "GET" || lvl <= toAccessLevel("none")) { next(new NotFoundError(`Can't find scene ${scene}. It may be private or not exist entirely.`)) } else { //User has insuficient level but can read the scene @@ -189,33 +194,40 @@ export const canWrite = _perms.bind(null, toAccessLevel("write")); */ export const canAdmin = _perms.bind(null, toAccessLevel("admin")); -export function getUser(req :Request){ +export function getUser(req: Request) { return (req.session && req.session.username && req.session.uid && req.session.level) ? - req.session as SafeUser : null; + req.session as SafeUser : null; } -export function getUserId(req :Request){ - const user = getUser(req); - return user? user.uid : null; +export function getUserId(req: Request) { + const user = getUser(req); + return user ? 
user.uid : null; } -export function getFileParams(req :Request):GetFileParams{ - let {scene, name} = req.params; - if(!scene) throw new BadRequestError(`Scene parameter not provided`); - if(!name) throw new BadRequestError(`File parameter not provided`); +export function getFileParams(req: Request): GetFileParams { + let { scene, name } = req.params; + if (!scene) throw new BadRequestError(`Scene parameter not provided`); + if (!name) throw new BadRequestError(`File parameter not provided`); - return {scene, name}; + return { scene, name }; } -export function getVfs(req :Request){ - let vfs :Vfs = getLocals(req).vfs; +export function getVfs(req: Request) { + let vfs: Vfs = getLocals(req).vfs; //istanbul ignore if - if(!vfs) throw new InternalError("Badly configured app : vfs is not defined in app.locals"); + if (!vfs) throw new InternalError("Badly configured app : vfs is not defined in app.locals"); return vfs; } -export function getHost(req :Request) :URL{ - let host = (req.app.get("trust proxy")? req.get("X-Forwarded-Host") : null) ?? req.get("Host"); + +export function getTaskScheduler(req: Request) { + const scheduler = getLocals(req).taskScheduler; + if (!scheduler) throw new InternalError("Badly configured app: task scheduler is not defined in app.locals"); + return scheduler; +} + +export function getHost(req: Request): URL { + let host = (req.app.get("trust proxy") ? req.get("X-Forwarded-Host") : null) ?? 
req.get("Host"); return new URL(`${req.protocol}://${host}`); } @@ -223,13 +235,13 @@ export function getHost(req :Request) :URL{ * Validates if a requested redirect URL would be within the current origin * Also make the URL canonical (http://example.com/foo) */ -export function validateRedirect(req :Request, redirect :string|any): URL{ +export function validateRedirect(req: Request, redirect: string | any): URL { let host = getHost(req); - try{ + try { let target = new URL(redirect, host); - if(target.origin !== host.origin) throw new BadRequestError(`Redirect origin mismatch`); + if (target.origin !== host.origin) throw new BadRequestError(`Redirect origin mismatch`); return target; - }catch(e){ + } catch (e) { throw new BadRequestError(`Bad Redirect parameter`); } @@ -239,9 +251,9 @@ export function validateRedirect(req :Request, redirect :string|any): URL{ * Tries to determine if a request is for embedded content * @param req */ -export function isEmbed(req :Request) :boolean{ - if(typeof req.query.embed === "string") return req.query.embed != "0" && req.query.embed != "false"; - if(typeof req.headers["sec-fetch-dest"] === "string") return req.headers["sec-fetch-dest"].indexOf("frame") !== -1 || req.headers["sec-fetch-dest"] === "embed"; +export function isEmbed(req: Request): boolean { + if (typeof req.query.embed === "string") return req.query.embed != "0" && req.query.embed != "false"; + if (typeof req.headers["sec-fetch-dest"] === "string") return req.headers["sec-fetch-dest"].indexOf("frame") !== -1 || req.headers["sec-fetch-dest"] === "embed"; return false; } @@ -249,23 +261,22 @@ export function isEmbed(req :Request) :boolean{ * Common properties for template rendering. 
* All props required for navbar/footer rendering should be set here */ -export function useTemplateProperties(req :Request, res:Response, next?:NextFunction){ +export function useTemplateProperties(req: Request, res: Response, next?: NextFunction) { const session = getSession(req); - const {config} = getLocals(req); + const { config } = getLocals(req); const user = getUser(req); - const {search} = req.query; + const { search } = req.query; const lang = session?.lang ?? (req.acceptsLanguages(locales) || "en"); Object.assign(res.locals, { lang, - languages: [ - {selected: lang === "fr", code: "fr", key: "lang.fr"}, - {selected: lang === "en", code: "en", key: "lang.en"}, - ], - scene_languages: [ - {selected: lang === "fr", code: "fr", key: "lang.fr"}, - {selected: lang === "en", code: "en", key: "lang.en"}, - {selected: lang === "es", code: "es", key: "lang.es"}, - ], + languages: uiLanguages.map(l => { + const code = l.toLowerCase(); + return { selected: lang === code, code, key: `lang.${code}` }; + }), + scene_languages: sceneLanguages.map(l => { + const code = l.toLowerCase(); + return { selected: lang === code, code, key: `lang.${code}` }; + }), user: user, location: req.originalUrl, search, @@ -274,6 +285,6 @@ export function useTemplateProperties(req :Request, res:Response, next?:NextFunc canonical: canonical(req).toString(), root_url: canonical(req, "/").toString(), }); - if(next) next(); + if (next) next(); } diff --git a/source/server/utils/schema/default.ts b/source/server/utils/schema/default.ts index 406bb6cee..1294c0cb0 100644 --- a/source/server/utils/schema/default.ts +++ b/source/server/utils/schema/default.ts @@ -1,3 +1,6 @@ +import { IDocument, IScene } from "./document.js"; +import { ISetup } from "./setup.js"; + /** * Copied from DPO-Voyager * @@ -136,12 +139,11 @@ const default_doc = { "type": "environment" }] } as const; - /** * This is a workaround for JSON imports syntax changing every other day and the fact we _might_ mutate the document in 
place one returned * @returns a pristine default document that we are free to mutate or otherwise modify */ -export default async function getDefaultDocument(){ +export default function getDefaultDocument(): IDocument&{nodes:number[], scene: 0,scenes:[IScene], setups:[ISetup]}{ /** @fixme structuredClone is only available starting with node-17. Remove this check once node-16 support is dropped */ //@ts-ignore return (typeof structuredClone ==="function")?structuredClone(default_doc) : JSON.parse(JSON.stringify(default_doc)); diff --git a/source/server/utils/templates.ts b/source/server/utils/templates.ts index 69d8369b8..78e1803e1 100644 --- a/source/server/utils/templates.ts +++ b/source/server/utils/templates.ts @@ -174,6 +174,14 @@ export default class Templates{ return this.#dir; } + /** + * Get the translate function of the templates renderer + * Useful to translate a string of text outside of templates eg: when reporting an error + */ + get t(){ + return this.#i18n.t; + } + async listPartials(dir = this.#partialsDir) :Promise{ if(this.#partials) return this.#partials; diff --git a/source/server/utils/wrapAsync.ts b/source/server/utils/wrapAsync.ts index 3e0ff357c..22783afd4 100644 --- a/source/server/utils/wrapAsync.ts +++ b/source/server/utils/wrapAsync.ts @@ -2,7 +2,7 @@ import { Request, RequestHandler, Response, NextFunction } from "express"; -interface AsyncRequestHandler{ +export interface AsyncRequestHandler{ ( req: Request, res: Response, diff --git a/source/server/vfs/Base.ts b/source/server/vfs/Base.ts index 3b444fd51..2441bfffa 100644 --- a/source/server/vfs/Base.ts +++ b/source/server/vfs/Base.ts @@ -1,20 +1,41 @@ -import open, {Database, DbController} from "./helpers/db.js"; +import {Database, DbController} from "./helpers/db.js"; import path from "path"; -import { InternalError, NotFoundError } from "../utils/errors.js"; +import { NotFoundError } from "../utils/errors.js"; import { FileProps } from "./types.js"; +import { mkdir } from 
"fs/promises"; export type Isolate = (this: that, vfs :that)=> Promise; +/** + * Branded type to make a distinction between absolute path and ROOT_DIR relative paths + */ +export type RootRelativePath = string & {_brand: "RootRelativePath"}; + export default abstract class BaseVfs extends DbController{ constructor(protected rootDir :string, db :Database){ super(db); } + public get baseDir(){ return this.rootDir; } + /** + * Temporary directory to store in-transit files. + * should be in the same volume as `Vfs.objectsDir` and `Vfs.artifactsDir` + * to ensure files can be moved atomically between those folders. + */ public get uploadsDir(){ return path.join(this.rootDir, "uploads"); } + /** + * Main objects directory + */ public get objectsDir(){ return path.join(this.rootDir, "objects"); } + /** + * Secondary directory used to store task artifacts + * Those are not considered long-term persistent: They can be cleaned-up to save space. + * On the other hand, they _shouldn't_ be cleaned before their referencing task + */ + public get artifactsDir(){ return path.join(this.rootDir, "artifacts"); } public filepath(f :FileProps|string|{hash:string}){ if(typeof f ==="string"){ @@ -26,6 +47,35 @@ export default abstract class BaseVfs extends DbController{ } } + + /** + * Compute path to a file relative to {@link baseDir }. Accepts either absolute paths or paths that are already relative to baseDir. + * This is not a sanitize function and won't prevent bad paths from breaking out of baseDir. + * @param filepath + */ + public relative(filepath: string){ + return path.relative(this.baseDir, path.resolve(this.baseDir, filepath)) as RootRelativePath; + } + + public absolute(filepath: RootRelativePath|string){ + return path.isAbsolute(filepath)? 
filepath: path.resolve(this.baseDir, filepath); + } + + + + /** + * Create an artifact directory for this task + */ + public async createTaskWorkspace(id: number){ + const dir = this.getTaskWorkspace(id); + await mkdir( dir, {recursive: true}); + return dir; + } + + public getTaskWorkspace(id: number){ + return path.join(this.artifactsDir, id.toString(10)); + } + abstract close() :Promise; public abstract isOpen :boolean; } \ No newline at end of file diff --git a/source/server/vfs/Files.ts b/source/server/vfs/Files.ts index c2d94c28d..67eda93a2 100644 --- a/source/server/vfs/Files.ts +++ b/source/server/vfs/Files.ts @@ -8,7 +8,7 @@ import { CommonFileParams, DataStream, DocProps, FileProps, GetFileParams, GetFi import { Transaction } from "./helpers/db.js"; import { FileHandle } from "fs/promises"; -import { Duplex, Readable, Transform } from "stream"; +import { Duplex, Readable, Transform, Writable } from "stream"; import { pipeline } from "stream/promises"; import { transform } from "typescript"; @@ -85,6 +85,49 @@ export default abstract class FilesVfs extends BaseVfs{ } } + /** + * Faster alternative to {@link writeFile} that only computes the file's hash in-place then hard-links it to its destination + * + * The source file can then safely be unlinked + */ + async copyFile( + filepath :string, + params :WriteFileParams + ) :Promise{ + let handle = await fs.open(filepath, constants.O_RDONLY); + let hashsum = createHash("sha256"); + let size = 0; + try{ + let rs = handle.createReadStream(); + await pipeline( + rs, + new Writable({ + write(chunk, encoding, callback){ + hashsum.update(chunk); + size += chunk.length; + callback(null); + } + }), + ); + let hash = hashsum.digest("base64url"); + let destfile = path.join(this.objectsDir, hash); + + return await this.createFile(params, async ({id})=>{ + try{ + // It's always possible for the transaction to fail afterwards, creating a loose object + // However it's not possible to safely clean it up without race 
conditions over any other row that may be referencing it + await fs.link(filepath, destfile); + }catch(e){ + if((e as any).code != "EEXIST") throw e; + //If a file with the same hash exists, we presume it's the same file and don't overwrite it. + } + return {hash, size}; + }); + }finally{ + await handle.close(); + } + } + /** * Write a document for a scene diff --git a/source/server/vfs/helpers/db.ts b/source/server/vfs/helpers/db.ts index 8e62a7156..37d598943 100644 --- a/source/server/vfs/helpers/db.ts +++ b/source/server/vfs/helpers/db.ts @@ -24,6 +24,7 @@ pgtypes.setTypeParser(20 /* BIGINT */, function parseBigInt(val){ const debug = debuglog("pg:trace"); function safeDebugError(e:Error|unknown, sql: string){ + if(!debug.enabled) return; try{ debug(expandSQLError(e, sql).toString()); }catch(e){ @@ -33,6 +34,7 @@ function safeDebugError(e:Error|unknown, sql: string){ export interface DatabaseHandle{ /** + * @deprecated it is way more efficient to run `db.all("... LIMIT 1")[0]` because it will not deadlock as easily * Creates a cursor that will only fetch the first row that would be returned by the query */ get(sql: string, params?: any[]):Promise; @@ -120,7 +122,9 @@ export function toHandle(db:Pool|PoolClient|Client) :Omit { debug("connect to database at : "+ uri) - let pool = new Pool({connectionString: uri}); + let pool = new Pool({ + connectionString: uri, + }); pool.on("error", (err, client)=>{ console.error("psql client pool error :", err); diff --git a/source/server/vfs/index.ts b/source/server/vfs/index.ts index 49793b54b..148954e09 100644 --- a/source/server/vfs/index.ts +++ b/source/server/vfs/index.ts @@ -34,17 +34,21 @@ class Vfs extends BaseVfs{ static async Open(rootDir :string, opts:VfsOptions&Required> ):Promise static async Open(rootDir :string, {db, database_uri, createDirs=true, forceMigration = true} :VfsOptions = {} ){ if(!db && !database_uri) throw new Error(`No DB connection method provided. 
Can't open VFS`); - if(createDirs){ - await fs.mkdir(path.join(rootDir, "objects"), {recursive: true}); - await fs.rm(path.join(rootDir, "uploads"), {recursive: true, force: true}); - await fs.mkdir(path.join(rootDir, "uploads"), {recursive: true}); - } + db ??= await open({ uri: database_uri!, forceMigration, }); let vfs = new Vfs(rootDir, db); + + if(createDirs){ + await Promise.all([ + fs.mkdir(vfs.objectsDir, {recursive: true}), + fs.mkdir(vfs.uploadsDir, {recursive: true}), + fs.mkdir(vfs.artifactsDir, {recursive: true}), + ]); + } return vfs; } diff --git a/source/server/vfs/vfs.test.ts b/source/server/vfs/vfs.test.ts index c2dc68cbc..7f53f162f 100755 --- a/source/server/vfs/vfs.test.ts +++ b/source/server/vfs/vfs.test.ts @@ -42,2073 +42,2101 @@ function sceneProps(id:number): {[P in keyof Required]: Function|any}{ } describe("Vfs", function(){ - this.beforeEach(async function(){ - this.dir = await fs.mkdtemp(path.join(tmpdir(), `vfs_tests`)); - this.uploads = path.join(this.dir, "uploads"); //For quick reference - }); - this.afterEach(async function(){ - await fs.rm(this.dir, {recursive: true}); - }) - it("creates upload directory", async function(){ - let vfs = await Vfs.Open(this.dir, {db: {} as any}); - await expect(fs.access(path.join(this.dir, "uploads"))).to.be.fulfilled; - }); - - describe("isolate", function(){ - let vfs :Vfs; - this.beforeEach(async function(){ - this.db_uri = await getUniqueDb(this.test?.title); - vfs = await Vfs.Open(this.dir, {database_uri: this.db_uri}); - }) - this.afterEach(async function(){ - await vfs.close(); - await dropDb(this.db_uri); + describe("relative()", function(){ + let vfs: Vfs; + this.beforeEach(function(){ + vfs = new Vfs("/path/to/data", {} as any); }) - it("can rollback on error", async function(){ - await expect(vfs.isolate(async (vfs)=>{ - await vfs.createScene("foo"); - await vfs.createScene("foo"); - })).to.be.rejected; - expect(await vfs.getScenes()).to.have.property("length", 0); + it("computes path 
relative to baseDir", function(){ + expect(vfs.relative("/path/to/data/test.bin")).to.equal("test.bin"); }); - - it("reuses a connection when nested", async function(){ - await expect(vfs.isolate( async (v2)=>{ - await v2.isolate(async (v3)=>{ - expect(v3._db).to.equal(v2._db); - }); - })).to.be.fulfilled; + it("works with paths that are already relative", function(){ + expect(vfs.relative("test.bin")).to.equal("test.bin"); }); + }); - it("can be nested (success)", async function(){ - let scenes = await expect(vfs.isolate( async (v2)=>{ - await v2.getScenes(); - await v2.isolate(async (v3)=>{ - await v3.getScenes(); - await v3.createScene("foo"); - }); - await v2.getScenes(); - await v2.createScene("bar") - return await v2.getScenes(); - })).to.be.fulfilled; - expect(scenes).to.have.property("length", 2); - await expect(vfs.getScenes()).to.eventually.deep.equal(scenes); + describe("absolute()", function(){ + let vfs: Vfs; + this.beforeEach(function(){ + vfs = new Vfs("/path/to/data", {} as any); + }) + it("don't touch absoltue paths", function(){ + expect(vfs.absolute("/something/foo/bar")).to.equal("/something/foo/bar"); }); - - it("can be nested (with caught error)", async function(){ - let scenes = await expect(vfs.isolate( async (v2)=>{ - await v2.createScene("foo"); - //This isolate rolls back but since we don't propagate the error - //the parent will succeed - await v2.isolate(async (v3)=>{ - await v3.createScene("bar"); - //Force this transaction to roll back - throw new Error("TEST"); - }).catch(e=>{ - if(e.message !== "TEST") throw e; - }); - return await v2.getScenes(); - })).to.be.fulfilled; - expect(scenes).to.have.property("length", 1); - expect(scenes[0]).to.have.property("name", "foo"); - expect(await vfs.getScenes()).to.deep.equal(scenes); + it("resolves paths from root dir", function(){ + expect(vfs.absolute("scenes/foo.txt")).to.equal("/path/to/data/scenes/foo.txt"); }); - - it("is properly closed on success", async function(){ - let 
_transaction:Vfs|null =null; - await expect(vfs.isolate(async tr=>{ - _transaction = tr; - expect(_transaction).to.have.property("isOpen", true); - })).to.be.fulfilled; - expect(_transaction).to.have.property("isOpen", false); - }) - - it("is properly closed on error", async function(){ - let _transaction:Vfs|null =null; - await expect(vfs.isolate(async tr=>{ - _transaction = tr; - expect(_transaction).to.have.property("isOpen", true); - throw new Error("dummy"); - })).to.be.rejectedWith("dummy"); - expect(_transaction).to.have.property("isOpen", false); + + }) + describe("", function(){ + this.beforeEach(async function(){ + this.dir = await fs.mkdtemp(path.join(tmpdir(), `vfs_tests`)); + this.uploads = path.join(this.dir, "uploads"); //For quick reference + }); + this.afterEach(async function(){ + await fs.rm(this.dir, {recursive: true}); }) - }); - - describe("validate search params", function(){ - it("accepts no parameters", function(){ - expect(()=>ScenesVfs._validateSceneQuery({})).not.to.throw(); + it("creates upload directory", async function(){ + let vfs = await Vfs.Open(this.dir, {db: {} as any}); + await expect(fs.access(path.join(this.dir, "uploads"))).to.be.fulfilled; }); - it("requires limit to be a positive integer", function(){ - [null, "foo", 0.5, "0", 0, -1, 101].forEach((limit)=>{ - expect(()=>ScenesVfs._validateSceneQuery({limit} as any), `{limit: ${limit}}`).to.throw(); + describe("isolate", function(){ + let vfs :Vfs; + this.beforeEach(async function(){ + this.db_uri = await getUniqueDb(this.test?.title); + vfs = await Vfs.Open(this.dir, {database_uri: this.db_uri}); + }) + this.afterEach(async function(){ + await vfs.close(); + await dropDb(this.db_uri); + }) + it("can rollback on error", async function(){ + await expect(vfs.isolate(async (vfs)=>{ + await vfs.createScene("foo"); + await vfs.createScene("foo"); + })).to.be.rejected; + expect(await vfs.getScenes()).to.have.property("length", 0); }); - [1, 10, 100].forEach((limit)=>{ - 
expect(()=>ScenesVfs._validateSceneQuery({limit} as any)).not.to.throw(); + it("reuses a connection when nested", async function(){ + await expect(vfs.isolate( async (v2)=>{ + await v2.isolate(async (v3)=>{ + expect(v3._db).to.equal(v2._db); + }); + })).to.be.fulfilled; }); - }); - it("requires offset to be a positive integer", function(){ - [null, "foo", 0.5, "0", -1].forEach((offset)=>{ - expect(()=>ScenesVfs._validateSceneQuery({offset} as any), `{offset: ${offset}}`).to.throw(); + it("can be nested (success)", async function(){ + let scenes = await expect(vfs.isolate( async (v2)=>{ + await v2.getScenes(); + await v2.isolate(async (v3)=>{ + await v3.getScenes(); + await v3.createScene("foo"); + }); + await v2.getScenes(); + await v2.createScene("bar") + return await v2.getScenes(); + })).to.be.fulfilled; + expect(scenes).to.have.property("length", 2); + await expect(vfs.getScenes()).to.eventually.deep.equal(scenes); }); - [0, 1, 10, 100, 1000].forEach((offset)=>{ - expect(()=>ScenesVfs._validateSceneQuery({offset} as any)).not.to.throw(); + it("can be nested (with caught error)", async function(){ + let scenes = await expect(vfs.isolate( async (v2)=>{ + await v2.createScene("foo"); + //This isolate rolls back but since we don't propagate the error + //the parent will succeed + await v2.isolate(async (v3)=>{ + await v3.createScene("bar"); + //Force this transaction to roll back + throw new Error("TEST"); + }).catch(e=>{ + if(e.message !== "TEST") throw e; + }); + return await v2.getScenes(); + })).to.be.fulfilled; + expect(scenes).to.have.property("length", 1); + expect(scenes[0]).to.have.property("name", "foo"); + expect(await vfs.getScenes()).to.deep.equal(scenes); }); - }); - it("requires orderDirection to match", function(){ - ["AS", "DE", null, 0, -1, 1, "1"].forEach((orderDirection)=>{ - expect(()=>ScenesVfs._validateSceneQuery({orderDirection} as any), `{orderDirection: ${orderDirection}}`).to.throw("Invalid orderDirection"); - }); - ["ASC", "DESC", "asc", 
"desc"].forEach((orderDirection)=>{ - expect(()=>ScenesVfs._validateSceneQuery({orderDirection} as any)).not.to.throw(); + it("is properly closed on success", async function(){ + let _transaction:Vfs|null =null; + await expect(vfs.isolate(async tr=>{ + _transaction = tr; + expect(_transaction).to.have.property("isOpen", true); + })).to.be.fulfilled; + expect(_transaction).to.have.property("isOpen", false); }) - }); - - it("requires orderBy to match", function(){ - ["foo", 1, -1, null].forEach((orderBy)=>{ - expect(()=>ScenesVfs._validateSceneQuery({orderBy} as any), `{orderBy: ${orderBy}}`).to.throw(`Invalid orderBy`); - }); - ["ctime", "mtime", "name"].forEach((orderBy)=>{ - expect(()=>ScenesVfs._validateSceneQuery({orderBy} as any), `{orderBy: "${orderBy}"}`).not.to.throw(); - }); + it("is properly closed on error", async function(){ + let _transaction:Vfs|null =null; + await expect(vfs.isolate(async tr=>{ + _transaction = tr; + expect(_transaction).to.have.property("isOpen", true); + throw new Error("dummy"); + })).to.be.rejectedWith("dummy"); + expect(_transaction).to.have.property("isOpen", false); + }) }); - it("sanitizes access values", function(){ - ["read","write","admin","none"] - .forEach( a => { - expect(()=>ScenesVfs._validateSceneQuery({access: a as any}),`expected ${a ?? typeof a} to be an accepted access value`).not.to.throw(); + describe("validate search params", function(){ + it("accepts no parameters", function(){ + expect(()=>ScenesVfs._validateSceneQuery({})).not.to.throw(); }); - ["foo",true, 1] - .forEach ( a=> { - expect(()=>ScenesVfs._validateSceneQuery({access: a as any}),`expected ${a ?? 
typeof a} to not be an accepted access value`).to.throw(`Invalid access type requested : ${a.toString()}`); - }); - }); + it("requires limit to be a positive integer", function(){ + [null, "foo", 0.5, "0", 0, -1, 101].forEach((limit)=>{ + expect(()=>ScenesVfs._validateSceneQuery({limit} as any), `{limit: ${limit}}`).to.throw(); + }); - it("sanitizes authors username", function(){ - [ "Jane" ].forEach(a=>{ - expect(()=>ScenesVfs._validateSceneQuery({author: a})).not.to.throw(); - }); - [ null, 0].forEach(a=>{ - expect(()=>ScenesVfs._validateSceneQuery({author: a as any}),`expected ${a ?? typeof a} to not be an accepted author value`).to.throw(`[400] Invalid author filter request: ${a}`); + [1, 10, 100].forEach((limit)=>{ + expect(()=>ScenesVfs._validateSceneQuery({limit} as any)).not.to.throw(); + }); }); - }) - }); - - describe("", function(){ - let vfs :Vfs, database_uri:string; - //@ts-ignore - const run = async (sql: ISqlite.SqlType, ...params: any[])=> await vfs.db.run(sql, ...params); - //@ts-ignore - const get = async (sql: ISqlite.SqlType, ...params: any[])=> await vfs.db.get(sql, ...params); - //@ts-ignore - const all = async (sql: ISqlite.SqlType, ...params: any[])=> await vfs.db.all(sql, ...params); - this.beforeEach(async function(){ - database_uri = await getUniqueDb(); - vfs = await Vfs.Open(this.dir, {database_uri}); - }); - this.afterEach(async function(){ - await vfs.close(); - await dropDb(database_uri); - }) + it("requires offset to be a positive integer", function(){ + [null, "foo", 0.5, "0", -1].forEach((offset)=>{ + expect(()=>ScenesVfs._validateSceneQuery({offset} as any), `{offset: ${offset}}`).to.throw(); + }); - describe("createScene()", function(){ - it("insert a new scene", async function(){ - await expect(vfs.createScene("foo")).to.be.fulfilled; - }) - it("throws on duplicate name", async function(){ - await expect(vfs.createScene("foo")).to.be.fulfilled; - await expect(vfs.createScene("foo")).to.be.rejectedWith("exist"); + [0, 1, 10, 
100, 1000].forEach((offset)=>{ + expect(()=>ScenesVfs._validateSceneQuery({offset} as any)).not.to.throw(); + }); }); - describe("uid handling", function(){ - let old :typeof Uid.make; - let returns :number[] = []; - this.beforeEach(function(){ - old = Uid.make; - returns = []; - Uid.make = ()=> { - let r = returns.pop(); - if (typeof r === "undefined") throw new Error("No mock result provided"); - return r; - }; - }); - this.afterEach(function(){ - Uid.make = old; + it("requires orderDirection to match", function(){ + ["AS", "DE", null, 0, -1, 1, "1"].forEach((orderDirection)=>{ + expect(()=>ScenesVfs._validateSceneQuery({orderDirection} as any), `{orderDirection: ${orderDirection}}`).to.throw("Invalid orderDirection"); }); + ["ASC", "DESC", "asc", "desc"].forEach((orderDirection)=>{ + expect(()=>ScenesVfs._validateSceneQuery({orderDirection} as any)).not.to.throw(); + }) + }); - it("fails if no free uid can be found", async function(){ - returns = [1, 1, 1, 1]; - await expect(vfs.createScene("bar")).to.be.fulfilled; - await expect(vfs.createScene("baz")).to.be.rejectedWith("Unable to find a free id"); + it("requires orderBy to match", function(){ + ["foo", 1, -1, null].forEach((orderBy)=>{ + expect(()=>ScenesVfs._validateSceneQuery({orderBy} as any), `{orderBy: ${orderBy}}`).to.throw(`Invalid orderBy`); }); - it("retry", async function(){ - returns = [1, 1, 2]; - await expect(vfs.createScene("bar")).to.be.fulfilled; - await expect(vfs.createScene("baz")).to.be.fulfilled; + ["ctime", "mtime", "name"].forEach((orderBy)=>{ + expect(()=>ScenesVfs._validateSceneQuery({orderBy} as any), `{orderBy: "${orderBy}"}`).not.to.throw(); }); + }); - it("prevents scene name containing uid", async function(){ - returns = [1, 2]; - let scene_id = await expect(vfs.createScene("bar#1")).to.be.fulfilled; - expect(scene_id).to.equal(2); + it("sanitizes access values", function(){ + ["read","write","admin","none"] + .forEach( a => { + expect(()=>ScenesVfs._validateSceneQuery({access: a 
as any}),`expected ${a ?? typeof a} to be an accepted access value`).not.to.throw(); }); - }) - it("sets scene author", async function(){ - const userManager = new UserManager(vfs._db); - const user = await userManager.addUser("alice", "xxxxxxxx", "create"); - let id = await expect(vfs.createScene("foo", user.uid)).to.be.fulfilled; - try{ - let s = await vfs.getScene(id, user.uid); - expect(s).to.have.property("access").to.be.equal("admin"); - }catch(e){ - console.log("createScene :", e); - throw e; - } - }); - - it("sets custom scene permissions", async function(){ - const userManager = new UserManager(vfs._db); - const user = await userManager.addUser("alice", "xxxxxxxx", "create"); - let id = await expect(vfs.createScene("foo", user.uid)).to.be.fulfilled; - await userManager.grant(id, user.uid, "write"); - await userManager.setPublicAccess(id, "none"); - let s = await vfs.getScene(id, user.uid); - expect(s.access).to.deep.equal("write"); + ["foo",true, 1] + .forEach ( a=> { + expect(()=>ScenesVfs._validateSceneQuery({access: a as any}),`expected ${a ?? typeof a} to not be an accepted access value`).to.throw(`Invalid access type requested : ${a.toString()}`); + }); }); - }); - describe("getScenes()", function(){ - it("get an empty list", async function(){ - let scenes = await vfs.getScenes(); - expect(scenes).to.have.property("length", 0); + it("sanitizes authors username", function(){ + [ "Jane" ].forEach(a=>{ + expect(()=>ScenesVfs._validateSceneQuery({author: a})).not.to.throw(); + }); + [ null, 0].forEach(a=>{ + expect(()=>ScenesVfs._validateSceneQuery({author: a as any}),`expected ${a ?? 
typeof a} to not be an accepted author value`).to.throw(`[400] Invalid author filter request: ${a}`); + }); }) + }); - it("get a list of scenes", async function(){ - let scene_id = await vfs.createScene("foo"); - let scenes = await vfs.getScenes(); - expect(scenes).to.have.property("length", 1); - let scene = scenes[0]; - - let props = sceneProps(scene_id); - let key:keyof Scene; - for(key in props){ - if(typeof props[key] ==="undefined"){ - expect(scene, `${(scene as any)[key]}`).not.to.have.property(key); - }else if(typeof props[key] === "function"){ - expect(scene, `scene.${key} should match expected class ${props[key].constructor.name}`).to.have.property(key).instanceof(props[key]); - }else{ - expect(scene, `scene.${key} should match expected value ${props[key]}`).to.have.property(key).to.deep.equal(props[key]); - } - } - }); - - it("get proper ctime and mtime from last document edit", async function(){ - let t2 = new Date(); - let t1 = new Date(Date.now()-100000); - let scene_id = await vfs.createScene("foo"); - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let $doc_id = (await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; - //Force ctime - await run(`UPDATE scenes SET ctime = $1`, [t1]); - await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [t2, $doc_id]); - let scenes = await vfs.getScenes(); - expect(scenes).to.have.property("length", 1); - expect(scenes[0].ctime.valueOf(), `ctime is ${scenes[0].ctime}, expected ${t1}`).to.equal(t1.valueOf()); - expect(scenes[0].mtime.valueOf(), `mtime is ${scenes[0].mtime}, expected ${t2}`).to.equal(t2.valueOf()); - }); + describe("", function(){ + let vfs :Vfs, database_uri:string; + //@ts-ignore + const run = async (sql: ISqlite.SqlType, ...params: any[])=> await vfs.db.run(sql, ...params); + //@ts-ignore + const get = async (sql: ISqlite.SqlType, 
...params: any[])=> await vfs.db.get(sql, ...params); + //@ts-ignore + const all = async (sql: ISqlite.SqlType, ...params: any[])=> await vfs.db.all(sql, ...params); - it("orders by names, case-insensitive and ascending", async function(){ - await Promise.all([ - vfs.createScene("a1"), - vfs.createScene("aa"), - vfs.createScene("Ab"), - ]); - let scenes = await vfs.getScenes(null, {orderBy: "name"}); - let names = scenes.map(s=>s.name); - expect(names).to.deep.equal(["a1", "aa", "Ab"]); + this.beforeEach(async function(){ + database_uri = await getUniqueDb(); + vfs = await Vfs.Open(this.dir, {database_uri}); }); + this.afterEach(async function(){ + await vfs.close(); + await dropDb(database_uri); + }) - it("can return existing thumbnails", async function(){ - let s1 = await vfs.createScene("01"); - await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); - let s2 = await vfs.createScene("02"); - await vfs.writeDoc("{}", {scene: s2, user_id: null, name: "scene-image-thumb.png", mime: "image/jpeg"}); - - let s = await vfs.getScenes(0); - expect(s).to.have.property("length", 2); - expect(s[0]).to.have.property("thumb", "scene-image-thumb.png"); - expect(s[1]).to.have.property("thumb", "scene-image-thumb.jpg"); - }); + describe("createScene()", function(){ + it("insert a new scene", async function(){ + await expect(vfs.createScene("foo")).to.be.fulfilled; + }) + it("throws on duplicate name", async function(){ + await expect(vfs.createScene("foo")).to.be.fulfilled; + await expect(vfs.createScene("foo")).to.be.rejectedWith("exist"); + }); + + describe("uid handling", function(){ + let old :typeof Uid.make; + let returns :number[] = []; + this.beforeEach(function(){ + old = Uid.make; + returns = []; + Uid.make = ()=> { + let r = returns.pop(); + if (typeof r === "undefined") throw new Error("No mock result provided"); + return r; + }; + }); + this.afterEach(function(){ + Uid.make = old; + }); - it("returns the last-saved 
thumbnail", async function(){ - let s1 = await vfs.createScene("01"); - let times = [ - new Date("2022-01-01"), - new Date("2023-01-01"), - new Date("2024-01-01") - ]; - const setDate = (i:number, d:Date)=>vfs._db.run(`UPDATE files SET ctime = $2 WHERE file_id = $1`, [i, d]); - let png = await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); - let jpg = await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); - - let r = await setDate(jpg.id, times[1]); - await setDate(png.id, times[2]); - let s = await vfs.getScenes(0); - expect(s).to.have.length(1); - expect(s[0], `use PNG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.png"); - - await setDate(png.id, times[0]); - s = await vfs.getScenes(0); - expect(s[0], `use JPG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.jpg"); - - //If date is equal, prioritize jpg - await setDate(png.id, times[1]); - s = await vfs.getScenes(0); - expect(s[0], `With equal dates, alphanumeric order shopuld prioritize JPG over PNG file`).to.have.property("thumb", "scene-image-thumb.jpg"); - }); + it("fails if no free uid can be found", async function(){ + returns = [1, 1, 1, 1]; + await expect(vfs.createScene("bar")).to.be.fulfilled; + await expect(vfs.createScene("baz")).to.be.rejectedWith("Unable to find a free id"); + }); - it("can get archived scenes", async function(){ - let scene_id = await vfs.createScene("foo"); - await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await vfs.archiveScene(scene_id); - let scenes = await vfs.getScenes(); - expect(scenes.map(({name})=>({name}))).to.deep.equal([{name: `foo#${scene_id}`}]); - }); - - - it("Can't get archived scenes without being authenticated", async function(){ - await vfs.createScene("bar"); - let scene_id = await 
vfs.createScene("foo"); - await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await vfs.archiveScene(scene_id); - - //Two scenes total - expect(await vfs.getScenes()).to.have.length(2); - //Filter only scenes with access: none - await expect(vfs.getScenes(null, {archived: true})).to.be.rejectedWith(UnauthorizedError); - }); + it("retry", async function(){ + returns = [1, 1, 2]; + await expect(vfs.createScene("bar")).to.be.fulfilled; + await expect(vfs.createScene("baz")).to.be.fulfilled; + }); - it("can get an author's own archived scenes", async function(){ - let um :UserManager= new UserManager(vfs._db); - let user = await um.addUser("bob", "12345678", "create", "bob@example.com") - //Create a reference non-archived scene (shouldn't be shown) - await vfs.createScene("bar", user.uid); - //Create a scene owned by someone else - await vfs.createScene("baz"); - //Create our archived scene - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await vfs.archiveScene(scene_id); - - //Three scenes total - expect(await vfs.getScenes()).to.have.length(3); - //Two "existing" scenes - expect(await vfs.getScenes(user.uid, {archived: false})).to.have.length(2); - - //Filter only scenes with access: none - let scenes = await vfs.getScenes(user.uid, {archived: true}); - expect(scenes.map(({name})=>({name}))).to.deep.equal([{name: `foo#${scene_id}`}]); - }) + it("prevents scene name containing uid", async function(){ + returns = [1, 2]; + let scene_id = await expect(vfs.createScene("bar#1")).to.be.fulfilled; + expect(scene_id).to.equal(2); + }); + }) - describe("with permissions", function(){ - let userManager :UserManager, user :User; - this.beforeEach(async function(){ - userManager = new UserManager(vfs._db); - user = 
await userManager.addUser("alice", "xxxxxxxx", "create"); + it("sets scene author", async function(){ + const userManager = new UserManager(vfs._db); + const user = await userManager.addUser("alice", "xxxxxxxx", "create"); + let id = await expect(vfs.createScene("foo", user.uid)).to.be.fulfilled; + try{ + let s = await vfs.getScene(id, user.uid); + expect(s).to.have.property("access").to.be.equal("admin"); + }catch(e){ + console.log("createScene :", e); + throw e; + } }); - it("can filter accessible scenes by user_id", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await userManager.setPublicAccess("foo", "none"); - await userManager.setDefaultAccess("foo", "none"); - await userManager.grant("foo", user.uid, "none"); - await run(`UPDATE scenes SET public_access = 0`); - await run(`INSERT INTO users_acl (fk_scene_id, fk_user_id, access_level) VALUES ($1, $2, 3)`, [scene_id, user.uid]); - expect((await vfs.getScenes(0)), `private scene shouldn't be returned to default user`).to.have.property("length", 0); - expect(await vfs.getScenes(user.uid), `private scene should be returned to its author`).to.have.property("length", 1); + it("sets custom scene permissions", async function(){ + const userManager = new UserManager(vfs._db); + const user = await userManager.addUser("alice", "xxxxxxxx", "create"); + let id = await expect(vfs.createScene("foo", user.uid)).to.be.fulfilled; + await userManager.grant(id, user.uid, "write"); + await userManager.setPublicAccess(id, "none"); + let s = await vfs.getScene(id, user.uid); + expect(s.access).to.deep.equal("write"); }); + }); - it("get proper author id and name", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc("{}", {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + describe("getScenes()", function(){ + it("get an empty list", async function(){ let scenes = await vfs.getScenes(); - 
expect(scenes).to.have.property("length", 1); - expect(scenes[0]).to.have.property("author", user.username); - expect(scenes[0]).to.have.property("author_id", user.uid); - }); - - it("get proper user own access", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await userManager.setDefaultAccess(scene_id, "write"); - await userManager.setPublicAccess(scene_id, "none"); - let scenes = await vfs.getScenes(user.uid); - expect(scenes).to.have.property("length", 1); - expect(scenes[0]).to.have.property("access").to.equal("admin"); - }); - it("get proper \"any\" access", async function(){ + expect(scenes).to.have.property("length", 0); + }) + + it("get a list of scenes", async function(){ let scene_id = await vfs.createScene("foo"); - await userManager.setDefaultAccess(scene_id, "write"); - await userManager.setPublicAccess(scene_id, "read"); - let scenes = await vfs.getScenes(user.uid); + let scenes = await vfs.getScenes(); expect(scenes).to.have.property("length", 1); - expect(scenes[0]).to.have.property("access").to.equal("write"); + let scene = scenes[0]; + + let props = sceneProps(scene_id); + let key:keyof Scene; + for(key in props){ + if(typeof props[key] ==="undefined"){ + expect(scene, `${(scene as any)[key]}`).not.to.have.property(key); + }else if(typeof props[key] === "function"){ + expect(scene, `scene.${key} should match expected class ${props[key].constructor.name}`).to.have.property(key).instanceof(props[key]); + }else{ + expect(scene, `scene.${key} should match expected value ${props[key]}`).to.have.property(key).to.deep.equal(props[key]); + } + } }); - - it("get proper group access", async function(){ + + it("get proper ctime and mtime from last document edit", async function(){ + let t2 = new Date(); + let t1 = new Date(Date.now()-100000); let scene_id = await vfs.createScene("foo"); - let group = await userManager.addGroup("My group"); - await userManager.addMemberToGroup(user.uid, group.groupUid); - await 
userManager.setDefaultAccess(scene_id, "read"); - await userManager.setPublicAccess(scene_id, "read"); - await userManager.grantGroup(scene_id, group.groupUid, "write") - let scenes = await vfs.getScenes(user.uid); - expect(scenes).to.have.property("length", 1); - expect(scenes[0]).to.have.property("access").to.equal("write"); - }); - - it("Do not show non-public scene when there is no requester", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await userManager.setDefaultAccess(scene_id, "write"); - await userManager.setPublicAccess(scene_id, "none"); + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let $doc_id = (await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; + //Force ctime + await run(`UPDATE scenes SET ctime = $1`, [t1]); + await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [t2, $doc_id]); let scenes = await vfs.getScenes(); - expect(scenes).to.have.property("length", 0); - }); - }); - - describe("search", async function(){ - let userManager :UserManager, user :User, sceneAdmin :User, admin :User; - this.beforeEach(async function(){ - userManager = new UserManager(vfs._db); - user = await userManager.addUser("bob", "xxxxxxxx", "create"); - sceneAdmin = await userManager.addUser("alice", "xxxxxxxx", "create"); - admin = await userManager.addUser("adele", "xxxxxxxx", "admin"); + expect(scenes).to.have.property("length", 1); + expect(scenes[0].ctime.valueOf(), `ctime is ${scenes[0].ctime}, expected ${t1}`).to.equal(t1.valueOf()); + expect(scenes[0].mtime.valueOf(), `mtime is ${scenes[0].mtime}, expected ${t2}`).to.equal(t2.valueOf()); }); - it("filters by access-level", async function(){ - await vfs.createScene("foo", sceneAdmin.uid); - await userManager.grant("foo", user.uid, "read"); - expect(await vfs.getScenes(user.uid, {})).to.have.property("length", 1); 
- expect(await vfs.getScenes(user.uid, {access:"admin"})).to.have.property("length", 0); + it("orders by names, case-insensitive and ascending", async function(){ + await Promise.all([ + vfs.createScene("a1"), + vfs.createScene("aa"), + vfs.createScene("Ab"), + ]); + let scenes = await vfs.getScenes(null, {orderBy: "name"}); + let names = scenes.map(s=>s.name); + expect(names).to.deep.equal(["a1", "aa", "Ab"]); }); - it("won't return inaccessible content", async function(){ - await vfs.createScene("foo", sceneAdmin.uid); - await userManager.setPublicAccess("foo", "none"); - await userManager.setDefaultAccess("foo", "none"); - expect(await vfs.getScenes(user.uid, {access:"none"})).to.have.property("length", 0); - }); + it("can return existing thumbnails", async function(){ + let s1 = await vfs.createScene("01"); + await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); + let s2 = await vfs.createScene("02"); + await vfs.writeDoc("{}", {scene: s2, user_id: null, name: "scene-image-thumb.png", mime: "image/jpeg"}); - it("will return everything to admin level user", async function(){ - await vfs.createScene("foo", sceneAdmin.uid); - await userManager.setPublicAccess("foo", "none"); - await userManager.setDefaultAccess("foo", "none"); - expect(await vfs.getScenes(admin.uid)).to.have.property("length", 1); - }); - - it("will return only scenes with specicfic rights to admin level user", async function(){ - await vfs.createScene("foo", sceneAdmin.uid); - await userManager.setPublicAccess("foo", "none"); - await userManager.setDefaultAccess("foo", "none"); - expect(await vfs.getScenes(admin.uid, {access:"read"})).to.have.property("length", 0); - expect(await vfs.getScenes(admin.uid, {access:"write"})).to.have.property("length", 0); - expect(await vfs.getScenes(admin.uid, {access:"admin"})).to.have.property("length", 0); - }); - - it("can select by specific user access level", async function(){ - await vfs.createScene("foo", 
sceneAdmin.uid); - await userManager.grant("foo", user.uid, "read"); - await userManager.setPublicAccess("foo", "read"); - await userManager.setDefaultAccess("foo", "read"); - expect(await vfs.getScenes(user.uid, {access:"read"})).to.have.property("length", 1); + let s = await vfs.getScenes(0); + expect(s).to.have.property("length", 2); + expect(s[0]).to.have.property("thumb", "scene-image-thumb.png"); + expect(s[1]).to.have.property("thumb", "scene-image-thumb.jpg"); }); - it("filters by author", async function(){ - await vfs.createScene("User Authored", user.uid); - await vfs.createScene("Scene Admin Authored", sceneAdmin.uid); - let s = await vfs.getScenes(user.uid, {author: user.username}); - expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + it("returns the last-saved thumbnail", async function(){ + let s1 = await vfs.createScene("01"); + let times = [ + new Date("2022-01-01"), + new Date("2023-01-01"), + new Date("2024-01-01") + ]; + const setDate = (i:number, d:Date)=>vfs._db.run(`UPDATE files SET ctime = $2 WHERE file_id = $1`, [i, d]); + let png = await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); + let jpg = await vfs.writeDoc("{}", {scene: s1, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); - it("filters by name match", async function(){ - let hello_scene_id = await vfs.createScene("Hello World", user.uid); - await vfs.writeDoc(JSON.stringify( { - metas: [{collection:{ - titles:{EN: "", FR: ""} - }}]} - ), {scene: hello_scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let goodbye_scene_id = await vfs.createScene("Goodbye World", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{collection:{ - titles:{EN: "", FR: ""} - }}]} - ), {scene: goodbye_scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await 
vfs.getScenes(user.uid, {match: "Hello"}) - expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + let r = await setDate(jpg.id, times[1]); + await setDate(png.id, times[2]); + let s = await vfs.getScenes(0); + expect(s).to.have.length(1); + expect(s[0], `use PNG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.png"); - it("can match a document's meta title", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{collection:{ - titles:{EN: "Hello World", FR: "Bonjour, monde"} - }}] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + await setDate(png.id, times[0]); + s = await vfs.getScenes(0); + expect(s[0], `use JPG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.jpg"); - it("can match a document's intros", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{collection:{ - intros:{EN: "Hello World", FR: "Bonjour, monde"} - }}] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + //If date is equal, prioritize jpg + await setDate(png.id, times[1]); + s = await vfs.getScenes(0); + expect(s[0], `With equal dates, alphanumeric order shopuld prioritize JPG over PNG file`).to.have.property("thumb", "scene-image-thumb.jpg"); }); - it("can match a document's copyright", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{collection:{ - titles:{EN: " 
", FR: " "} - }}], - asset: {copyright: "Hello World"} - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + it("can get archived scenes", async function(){ + let scene_id = await vfs.createScene("foo"); + await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await vfs.archiveScene(scene_id); + let scenes = await vfs.getScenes(); + expect(scenes.map(({name})=>({name}))).to.deep.equal([{name: `foo#${scene_id}`}]); }); + - it("can match a document's article title", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{ - articles:[ - {titles:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + it("Can't get archived scenes without being authenticated", async function(){ + await vfs.createScene("bar"); + let scene_id = await vfs.createScene("foo"); + await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await vfs.archiveScene(scene_id); - it("can match a document's annotation title", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - models: [{ - annotations:[ - {titles:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", 
")}]`).to.have.property("length", 1); + //Two scenes total + expect(await vfs.getScenes()).to.have.length(2); + //Filter only scenes with access: none + await expect(vfs.getScenes(null, {archived: true})).to.be.rejectedWith(UnauthorizedError); }); - it("can match a document's tour title", async function(){ + it("can get an author's own archived scenes", async function(){ + let um :UserManager= new UserManager(vfs._db); + let user = await um.addUser("bob", "12345678", "create", "bob@example.com") + //Create a reference non-archived scene (shouldn't be shown) + await vfs.createScene("bar", user.uid); + //Create a scene owned by someone else + await vfs.createScene("baz"); + //Create our archived scene let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - setups: [{ - tours:[ - {titles:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + await vfs.writeDoc(JSON.stringify({foo: "bar"}), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await vfs.archiveScene(scene_id); - it("can match a document's article lead", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{ - articles:[ - {leads:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + //Three scenes total + expect(await vfs.getScenes()).to.have.length(3); + //Two "existing" scenes + expect(await vfs.getScenes(user.uid, {archived: false})).to.have.length(2); - it("can match a document's annotation 
leads", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - models: [{ - annotations:[ - {leads:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + //Filter only scenes with access: none + let scenes = await vfs.getScenes(user.uid, {archived: true}); + expect(scenes.map(({name})=>({name}))).to.deep.equal([{name: `foo#${scene_id}`}]); + }) - it("can match a document's tour leads", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc(JSON.stringify({ - setups: [{ - tours:[ - {leads:{EN: "Hello"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + describe("with permissions", function(){ + let userManager :UserManager, user :User; + this.beforeEach(async function(){ + userManager = new UserManager(vfs._db); + user = await userManager.addUser("alice", "xxxxxxxx", "create"); + }); + it("can filter accessible scenes by user_id", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await userManager.setPublicAccess("foo", "none"); + await userManager.setDefaultAccess("foo", "none"); + await userManager.grant("foo", user.uid, "none"); + await run(`UPDATE scenes SET public_access = 0`); + await run(`INSERT INTO users_acl (fk_scene_id, fk_user_id, access_level) VALUES ($1, $2, 3)`, [scene_id, user.uid]); + expect((await vfs.getScenes(0)), `private scene shouldn't be returned to default user`).to.have.property("length", 0); + expect(await vfs.getScenes(user.uid), `private scene should be returned to its 
author`).to.have.property("length", 1); + }); - it("can match a document's article text", async function(){ - let scene_id = await vfs.createScene("foo", user.uid); - await vfs.writeDoc("Hello\n", {scene: scene_id, mime: "text/html", name: "articles/foo.html", user_id: user.uid}); - await vfs.writeDoc(JSON.stringify({ - metas: [{ - articles:[{ - titles: {EN: " ", FR: ""}, - uris: {EN: "articles/foo.html", FR: "articles/foo.html"} - }] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "Hello"}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + it("get proper author id and name", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc("{}", {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let scenes = await vfs.getScenes(); + expect(scenes).to.have.property("length", 1); + expect(scenes[0]).to.have.property("author", user.username); + expect(scenes[0]).to.have.property("author_id", user.uid); + }); + + it("get proper user own access", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await userManager.setDefaultAccess(scene_id, "write"); + await userManager.setPublicAccess(scene_id, "none"); + let scenes = await vfs.getScenes(user.uid); + expect(scenes).to.have.property("length", 1); + expect(scenes[0]).to.have.property("access").to.equal("admin"); + }); + it("get proper \"any\" access", async function(){ + let scene_id = await vfs.createScene("foo"); + await userManager.setDefaultAccess(scene_id, "write"); + await userManager.setPublicAccess(scene_id, "read"); + let scenes = await vfs.getScenes(user.uid); + expect(scenes).to.have.property("length", 1); + expect(scenes[0]).to.have.property("access").to.equal("write"); + }); + + it("get proper group access", async function(){ + let scene_id = 
await vfs.createScene("foo"); + let group = await userManager.addGroup("My group"); + await userManager.addMemberToGroup(user.uid, group.groupUid); + await userManager.setDefaultAccess(scene_id, "read"); + await userManager.setPublicAccess(scene_id, "read"); + await userManager.grantGroup(scene_id, group.groupUid, "write") + let scenes = await vfs.getScenes(user.uid); + expect(scenes).to.have.property("length", 1); + expect(scenes[0]).to.have.property("access").to.equal("write"); + }); + + it("Do not show non-public scene when there is no requester", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await userManager.setDefaultAccess(scene_id, "write"); + await userManager.setPublicAccess(scene_id, "none"); + let scenes = await vfs.getScenes(); + expect(scenes).to.have.property("length", 0); + }); }); + describe("search", async function(){ + let userManager :UserManager, user :User, sceneAdmin :User, admin :User; + this.beforeEach(async function(){ + userManager = new UserManager(vfs._db); + user = await userManager.addUser("bob", "xxxxxxxx", "create"); + sceneAdmin = await userManager.addUser("alice", "xxxxxxxx", "create"); + admin = await userManager.addUser("adele", "xxxxxxxx", "admin"); + }); - it("is case-insensitive", async function(){ - const scene = await vfs.createScene("Hello World", user.uid); - await vfs.writeDoc(JSON.stringify( { - metas: [{collection:{ - titles:{EN: "", FR: ""} - }}]} - ), {scene: scene, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let s = await vfs.getScenes(user.uid, {match: "hello"}) - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); + it("filters by access-level", async function(){ + await vfs.createScene("foo", sceneAdmin.uid); + await userManager.grant("foo", user.uid, "read"); + expect(await vfs.getScenes(user.uid, {})).to.have.property("length", 1); + expect(await vfs.getScenes(user.uid, 
{access:"admin"})).to.have.property("length", 0); + }); + it("won't return inaccessible content", async function(){ + await vfs.createScene("foo", sceneAdmin.uid); + await userManager.setPublicAccess("foo", "none"); + await userManager.setDefaultAccess("foo", "none"); + expect(await vfs.getScenes(user.uid, {access:"none"})).to.have.property("length", 0); + }); - it("can search against multiple search terms", async function(){ - let scene_id = await vfs.createScene("bar", user.uid); - await vfs.writeDoc(JSON.stringify({ - metas: [{ - articles:[ - {leads:{EN: "Hello World, this is User"}} - ] - }] - }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - - scene_id = await vfs.createScene("foo 1", sceneAdmin.uid); - await vfs.writeDoc(JSON.stringify( { - metas: [{collection:{ - titles:{EN: "", FR: ""} - }}]} - ), {scene: scene_id, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - - scene_id = await vfs.createScene("foo 2", user.uid); - await vfs.writeDoc(JSON.stringify( { - metas: [{collection:{ - titles:{EN: "fizz", FR: ""} - }}]} - ), {scene: scene_id, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + it("will return everything to admin level user", async function(){ + await vfs.createScene("foo", sceneAdmin.uid); + await userManager.setPublicAccess("foo", "none"); + await userManager.setDefaultAccess("foo", "none"); + expect(await vfs.getScenes(admin.uid)).to.have.property("length", 1); + }); - let s = await vfs.getScenes(user.uid, {match: `foo fizz`}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - expect(s[0]).to.have.property("name", "foo 2"); + it("will return only scenes with specicfic rights to admin level user", async function(){ + await vfs.createScene("foo", sceneAdmin.uid); + await userManager.setPublicAccess("foo", "none"); + await userManager.setDefaultAccess("foo", 
"none"); + expect(await vfs.getScenes(admin.uid, {access:"read"})).to.have.property("length", 0); + expect(await vfs.getScenes(admin.uid, {access:"write"})).to.have.property("length", 0); + expect(await vfs.getScenes(admin.uid, {access:"admin"})).to.have.property("length", 0); + }); - s = await vfs.getScenes(user.uid, {match: `foo OR fizz`}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 2); - expect(s[0]).to.have.property("name", "foo 2"); - expect(s[1]).to.have.property("name", "foo 1"); - + it("can select by specific user access level", async function(){ + await vfs.createScene("foo", sceneAdmin.uid); + await userManager.grant("foo", user.uid, "read"); + await userManager.setPublicAccess("foo", "read"); + await userManager.setDefaultAccess("foo", "read"); + expect(await vfs.getScenes(user.uid, {access:"read"})).to.have.property("length", 1); + }); - s = await vfs.getScenes(user.uid, {match: `Hello User`}); - expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - expect(s[0]).to.have.property("name", "bar"); - }); + it("filters by author", async function(){ + await vfs.createScene("User Authored", user.uid); + await vfs.createScene("Scene Admin Authored", sceneAdmin.uid); + let s = await vfs.getScenes(user.uid, {author: user.username}); + expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); + it("filters by name match", async function(){ + let hello_scene_id = await vfs.createScene("Hello World", user.uid); + await vfs.writeDoc(JSON.stringify( { + metas: [{collection:{ + titles:{EN: "", FR: ""} + }}]} + ), {scene: hello_scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let goodbye_scene_id = await vfs.createScene("Goodbye World", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{collection:{ + titles:{EN: "", FR: ""} + }}]} + ), {scene: goodbye_scene_id, user_id: user.uid, name: "scene.svx.json", mime: 
"application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}) + expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("can match an empty string", async function(){ - await vfs.createScene("Hello World", user.uid); - await vfs.createScene("Goodbye World", user.uid); - let s = await vfs.getScenes(user.uid, {match: ""}) - expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 2); - }); + it("can match a document's meta title", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{collection:{ + titles:{EN: "Hello World", FR: "Bonjour, monde"} + }}] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); + it("can match a document's intros", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{collection:{ + intros:{EN: "Hello World", FR: "Bonjour, monde"} + }}] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - it("can match partial words in titles of scenes, including the ones without json", async function(){ - await vfs.createScene("EAD.A.Nom1.Nom2", user.uid); - await vfs.createScene("GlobeAppli", user.uid); - let s = await vfs.getScenes(user.uid, {match: "lobe"}); - expect(s, `Globe Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - s = await vfs.getScenes(user.uid, {match: "EAD"}); - expect(s, `EAD Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); - }); - }); - - describe("ordering", function(){ - it("rejects bad orderBy key", async function(){ - await expect(vfs.getScenes(0, 
{orderBy: "bad" as any})).to.be.rejectedWith("Invalid orderBy: bad"); - }) - it("rejects bad orderDirection key", async function(){ - await expect(vfs.getScenes(0, {orderDirection: "bad" as any})).to.be.rejectedWith("Invalid orderDirection: bad"); - }); - it("can order by name descending", async function(){ - for(let i = 0; i < 10; i++){ - await vfs.createScene(`${i}_scene`); - } - const scenes = await vfs.getScenes(0, {orderBy: "name", orderDirection: "desc"}); - expect(scenes.map(s=>s.name)).to.deep.equal([9,8,7,6,5,4,3,2,1,0].map(n=>n+"_scene")); - }); - }); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - describe("pagination", function(){ - it("rejects bad LIMIT", async function(){ - let fixtures = [-1, "10", null]; - for(let f of fixtures){ - await expect(vfs.getScenes(0, {limit: f as any})).to.be.rejectedWith(BadRequestError); - } - }); + it("can match a document's copyright", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{collection:{ + titles:{EN: " ", FR: " "} + }}], + asset: {copyright: "Hello World"} + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("rejects bad OFFSET", async function(){ - let fixtures = [-1, "10", null]; - for(let f of fixtures){ - await expect(vfs.getScenes(0, {limit: f as any})).to.be.rejectedWith(BadRequestError); - } - }); + it("can match a document's article title", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{ + articles:[ + {titles:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + 
let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("respects pagination options", async function(){ - for(let i = 0; i < 10; i++){ - await vfs.createScene(`scene_${i}`); - } - let res = await vfs.getScenes(0, {limit: 1, offset: 0}) - expect(res).to.have.property("length", 1); - expect(res[0]).to.have.property("name", "scene_9"); - - res = await vfs.getScenes(0, {limit: 2, offset: 2}) - expect(res).to.have.property("length", 2); - expect(res[0]).to.have.property("name", "scene_7"); - expect(res[1]).to.have.property("name", "scene_6"); - }); + it("can match a document's annotation title", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + models: [{ + annotations:[ + {titles:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("limits LIMIT to 100", async function(){ - await expect(vfs.getScenes(0, {limit: 110, offset: 0})).to.be.rejectedWith("[400]"); - }); - }); - }); + it("can match a document's tour title", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + setups: [{ + tours:[ + {titles:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - describe("createFolder(), removeFolder(), listFolders()", function(){ - let scene_id :number; - this.beforeEach(async function(){ - scene_id = await vfs.createScene("foo"); - }) + it("can match a document's article lead", async function(){ + let 
scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{ + articles:[ + {leads:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("create a folder in a scene", async function(){ - await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); - await vfs.writeDoc("foo", {scene: scene_id, name: "videos/foo.txt", user_id: null}); - let folders = await collapseAsync(vfs.listFolders(scene_id)); - //order is by mtime descending, name ascending so we can't rely on it - expect(folders.map(f=>f.name)).to.have.members(["articles", "models", "videos"]); - expect(folders).to.have.length(3); - }); + it("can match a document's annotation leads", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + models: [{ + annotations:[ + {leads:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("create a tree of folders", async function(){ - await vfs.createFolder({scene:scene_id, name: "articles/videos", user_id: null}); - let folders = await collapseAsync(vfs.listFolders(scene_id)); - expect(folders.map(f=>f.name)).to.deep.equal(["articles/videos", "articles" , "models"]); - }); + it("can match a document's tour leads", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc(JSON.stringify({ + setups: [{ + tours:[ + {leads:{EN: "Hello"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = 
await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("don't accept a trailing slash", async function(){ - await expect(vfs.createFolder({scene:scene_id, name: "videos/", user_id: null})).to.be.rejectedWith(BadRequestError); - }); - it("don't accept absolute paths", async function(){ - await expect(vfs.createFolder({scene:scene_id, name: "/videos", user_id: null})).to.be.rejectedWith(BadRequestError); - }); + it("can match a document's article text", async function(){ + let scene_id = await vfs.createScene("foo", user.uid); + await vfs.writeDoc("Hello\n", {scene: scene_id, mime: "text/html", name: "articles/foo.html", user_id: user.uid}); + await vfs.writeDoc(JSON.stringify({ + metas: [{ + articles:[{ + titles: {EN: " ", FR: ""}, + uris: {EN: "articles/foo.html", FR: "articles/foo.html"} + }] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "Hello"}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("throws an error if folder exists", async function(){ - await vfs.createFolder({scene: scene_id, name: "videos", user_id: null}); - await expect( vfs.createFolder({scene: scene_id, name: "videos", user_id: null}) ).to.be.rejectedWith(ConflictError); - }); - it("throws an error if folder doesn't exist", async function(){ - await expect(vfs.removeFolder({scene: scene_id, name: "videos", user_id: null})).to.be.rejectedWith(NotFoundError); - }); + it("is case-insensitive", async function(){ + const scene = await vfs.createScene("Hello World", user.uid); + await vfs.writeDoc(JSON.stringify( { + metas: [{collection:{ + titles:{EN: "", FR: ""} + }}]} + ), {scene: scene, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let s = await vfs.getScenes(user.uid, {match: "hello"}) + expect(s, 
`[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); - it("remove a scene's folder", async function(){ - await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); - await vfs.removeFolder({scene: scene_id, name: "videos", user_id: null}); - let folders = await collapseAsync(vfs.listFolders(scene_id)); - expect(folders.map(f=>f.name)).to.deep.equal(["articles", "models"]); - await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); - folders = await collapseAsync(vfs.listFolders(scene_id)); - expect(folders.map(f=>f.name).sort()).to.deep.equal(["videos", "models", "articles"].sort()); - }); - it("removeFolder() removes all files in the folder", async function(){ - let userManager = new UserManager(vfs._db); - let user = await userManager.addUser("alice", "xxxxxxxx", "create"); - await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); - await vfs.writeFile(dataStream(), {scene: scene_id, name: "videos/foo.mp4", mime:"video/mp4", user_id: null}); + it("can search against multiple search terms", async function(){ + let scene_id = await vfs.createScene("bar", user.uid); + await vfs.writeDoc(JSON.stringify({ + metas: [{ + articles:[ + {leads:{EN: "Hello World, this is User"}} + ] + }] + }), {scene: scene_id, user_id: user.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + + scene_id = await vfs.createScene("foo 1", sceneAdmin.uid); + await vfs.writeDoc(JSON.stringify( { + metas: [{collection:{ + titles:{EN: "", FR: ""} + }}]} + ), {scene: scene_id, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + + scene_id = await vfs.createScene("foo 2", user.uid); + await vfs.writeDoc(JSON.stringify( { + metas: [{collection:{ + titles:{EN: "fizz", FR: ""} + }}]} + ), {scene: scene_id, user_id: sceneAdmin.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + + let s = await vfs.getScenes(user.uid, {match: `foo fizz`}); + 
expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + expect(s[0]).to.have.property("name", "foo 2"); + + s = await vfs.getScenes(user.uid, {match: `foo OR fizz`}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 2); + expect(s[0]).to.have.property("name", "foo 2"); + expect(s[1]).to.have.property("name", "foo 1"); + + + s = await vfs.getScenes(user.uid, {match: `Hello User`}); + expect(s, `[${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + expect(s[0]).to.have.property("name", "bar"); + }); - await vfs.removeFolder({scene: scene_id, name: "videos", user_id: user.uid }); - let files = await collapseAsync(vfs.listFiles(scene_id)); - expect(files).to.deep.equal([]); - }); - }); + it("can match an empty string", async function(){ + await vfs.createScene("Hello World", user.uid); + await vfs.createScene("Goodbye World", user.uid); + let s = await vfs.getScenes(user.uid, {match: ""}) + expect(s, `Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 2); + }); - describe("tags", function(){ - let scene_id :number; - //Create a dummy scene for future tests - this.beforeEach(async function(){ - scene_id = await vfs.createScene("foo"); - }); - describe("addSceneTag() / removeSceneTag()", function(){ - it("adds a tag to a scene", async function(){ - await vfs.addTag(scene_id, "foo"); - let s = await vfs.getScene(scene_id); - expect(s).to.have.property("tags").to.deep.equal(["foo"]); - await vfs.addTag(scene_id, "bar"); - s = await vfs.getScene(scene_id); - //Ordering is loosely expected to hold: we do not enforce AUTOINCREMENT on rowids but it's generally true - expect(s).to.have.property("tags").to.deep.equal(["foo", "bar"]); + it("can match partial words in titles of scenes, including the ones without json", async function(){ + await vfs.createScene("EAD.A.Nom1.Nom2", user.uid); + await vfs.createScene("GlobeAppli", user.uid); + let s = await vfs.getScenes(user.uid, {match: 
"lobe"}); + expect(s, `Globe Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + s = await vfs.getScenes(user.uid, {match: "EAD"}); + expect(s, `EAD Matched Scenes: [${s.map(s=>s.name).join(", ")}]`).to.have.property("length", 1); + }); }); - - it("can remove tag", async function(){ - await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); - await expect(vfs.addTag(scene_id, "bar")).to.eventually.equal(true); - await expect(vfs.removeTag(scene_id, "foo")).to.eventually.equal(true); - - let s = await vfs.getScene(scene_id); - expect(s).to.have.property("tags").to.deep.equal(["bar"]); + + describe("ordering", function(){ + it("rejects bad orderBy key", async function(){ + await expect(vfs.getScenes(0, {orderBy: "bad" as any})).to.be.rejectedWith("Invalid orderBy: bad"); + }) + it("rejects bad orderDirection key", async function(){ + await expect(vfs.getScenes(0, {orderDirection: "bad" as any})).to.be.rejectedWith("Invalid orderDirection: bad"); + }); + it("can order by name descending", async function(){ + for(let i = 0; i < 10; i++){ + await vfs.createScene(`${i}_scene`); + } + const scenes = await vfs.getScenes(0, {orderBy: "name", orderDirection: "desc"}); + expect(scenes.map(s=>s.name)).to.deep.equal([9,8,7,6,5,4,3,2,1,0].map(n=>n+"_scene")); + }); }); - it("can be called with scene name", async function(){ - await expect(vfs.addTag("foo", "foo")).to.eventually.equal(true); - let s = await vfs.getScene(scene_id); - expect(s).to.have.property("tags").to.deep.equal(["foo"]); + describe("pagination", function(){ + it("rejects bad LIMIT", async function(){ + let fixtures = [-1, "10", null]; + for(let f of fixtures){ + await expect(vfs.getScenes(0, {limit: f as any})).to.be.rejectedWith(BadRequestError); + } + }); - await expect(vfs.removeTag("foo", "foo")).to.eventually.equal(true); - }); + it("rejects bad OFFSET", async function(){ + let fixtures = [-1, "10", null]; + for(let f of fixtures){ + await 
expect(vfs.getScenes(0, {limit: f as any})).to.be.rejectedWith(BadRequestError); + } + }); - it("throws a 404 errors if scene doesn't exist", async function(){ - // by id - await expect(vfs.addTag(scene_id+1, "foo")).to.be.rejectedWith(NotFoundError); - // by name - await expect(vfs.addTag("baz", "foo")).to.be.rejectedWith(NotFoundError); + it("respects pagination options", async function(){ + for(let i = 0; i < 10; i++){ + await vfs.createScene(`scene_${i}`); + } + let res = await vfs.getScenes(0, {limit: 1, offset: 0}) + expect(res).to.have.property("length", 1); + expect(res[0]).to.have.property("name", "scene_9"); + + res = await vfs.getScenes(0, {limit: 2, offset: 2}) + expect(res).to.have.property("length", 2); + expect(res[0]).to.have.property("name", "scene_7"); + expect(res[1]).to.have.property("name", "scene_6"); + }); + it("limits LIMIT to 100", async function(){ + await expect(vfs.getScenes(0, {limit: 110, offset: 0})).to.be.rejectedWith("[400]"); + }); }); + }); + describe("createFolder(), removeFolder(), listFolders()", function(){ + let scene_id :number; + this.beforeEach(async function(){ + scene_id = await vfs.createScene("foo"); + }) - it("returns false if nothing was changed", async function(){ - await vfs.addTag(scene_id, "foo"); - //When tag is added twice, by scene_id - await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(false); - //When tag is added twice, by name - await expect(vfs.addTag("foo", "foo")).to.eventually.equal(false); - - //When tag doesn't exist - await expect(vfs.removeTag(scene_id, "bar")).to.be.eventually.equal(false); - //When scene doesn't exist - await expect(vfs.removeTag(scene_id+1, "foo")).to.eventually.equal(false); - }); - it("store case and accents", async function(){ - await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); - expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 1}]); + it("create a folder in a scene", async function(){ + await 
vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); + await vfs.writeDoc("foo", {scene: scene_id, name: "videos/foo.txt", user_id: null}); + let folders = await collapseAsync(vfs.listFolders(scene_id)); + //order is by mtime descending, name ascending so we can't rely on it + expect(folders.map(f=>f.name)).to.have.members(["articles", "models", "videos"]); + expect(folders).to.have.length(3); }); - it("collate case and accents (same scene)", async function(){ - await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); - await expect(vfs.addTag(scene_id, "électricité")).to.eventually.equal(false); - await expect(vfs.addTag(scene_id, "electricite")).to.eventually.equal(false); - - expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 1}]); + it("create a tree of folders", async function(){ + await vfs.createFolder({scene:scene_id, name: "articles/videos", user_id: null}); + let folders = await collapseAsync(vfs.listFolders(scene_id)); + expect(folders.map(f=>f.name)).to.deep.equal(["articles/videos", "articles" , "models"]); }); - it("collate case and accents (multiple scenes)", async function(){ - let s2 = await vfs.createScene("tags-collate-s2"); - let s3 = await vfs.createScene("tags-collate-s3"); - await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); - await expect(vfs.addTag(s2, "électricité")).to.eventually.equal(true); - await expect(vfs.addTag(s3, "electricite")).to.eventually.equal(true); - - expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 3}]); - }) - }); - - - describe("getTags()", function(){ - it("get all tags", async function(){ - //Create a bunch of additional test scenes - for(let i=0; i < 3; i++){ - let id = await vfs.createScene(`test_${i}`); - for(let j=0; j <= i; j++ ){ - await vfs.addTag(id, `tag_${j}`); - } - } - expect(await vfs.getTags()).to.deep.equal([ - {name: "tag_0", size: 3}, - {name: "tag_1", size: 2}, - {name: "tag_2", size: 1}, - ]); + 
it("don't accept a trailing slash", async function(){ + await expect(vfs.createFolder({scene:scene_id, name: "videos/", user_id: null})).to.be.rejectedWith(BadRequestError); }); - it("get tags matching a string", async function(){ - await vfs.addTag(scene_id, `tag_foo`); - await vfs.addTag(scene_id, `foo_tag`); - await vfs.addTag(scene_id, `tag_bar`); - - expect(await vfs.getTags({like: "foo"})).to.deep.equal([ - {name:"foo_tag", size: 1}, - {name: "tag_foo", size: 1}, - ]); - - //Match should be case-insensitive - expect(await vfs.getTags({like: "Foo"})).to.deep.equal([ - {name:"foo_tag", size: 1}, - {name: "tag_foo", size: 1}, - ]); + it("don't accept absolute paths", async function(){ + await expect(vfs.createFolder({scene:scene_id, name: "/videos", user_id: null})).to.be.rejectedWith(BadRequestError); }); - it("supports pagination", async function(){ - //Create a bunch of additional test scenes - for(let i=0; i < 3; i++){ - let id = await vfs.createScene(`test_${i}`); - for(let j=0; j <= i; j++ ){ - await vfs.addTag(id, `tag_${j}`); - } - } - expect(await vfs.getTags({limit: 1})).to.deep.equal([ - {name: "tag_0", size: 3}, - ]); - expect(await vfs.getTags({limit: 2, offset: 1})).to.deep.equal([ - {name: "tag_1", size: 2}, - {name: "tag_2", size: 1}, - ]); - expect(await vfs.getTags({offset: 3})).to.deep.equal([ ]); + it("throws an error if folder exists", async function(){ + await vfs.createFolder({scene: scene_id, name: "videos", user_id: null}); + await expect( vfs.createFolder({scene: scene_id, name: "videos", user_id: null}) ).to.be.rejectedWith(ConflictError); }); - it("don't count archived scenes", async function(){ - await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); - await vfs.archiveScene(scene_id); - expect(await vfs.getTags()).to.deep.equal([]); + it("throws an error if folder doesn't exist", async function(){ + await expect(vfs.removeFolder({scene: scene_id, name: "videos", user_id: null})).to.be.rejectedWith(NotFoundError); }); - 
}); - describe("getTag()", function(){ - it("Get all scenes attached to a tag", async function(){ - let ids = []; - for(let i=0; i < 3; i++){ - let id = await vfs.createScene(`test_${i}`); - ids.push(id); - await vfs.addTag(id, `tag_foo`); - } - for(let i=3; i < 6; i++){ - let id = await vfs.createScene(`test_${i}`); - await vfs.addTag(id, `tag_bar`); - } - let scenes = await vfs.getTag("tag_foo"); - expect(scenes).to.deep.equal(ids); + it("remove a scene's folder", async function(){ + await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); + await vfs.removeFolder({scene: scene_id, name: "videos", user_id: null}); + let folders = await collapseAsync(vfs.listFolders(scene_id)); + expect(folders.map(f=>f.name)).to.deep.equal(["articles", "models"]); + await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); + folders = await collapseAsync(vfs.listFolders(scene_id)); + expect(folders.map(f=>f.name).sort()).to.deep.equal(["videos", "models", "articles"].sort()); }); - it("Ignore archived scenes", async function(){ - await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); + it("removeFolder() removes all files in the folder", async function(){ + let userManager = new UserManager(vfs._db); + let user = await userManager.addUser("alice", "xxxxxxxx", "create"); + await vfs.createFolder({scene:scene_id, name: "videos", user_id: null}); + await vfs.writeFile(dataStream(), {scene: scene_id, name: "videos/foo.mp4", mime:"video/mp4", user_id: null}); - let s2 = await vfs.createScene(`test_scene_2`); - await expect(vfs.addTag(s2, "foo")).to.eventually.equal(true); - expect(await vfs.getTag("foo")).to.deep.equal([scene_id, s2]); + await vfs.removeFolder({scene: scene_id, name: "videos", user_id: user.uid }); - await vfs.archiveScene(s2); - expect(await vfs.getTag("foo")).to.deep.equal([scene_id]); + let files = await collapseAsync(vfs.listFiles(scene_id)); + expect(files).to.deep.equal([]); }); + }); + describe("tags", function(){ + 
let scene_id :number; + //Create a dummy scene for future tests + this.beforeEach(async function(){ + scene_id = await vfs.createScene("foo"); + }); + + describe("addSceneTag() / removeSceneTag()", function(){ + it("adds a tag to a scene", async function(){ + await vfs.addTag(scene_id, "foo"); + let s = await vfs.getScene(scene_id); + expect(s).to.have.property("tags").to.deep.equal(["foo"]); + await vfs.addTag(scene_id, "bar"); + s = await vfs.getScene(scene_id); + //Ordering is loosely expected to hold: we do not enforce AUTOINCREMENT on rowids but it's generally true + expect(s).to.have.property("tags").to.deep.equal(["foo", "bar"]); + }); - describe("respects permissions", function(){ - let userManager :UserManager, alice :User, bob :User; - this.beforeEach(async function(){ - userManager = new UserManager(vfs._db); - alice = await userManager.addUser("alice", "12345678", "admin"); - bob = await userManager.addUser("bob", "12345678", "create"); + it("can remove tag", async function(){ + await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); + await expect(vfs.addTag(scene_id, "bar")).to.eventually.equal(true); + await expect(vfs.removeTag(scene_id, "foo")).to.eventually.equal(true); + + let s = await vfs.getScene(scene_id); + expect(s).to.have.property("tags").to.deep.equal(["bar"]); }); - it("return scenes with public read access", async function(){ - await vfs.addTag("foo", "foo"); - expect(await vfs.getTag("foo", alice.uid), "with admin user_id").to.deep.equal([scene_id]); + it("can be called with scene name", async function(){ + await expect(vfs.addTag("foo", "foo")).to.eventually.equal(true); + let s = await vfs.getScene(scene_id); + expect(s).to.have.property("tags").to.deep.equal(["foo"]); - expect(await vfs.getTag("foo", bob.uid), "with normal user id").to.deep.equal([scene_id]); + await expect(vfs.removeTag("foo", "foo")).to.eventually.equal(true); }); - it("return all scenes for admins", async function(){ - //Scene if from bob, alice has 
no special rights over it, but should see it anyways - const id = await vfs.createScene("bob-private", bob.uid); - await userManager.setPublicAccess("bob-private", "none"); - await userManager.setDefaultAccess("bob-private", "none"); - await vfs.addTag("bob-private", "foo"); - expect(await vfs.getTag("foo"), "without user id").to.deep.equal([]); - expect(await vfs.getTag("foo", alice.uid), "with admin id").to.deep.equal([id]); - }) + it("throws a 404 errors if scene doesn't exist", async function(){ + // by id + await expect(vfs.addTag(scene_id+1, "foo")).to.be.rejectedWith(NotFoundError); + // by name + await expect(vfs.addTag("baz", "foo")).to.be.rejectedWith(NotFoundError); + + }); - it("won't return non-readable scene", async function(){ - const id = await vfs.createScene("admin-only", alice.uid); - await userManager.setPublicAccess("admin-only", "none"); - await userManager.setDefaultAccess("admin-only", "none"); - await vfs.addTag("admin-only", "foo"); - expect(await vfs.getTag("foo"), "without user_id").to.deep.equal([]); - expect(await vfs.getTag("foo", alice.uid), "with admin user_id").to.deep.equal([id]); + it("returns false if nothing was changed", async function(){ + await vfs.addTag(scene_id, "foo"); + //When tag is added twice, by scene_id + await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(false); + //When tag is added twice, by name + await expect(vfs.addTag("foo", "foo")).to.eventually.equal(false); - expect(await vfs.getTag("foo", bob.uid)).to.deep.equal([]); + //When tag doesn't exist + await expect(vfs.removeTag(scene_id, "bar")).to.be.eventually.equal(false); + //When scene doesn't exist + await expect(vfs.removeTag(scene_id+1, "foo")).to.eventually.equal(false); + }); + it("store case and accents", async function(){ + await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); + expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 1}]); }); - }) - }); - }); - describe("", function(){ - let 
scene_id :number; - //Create a dummy scene for future tests - this.beforeEach(async function(){ - scene_id = await vfs.createScene("foo"); - }); - - describe("renameScene()", function(){ - it("can change a scene name", async function(){ - await expect(vfs.renameScene(scene_id, "bar")).to.be.fulfilled; - }); - it("throw a 404 error", async function(){ - await expect(vfs.renameScene(404, "bar")).to.be.rejectedWith("404"); - }); - }) + it("collate case and accents (same scene)", async function(){ + await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); + await expect(vfs.addTag(scene_id, "électricité")).to.eventually.equal(false); + await expect(vfs.addTag(scene_id, "electricite")).to.eventually.equal(false); - describe("archiveScene()", function(){ - it("makes scene hidden", async function(){ - await vfs.archiveScene("foo"); - expect(await vfs.getScenes(0, {archived: false})).to.have.property("length", 0); - }); + expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 1}]); + }); + it("collate case and accents (multiple scenes)", async function(){ + let s2 = await vfs.createScene("tags-collate-s2"); + let s3 = await vfs.createScene("tags-collate-s3"); + await expect(vfs.addTag(scene_id, "Électricité")).to.eventually.equal(true); + await expect(vfs.addTag(s2, "électricité")).to.eventually.equal(true); + await expect(vfs.addTag(s3, "electricite")).to.eventually.equal(true); - it("can't archive twice", async function(){ - await vfs.archiveScene(scene_id); - await expect(vfs.archiveScene(scene_id), (await vfs._db.all("SELECT * FROM scenes"))[0].scene_name).to.be.rejectedWith(NotFoundError); + expect(await vfs.getTags()).to.deep.equal([{name: "Électricité", size: 3}]); + }) }); - it("can remove archived scene (by id)", async function(){ - await vfs.archiveScene(scene_id); - await expect(vfs.removeScene(scene_id)).to.be.fulfilled; - }); - - it("can remove archived scene (by name)", async function(){ - await vfs.archiveScene(scene_id); 
- await expect(vfs.removeScene(`foo#${scene_id}`)).to.be.fulfilled; - }); - it("store archive time", async function(){ - //To be used later - await vfs.archiveScene(scene_id); - let {archived} = await vfs._db.get(`SELECT archived FROM scenes WHERE scene_id= $1`, [scene_id]); - expect(archived).to.be.instanceof(Date); - expect(archived.toString()).not.to.equal('Invalid Date'); - expect(archived.valueOf(), archived.toUTCString()).to.be.above(new Date().valueOf() - 2000); - expect(archived.valueOf(), archived.toUTCString()).to.be.below(new Date().valueOf()+1); - }) - }); + describe("getTags()", function(){ + it("get all tags", async function(){ + //Create a bunch of additional test scenes + for(let i=0; i < 3; i++){ + let id = await vfs.createScene(`test_${i}`); + for(let j=0; j <= i; j++ ){ + await vfs.addTag(id, `tag_${j}`); + } + } + expect(await vfs.getTags()).to.deep.equal([ + {name: "tag_0", size: 3}, + {name: "tag_1", size: 2}, + {name: "tag_2", size: 1}, + ]); + }); - describe("unarchiveScene()", function(){ - it("restores an archived scene", async function(){ - await vfs.archiveScene(scene_id); - await vfs.unarchiveScene(`foo#${scene_id}`); - expect(await vfs.getScene("foo")).to.have.property("archived", null); - }); + it("get tags matching a string", async function(){ + await vfs.addTag(scene_id, `tag_foo`); + await vfs.addTag(scene_id, `foo_tag`); + await vfs.addTag(scene_id, `tag_bar`); - it("throws if archive doesn't exist", async function(){ - await expect(vfs.unarchiveScene("xxx")).to.be.rejectedWith(NotFoundError); - }) - }) + expect(await vfs.getTags({like: "foo"})).to.deep.equal([ + {name:"foo_tag", size: 1}, + {name: "tag_foo", size: 1}, + ]); - describe("createFile()", function(){ - it("can create an empty file", async function(){ - let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: null, size: 0}); - expect(r).to.have.property("id"); - expect(r).to.have.property("generation", 1); - 
expect(r).to.have.property("hash", null); - }); + //Match should be case-insensitive + expect(await vfs.getTags({like: "Foo"})).to.deep.equal([ + {name:"foo_tag", size: 1}, + {name: "tag_foo", size: 1}, + ]); + }); - it("can create a dummy file", async function(){ - let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); - }) + it("supports pagination", async function(){ + //Create a bunch of additional test scenes + for(let i=0; i < 3; i++){ + let id = await vfs.createScene(`test_${i}`); + for(let j=0; j <= i; j++ ){ + await vfs.addTag(id, `tag_${j}`); + } + } + expect(await vfs.getTags({limit: 1})).to.deep.equal([ + {name: "tag_0", size: 3}, + ]); + expect(await vfs.getTags({limit: 2, offset: 1})).to.deep.equal([ + {name: "tag_1", size: 2}, + {name: "tag_2", size: 1}, + ]); + expect(await vfs.getTags({offset: 3})).to.deep.equal([ ]); + }); - it("autoincrements generation", async function(){ - await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); - let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "yyyyy", size: 150}); - expect(r).to.have.property("generation", 2); - }) - it("can copy a file", async function(){ - let foo = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); - let bar = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}, {hash: "xxxxxx", size: 150}); - expect(bar).to.have.property("id").not.equal(foo.id); - expect(bar).to.have.property("generation", 1); - expect(bar).to.have.property("hash", foo.hash); - expect(bar).to.have.property("size", foo.size); + it("don't count archived scenes", async function(){ + await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); + await vfs.archiveScene(scene_id); + 
expect(await vfs.getTags()).to.deep.equal([]); + }); }); - it("can use custom callbacks", async function(){ - let foo = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, ()=>Promise.resolve({hash: null, size: 150})); - expect(foo).to.have.property("hash", null); - expect(foo).to.have.property("size", 150); - - let bar = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}, ()=>Promise.resolve({hash: "xxxxxx", size: 0})); - expect(bar).to.have.property("hash", "xxxxxx"); - expect(bar).to.have.property("size", 0); - }); + describe("getTag()", function(){ + it("Get all scenes attached to a tag", async function(){ + let ids = []; + for(let i=0; i < 3; i++){ + let id = await vfs.createScene(`test_${i}`); + ids.push(id); + await vfs.addTag(id, `tag_foo`); + } + for(let i=3; i < 6; i++){ + let id = await vfs.createScene(`test_${i}`); + await vfs.addTag(id, `tag_bar`); + } + let scenes = await vfs.getTag("tag_foo"); + expect(scenes).to.deep.equal(ids); + }); - it("Set scene type to html when a file named index.html is created", async function(){ - await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); - let scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("html"); - }); + it("Ignore archived scenes", async function(){ + await expect(vfs.addTag(scene_id, "foo")).to.eventually.equal(true); - it("Set scene type to html when a file named scene.svx.json is created", async function(){ - await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); - let scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("voyager"); - }); + let s2 = await vfs.createScene(`test_scene_2`); + await expect(vfs.addTag(s2, "foo")).to.eventually.equal(true); + expect(await vfs.getTag("foo")).to.deep.equal([scene_id, s2]); - 
it("Voyager scene type overrides html scene types", async function(){ - await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); - let scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("html"); - await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); - scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("voyager"); - }); - - it("html scene type does not override voyager scene type", async function(){ - await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); - let scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("voyager"); - await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); - scene = await vfs.getScene("foo"); - expect(scene.type).to.equal("voyager"); - }); - }); + await vfs.archiveScene(s2); + expect(await vfs.getTag("foo")).to.deep.equal([scene_id]); + }); - describe("writeFile()", function(){ - it("can upload a file (relative)", async function(){ - let r = await vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); - expect(r).to.have.property("id").a("number"); - expect(r).to.have.property("generation", 1); - await expect(fs.access(path.join(this.dir, "objects", r.hash as any)), "can't access object file").to.be.fulfilled; - await expect(empty(this.uploads)); - }); - it("can upload a file (absolute)", async function(){ - let r = await expect( - vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) - ).to.be.fulfilled; - expect(r).to.have.property("generation", 1); - expect(r).to.have.property("id").a("number"); + describe("respects permissions", function(){ + let 
userManager :UserManager, alice :User, bob :User; + this.beforeEach(async function(){ + userManager = new UserManager(vfs._db); + alice = await userManager.addUser("alice", "12345678", "admin"); + bob = await userManager.addUser("bob", "12345678", "create"); + }); - await expect(fs.access(path.join(this.dir, "objects", r.hash)), "can't access object file").to.be.fulfilled; - await expect(empty(this.uploads)); - }); - it("gets proper generation", async function(){ - await vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); - for(let i=2; i < 5; i++){ - let foo = await vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); - expect(foo).to.have.property("generation", i); - } - let bar = await vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}); - expect(bar).to.have.property("generation", 1); - }); - it("can upload over an existing file", async function(){ - await expect( - vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) - ).to.eventually.have.property("generation", 1); - let r = await expect( - vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) - ).to.be.fulfilled; - - expect(r).to.have.property("generation", 2); - await expect(fs.access(path.join(this.dir, "objects", r.hash)), "can't access object file").to.be.fulfilled; - await expect(empty(this.uploads)); - }); - - it("cleans up on errors", async function(){ - async function* badStream(){ - yield Promise.resolve(Buffer.from("foo")); - yield Promise.reject(new Error("CONNRESET")); - } - await expect(vfs.writeFile(badStream(), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null})).to.be.rejectedWith("CONNRESET"); - await expect(fs.access(path.join(this.dir, "foo.txt")), 
"can't access foo.txt").to.be.rejectedWith("ENOENT"); - await expect(empty(this.uploads)); - }); - }); + it("return scenes with public read access", async function(){ + await vfs.addTag("foo", "foo"); + expect(await vfs.getTag("foo", alice.uid), "with admin user_id").to.deep.equal([scene_id]); + expect(await vfs.getTag("foo", bob.uid), "with normal user id").to.deep.equal([scene_id]); + }); + it("return all scenes for admins", async function(){ + //Scene if from bob, alice has no special rights over it, but should see it anyways + const id = await vfs.createScene("bob-private", bob.uid); + await userManager.setPublicAccess("bob-private", "none"); + await userManager.setDefaultAccess("bob-private", "none"); + await vfs.addTag("bob-private", "foo"); + expect(await vfs.getTag("foo"), "without user id").to.deep.equal([]); + expect(await vfs.getTag("foo", alice.uid), "with admin id").to.deep.equal([id]); + }) + + it("won't return non-readable scene", async function(){ + const id = await vfs.createScene("admin-only", alice.uid); + await userManager.setPublicAccess("admin-only", "none"); + await userManager.setDefaultAccess("admin-only", "none"); + await vfs.addTag("admin-only", "foo"); + expect(await vfs.getTag("foo"), "without user_id").to.deep.equal([]); + + expect(await vfs.getTag("foo", alice.uid), "with admin user_id").to.deep.equal([id]); + + expect(await vfs.getTag("foo", bob.uid)).to.deep.equal([]); + }); + }) + }); + }); describe("", function(){ - let r:FileProps, ctime :Date; - let props :GetFileParams = {scene: "foo", name: "articles/foo.txt"}; + let scene_id :number; + //Create a dummy scene for future tests this.beforeEach(async function(){ - r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, mime: "text/html", user_id: null} ); - ctime = r.ctime; + scene_id = await vfs.createScene("foo"); }); - describe("getFileProps", function(){ - it("get a file properties", async function(){ - let r = await expect(vfs.getFileProps(props)).to.be.fulfilled; - 
expect(r).to.have.property("generation", 1); - expect(r).to.have.property("ctime").instanceof(Date); - expect(r).to.have.property("mtime").instanceof(Date); - expect(r.ctime.valueOf()).to.equal(ctime.valueOf()); - expect(r.mtime.valueOf()).to.equal(ctime.valueOf()); + + describe("renameScene()", function(){ + it("can change a scene name", async function(){ + await expect(vfs.renameScene(scene_id, "bar")).to.be.fulfilled; }); - it("uses the same format as writeFile", async function(){ - await expect(vfs.getFileProps(props)).to.eventually.deep.equal(r); - }) - it("get proper mtime and ctime", async function(){ - let mtime = new Date(Math.floor(Date.now())+100*1000); - let r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); - r = await expect(run(`UPDATE files SET ctime = $2 WHERE file_id = $1`, [ r.id, mtime.toISOString()])).to.be.fulfilled; - expect(r).to.have.property("changes", 1); - r = await expect(vfs.getFileProps(props)).to.be.fulfilled; - expect(r.ctime.valueOf()).to.equal(ctime.valueOf()); - expect(r.mtime.valueOf()).to.equal(mtime.valueOf()); - }); - - it("can use a scene ID", async function(){ - let r = await expect(vfs.getFileProps({...props, scene: scene_id})).to.be.fulfilled; - expect(r).to.have.property("name", props.name); - }) - - it("throw 404 error if file doesn't exist", async function(){ - await expect(vfs.getFileProps({...props, name: "bar.html"})).to.be.rejectedWith("404"); + it("throw a 404 error", async function(){ + await expect(vfs.renameScene(404, "bar")).to.be.rejectedWith("404"); }); + }) - it("get archived file", async function(){ - let id = await vfs.removeFile({...props, user_id: null}); - await expect(vfs.getFileProps(props), `File with id ${id} shouldn't be returned`).to.be.rejectedWith("[404]"); - await expect(vfs.getFileProps({...props, archive: true})).to.eventually.have.property("id", id); + describe("archiveScene()", function(){ + it("makes scene hidden", async function(){ + await 
vfs.archiveScene("foo"); + expect(await vfs.getScenes(0, {archived: false})).to.have.property("length", 0); }); - it("get by generation", async function(){ - let r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); - expect(r).to.have.property("generation", 2); - await expect(vfs.getFileProps({...props, generation: 2})).to.eventually.have.property("generation", 2); - await expect(vfs.getFileProps({...props, generation: 1})).to.eventually.have.property("generation", 1); + + it("can't archive twice", async function(){ + await vfs.archiveScene(scene_id); + await expect(vfs.archiveScene(scene_id), (await vfs._db.all("SELECT * FROM scenes"))[0].scene_name).to.be.rejectedWith(NotFoundError); }); - it("get archived by generation", async function(){ - await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); - let id = await vfs.removeFile({...props, user_id: null}); - await expect(vfs.getFileProps({...props, archive: true, generation: 3})).to.eventually.have.property("id", id); - await expect(vfs.getFileProps({...props, archive: true, generation: 3}, true)).to.eventually.have.property("id", id); + it("can remove archived scene (by id)", async function(){ + await vfs.archiveScene(scene_id); + await expect(vfs.removeScene(scene_id)).to.be.fulfilled; }); - it("get document", async function(){ - let {ctime:docCtime, ...doc} = await vfs.writeDoc("{}", {...props, user_id: null}); - await expect(vfs.getFileProps({...props, archive: true, generation: doc.generation}, true)).to.eventually.deep.equal({...doc, ctime, data: "{}"}); + it("can remove archived scene (by name)", async function(){ + await vfs.archiveScene(scene_id); + await expect(vfs.removeScene(`foo#${scene_id}`)).to.be.fulfilled; }); - + it("store archive time", async function(){ + //To be used later + await vfs.archiveScene(scene_id); + let {archived} = await vfs._db.get(`SELECT archived FROM scenes WHERE scene_id= $1`, [scene_id]); + expect(archived).to.be.instanceof(Date); + 
expect(archived.toString()).not.to.equal('Invalid Date'); + expect(archived.valueOf(), archived.toUTCString()).to.be.above(new Date().valueOf() - 2000); + expect(archived.valueOf(), archived.toUTCString()).to.be.below(new Date().valueOf()+1); + }) }); - describe("getFileBefore()", function(){ - - it("a file is \"before\" another", async function (){ - //Source file - let {id:expectedId} = r; - let {id: refId} = await vfs.writeDoc("", {scene: scene_id, name: "reference.txt", mime: "text/html", user_id: null}); - expect(refId).to.be.a("number"); + describe("unarchiveScene()", function(){ + it("restores an archived scene", async function(){ + await vfs.archiveScene(scene_id); + await vfs.unarchiveScene(`foo#${scene_id}`); + expect(await vfs.getScene("foo")).to.have.property("archived", null); + }); - //Ensure a matching file exists AFTER our reference - await vfs.writeDoc("", {...props, user_id: null}); + it("throws if archive doesn't exist", async function(){ + await expect(vfs.unarchiveScene("xxx")).to.be.rejectedWith(NotFoundError); + }) + }) - let f = await vfs.getFileBefore({...props, before: refId, scene: scene_id}); - expect(f).to.have.property("id", expectedId); + describe("createFile()", function(){ + it("can create an empty file", async function(){ + let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: null, size: 0}); + expect(r).to.have.property("id"); + expect(r).to.have.property("generation", 1); + expect(r).to.have.property("hash", null); }); - it("a file is \"before\" itself", async function (){ - //Ensure a matching file exists AFTER our reference - await vfs.writeDoc("", {...props, user_id: null}); + it("can create a dummy file", async function(){ + let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); + }) - let f = await vfs.getFileBefore({...props, before: r.id, scene: scene_id}); - 
expect(f).to.have.property("id", r.id); + it("autoincrements generation", async function(){ + await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); + let r = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "yyyyy", size: 150}); + expect(r).to.have.property("generation", 2); + }) + it("can copy a file", async function(){ + let foo = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, {hash: "xxxxxx", size: 150}); + let bar = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}, {hash: "xxxxxx", size: 150}); + expect(bar).to.have.property("id").not.equal(foo.id); + expect(bar).to.have.property("generation", 1); + expect(bar).to.have.property("hash", foo.hash); + expect(bar).to.have.property("size", foo.size); }); - it("throws an error if reference file doesn't exist", async function(){ - await expect(vfs.getFileBefore({...props, before: -1, scene: scene_id})).to.be.rejectedWith(NotFoundError); + it("can use custom callbacks", async function(){ + let foo = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}, ()=>Promise.resolve({hash: null, size: 150})); + expect(foo).to.have.property("hash", null); + expect(foo).to.have.property("size", 150); + + let bar = await vfs.createFile( {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}, ()=>Promise.resolve({hash: "xxxxxx", size: 0})); + expect(bar).to.have.property("hash", "xxxxxx"); + expect(bar).to.have.property("size", 0); }); - it("throws an error if referenced file is from another scene", async function(){ - let name = randomBytes(6).toString("base64url"); - await vfs.createScene(name); - let {id} = await vfs.writeDoc("", {name: "foo.txt", scene: name, mime: "text/plain", user_id: null}); - await expect(vfs.getFileBefore({...props, 
before: id, scene: scene_id})).to.be.rejectedWith(NotFoundError); + it("Set scene type to html when a file named index.html is created", async function(){ + await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); + let scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("html"); }); - it("throws if a a file was removed at the reference point", async function(){ - await vfs.removeFile({...props, user_id: null}); - - //The reference point - let {id: refId} = await vfs.writeDoc("", {scene: scene_id, name: "reference.txt", mime: "text/html", user_id: null}); - expect(refId).to.be.a("number"); - - await vfs.writeDoc("foo", {...props, user_id: null}); - - await expect(vfs.getFileBefore({...props, before: refId, scene: scene_id})).to.be.rejectedWith(NotFoundError); + it("Set scene type to html when a file named scene.svx.json is created", async function(){ + await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); + let scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("voyager"); }); - }); - describe("getFile()", function(){ - it("get a file", async function(){ - let {stream} = await vfs.getFile(props); - let str = ""; - for await (let d of stream!){ - str += d.toString("utf8"); - } - expect(str).to.equal("foo\n"); + it("Voyager scene type overrides html scene types", async function(){ + await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); + let scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("html"); + await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); + scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("voyager"); }); + + it("html scene type does not override voyager scene type", async 
function(){ + await vfs.createFile( {scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json", user_id: null}, {hash: "xxxxxx", size: 150}); + let scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("voyager"); + await vfs.createFile( {scene: "foo", mime: "text/html", name: "index.html", user_id: null}, {hash: "xxxxxx", size: 150}); + scene = await vfs.getScene("foo"); + expect(scene.type).to.equal("voyager"); + }); + }); - it("get a document", async function(){ - //getFile can sometimes be used to get a stream to an existing document. Its shouldn't care and do it. - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let {stream} = await vfs.getFile(props); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal("Hello World\n"); - }); - - it("get a range of a document", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let start = 3; - let end = 7; - let {stream} = await vfs.getFile({...props,start,end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal("lo W"); + describe("writeFile()", function(){ + it("can upload a file (relative)", async function(){ + let r = await vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); + expect(r).to.have.property("id").a("number"); + expect(r).to.have.property("generation", 1); + await expect(fs.access(path.join(this.dir, "objects", r.hash as any)), "can't access object file").to.be.fulfilled; + await expect(empty(this.uploads)); }); + it("can upload a file (absolute)", async function(){ + let r = await expect( + vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) + ).to.be.fulfilled; + expect(r).to.have.property("generation", 1); + expect(r).to.have.property("id").a("number"); - it("get a document range of a document with NO end", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let start = 3; - let {stream} = await vfs.getFile({...props,start}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); + await expect(fs.access(path.join(this.dir, "objects", r.hash)), "can't access object file").to.be.fulfilled; + await expect(empty(this.uploads)); + }); + it("gets proper generation", async function(){ + await vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); + for(let i=2; i < 5; i++){ + let foo = await vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}); + expect(foo).to.have.property("generation", i); } - expect(str).to.equal("lo World\n"); + let bar = await vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/bar.txt", user_id: null}); + expect(bar).to.have.property("generation", 1); }); + it("can upload over an existing file", async function(){ + await expect( + vfs.writeFile(dataStream(["foo","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) + ).to.eventually.have.property("generation", 1); + let r = await expect( + vfs.writeFile(dataStream(["bar","\n"]), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null}) + ).to.be.fulfilled; - - it("get a document of a document with NO start", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let end = 3; - let {stream} = await vfs.getFile({...props,end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); + expect(r).to.have.property("generation", 2); + await expect(fs.access(path.join(this.dir, "objects", r.hash)), "can't access object file").to.be.fulfilled; + await expect(empty(this.uploads)); + }); + + it("cleans up on errors", async function(){ + async function* badStream(){ + yield Promise.resolve(Buffer.from("foo")); + yield Promise.reject(new Error("CONNRESET")); } - expect(str).to.equal("Hel"); + await expect(vfs.writeFile(badStream(), {scene: "foo", mime: "text/html", name: "articles/foo.txt", user_id: null})).to.be.rejectedWith("CONNRESET"); + await expect(fs.access(path.join(this.dir, "foo.txt")), "can't access foo.txt").to.be.rejectedWith("ENOENT"); + await expect(empty(this.uploads)); }); + }); - it("get a document with end after end of file", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let start = 3; - let end = 100; - let {stream} = await vfs.getFile({...props,start,end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal("lo World\n"); + + describe("", function(){ + let r:FileProps, ctime :Date; + let props :GetFileParams = {scene: "foo", name: "articles/foo.txt"}; + this.beforeEach(async function(){ + r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, mime: "text/html", user_id: null} ); + ctime = r.ctime; }); + describe("getFileProps", function(){ + it("get a file properties", async function(){ + let r = await expect(vfs.getFileProps(props)).to.be.fulfilled; + expect(r).to.have.property("generation", 1); + expect(r).to.have.property("ctime").instanceof(Date); + expect(r).to.have.property("mtime").instanceof(Date); + expect(r.ctime.valueOf()).to.equal(ctime.valueOf()); + expect(r.mtime.valueOf()).to.equal(ctime.valueOf()); + }); + it("uses the same format as writeFile", async function(){ + await expect(vfs.getFileProps(props)).to.eventually.deep.equal(r); + }) + it("get proper mtime and ctime", async function(){ + let mtime = new Date(Math.floor(Date.now())+100*1000); + let r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); + r = await expect(run(`UPDATE files SET ctime = $2 WHERE file_id = $1`, [ r.id, mtime.toISOString()])).to.be.fulfilled; + expect(r).to.have.property("changes", 1); + r = await expect(vfs.getFileProps(props)).to.be.fulfilled; + expect(r.ctime.valueOf()).to.equal(ctime.valueOf()); + expect(r.mtime.valueOf()).to.equal(mtime.valueOf()); + }); - it("get a document with start after end of file", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null}); - let start = 50; //getFile can sometimes be used to get a stream to an existing document. Its shouldn't care and do it. 
+ it("can use a scene ID", async function(){ + let r = await expect(vfs.getFileProps({...props, scene: scene_id})).to.be.fulfilled; + expect(r).to.have.property("name", props.name); + }) - let end = 100; - let {stream} = await vfs.getFile({...props,start,end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal(""); - }); - - it("get a range of bytes of a document with start and end", async function(){ - // getFile can get start and end properties to read parts of a file - let start = 1; - let end = 3; - let {stream} = await vfs.getFile({...props, start, end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str.length).to.equal(end-start); - expect(str).to.equal("oo"); - }); - - it("get a range of bytes of a document with start and NO end", async function(){ - // When getting only a start, getFile goes from start property to end of the file - let start = 1; - let {stream} = await vfs.getFile({...props, start}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str.length).to.equal("foo\n".length-start); - expect(str).to.equal("oo\n"); - }); - - it("get a range of bytes of a document with NO start and end", async function(){ - // When getting only an end, getFile goes from the start of the file to end property - let end = 2; - let {stream} = await vfs.getFile({...props, end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str.length).to.equal(end); - expect(str).to.equal("fo"); - }); + it("throw 404 error if file doesn't exist", async function(){ + await expect(vfs.getFileProps({...props, name: "bar.html"})).to.be.rejectedWith("404"); + }); + it("get archived file", async function(){ + let id = await vfs.removeFile({...props, user_id: null}); + await expect(vfs.getFileProps(props), `File with id ${id} shouldn't be returned`).to.be.rejectedWith("[404]"); + await expect(vfs.getFileProps({...props, archive: true})).to.eventually.have.property("id", id); + }); - it("get a range of bytes of a document with end after end of file", async function(){ - let start = 1; - let end = 50; - let {stream} = await vfs.getFile({...props, start, end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal("oo\n"); - }); + it("get by generation", async function(){ + let r = await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); + expect(r).to.have.property("generation", 2); + await expect(vfs.getFileProps({...props, generation: 2})).to.eventually.have.property("generation", 2); + await expect(vfs.getFileProps({...props, generation: 1})).to.eventually.have.property("generation", 1); + }); - it("get a range of bytes of a document with start after end of file", async function(){ - let start = 20; - let end = 50; - let {stream} = await vfs.getFile({...props, start, end}); - let str = ""; - for await (let d of stream!){ - expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; - str += d.toString("utf8"); - } - expect(str).to.equal(""); - }); + it("get archived by generation", async function(){ + await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null}); + let id = await vfs.removeFile({...props, user_id: null}); + await expect(vfs.getFileProps({...props, archive: true, generation: 3})).to.eventually.have.property("id", id); + await expect(vfs.getFileProps({...props, archive: true, generation: 3}, true)).to.eventually.have.property("id", id); + }); + + it("get document", async function(){ + let {ctime:docCtime, ...doc} = await vfs.writeDoc("{}", {...props, user_id: null}); + await expect(vfs.getFileProps({...props, archive: true, generation: doc.generation}, true)).to.eventually.deep.equal({...doc, ctime, data: "{}"}); + }); - it("throw 404 error if file doesn't exist", async function(){ - await expect(vfs.getFile({...props, name: "bar.html"})).to.be.rejectedWith("404"); }); - it("throw 404 error if file was deleted", async function(){ - await vfs.removeFile({...props, user_id: null}); - await expect(vfs.getFile(props)).to.be.rejectedWith("404"); - }); + describe("getFileBefore()", function(){ - it("won't try to open a folder, just returns props", async function(){ - let file = await expect(vfs.getFile({scene: props.scene, name: "articles"})).to.be.fulfilled; - expect(file).to.have.property("mime", "text/directory"); - expect(file).to.not.have.property("stream"); - }); - }); + it("a file is \"before\" another", async function (){ + //Source file + let {id:expectedId} = r; + let {id: refId} = await vfs.writeDoc("", {scene: scene_id, name: "reference.txt", mime: "text/html", user_id: null}); + expect(refId).to.be.a("number"); - describe("getFileById()", function(){ - it("gets file props using its id", async function(){ - const {scene_id:stored_scene_id, data, ...file} = await vfs.getFileById(r.id); - expect(file).to.deep.equal(r); - expect(data).to.be.null; - 
expect(stored_scene_id).to.equal(scene_id); - }); + //Ensure a matching file exists AFTER our reference + await vfs.writeDoc("", {...props, user_id: null}); - it("gets a document's data using its id", async function(){ - let doc = await vfs.writeDoc("Hello!", {...props, user_id: null}); - const {scene_id:stored_scene_id, data, ...file} = await vfs.getFileById(doc.id); - expect(file.id).to.equal(doc.id); - expect(data).to.equal("Hello!"); - expect(stored_scene_id).to.equal(scene_id); - }); + let f = await vfs.getFileBefore({...props, before: refId, scene: scene_id}); + expect(f).to.have.property("id", expectedId); + }); - it("throws 404 if id doesn't map to a file", async function(){ - await expect(vfs.getFileById(-1)).to.be.rejectedWith(NotFoundError); - }); - }); - - describe("getFileHistory()", function(){ - it("get previous versions of a file", async function(){ - let r2 = await vfs.writeFile(dataStream(["foo2","\n"]), {...props, user_id: null} ); - let r3 = await vfs.writeFile(dataStream(["foo3","\n"]), {...props, user_id: null} ); - await vfs.writeFile(dataStream(["bar","\n"]), {...props, name:"bar", user_id: null} ); //another file - let versions = await vfs.getFileHistory(props); - let fileProps = await vfs.getFileProps(props); - //Expect reverse order - expect(versions.map(v=>v.generation)).to.deep.equal([3, 2, 1]); - versions.forEach((version, i)=>{ - expect(Object.keys(version).sort(),`Bad file properties at index ${i}`).to.deep.equal(Object.keys(fileProps).sort()) + it("a file is \"before\" itself", async function (){ + //Ensure a matching file exists AFTER our reference + await vfs.writeDoc("", {...props, user_id: null}); + + let f = await vfs.getFileBefore({...props, before: r.id, scene: scene_id}); + expect(f).to.have.property("id", r.id); }); - }); - it("works using a scene's name", async function(){ - await expect(vfs.getFileHistory({...props, scene: "foo"})).to.be.fulfilled; - }); - it("throw a 404 if file doesn't exist", async function(){ - await 
expect(vfs.getFileHistory({...props, name: "missing"})).to.be.rejectedWith("404"); - }); - it("throw a 404 if scene doesn't exist (by name)", async function(){ - await expect(vfs.getFileHistory({...props, scene: "missing"})).to.be.rejectedWith("404"); - }); - it("throw a 404 if scene doesn't exist (by id)", async function(){ - await expect(vfs.getFileHistory({...props, scene: scene_id+1})).to.be.rejectedWith("404"); - }); - }); - describe("removeFile()", function(){ - it("add an entry with state = REMOVED", async function(){ - await vfs.removeFile({...props, user_id: null}); - let files = await all(`SELECT * FROM files WHERE name = '${props.name}'`); - expect(files).to.have.property("length", 2); - expect(files[0]).to.include({ - hash: "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw", - generation: 1 + it("throws an error if reference file doesn't exist", async function(){ + await expect(vfs.getFileBefore({...props, before: -1, scene: scene_id})).to.be.rejectedWith(NotFoundError); }); - expect(files[1]).to.include({ - hash: null, - generation: 2 + + it("throws an error if referenced file is from another scene", async function(){ + let name = randomBytes(6).toString("base64url"); + await vfs.createScene(name); + let {id} = await vfs.writeDoc("", {name: "foo.txt", scene: name, mime: "text/plain", user_id: null}); + await expect(vfs.getFileBefore({...props, before: id, scene: scene_id})).to.be.rejectedWith(NotFoundError); }); - }); - it("requires the file to actually exist", async function(){ - await expect(vfs.removeFile({...props, name: "bar.txt", user_id: null})).to.be.rejectedWith("404"); - }); - it("require file to be in active state", async function(){ - await expect(vfs.removeFile({...props, user_id: null})).to.be.fulfilled, - await expect(vfs.removeFile({...props, user_id: null})).to.be.rejectedWith("already deleted"); - }); - }); - - describe("renameFile()", function(){ - it("rename a file", async function(){ - await vfs.renameFile({...props, user_id: null}, 
"bar.txt"); - await expect(vfs.getFileProps(props), "old file should not be reported anymore").to.be.rejectedWith("404"); - let file = await expect(vfs.getFileProps({...props, name: "bar.txt"})).to.be.fulfilled; - expect(file).to.have.property("mime", "text/html"); - }); + it("throws if a a file was removed at the reference point", async function(){ + await vfs.removeFile({...props, user_id: null}); - it("throw 404 error if scene doesn't exist", async function(){ - await expect(vfs.renameFile({...props, user_id: null, scene: "bar"}, "bar.txt")).to.be.rejectedWith("404"); + //The reference point + let {id: refId} = await vfs.writeDoc("", {scene: scene_id, name: "reference.txt", mime: "text/html", user_id: null}); + expect(refId).to.be.a("number"); + + await vfs.writeDoc("foo", {...props, user_id: null}); + + await expect(vfs.getFileBefore({...props, before: refId, scene: scene_id})).to.be.rejectedWith(NotFoundError); + }); }); + + describe("getFile()", function(){ + it("get a file", async function(){ + let {stream} = await vfs.getFile(props); + let str = ""; + for await (let d of stream!){ + str += d.toString("utf8"); + } + expect(str).to.equal("foo\n"); + }); + + it("get a document", async function(){ + //getFile can sometimes be used to get a stream to an existing document. Its shouldn't care and do it. + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let {stream} = await vfs.getFile(props); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("Hello World\n"); + }); + + it("get a range of a document", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let start = 3; + let end = 7; + let {stream} = await vfs.getFile({...props,start,end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("lo W"); + }); + + + it("get a document range of a document with NO end", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let start = 3; + let {stream} = await vfs.getFile({...props,start}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("lo World\n"); + }); + - it("throw 404 error if file doesn't exist", async function(){ - await expect(vfs.renameFile({...props, user_id: null, name: "bar.html"}, "baz.html")).to.be.rejectedWith("404"); - }); + it("get a document of a document with NO start", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let end = 3; + let {stream} = await vfs.getFile({...props,end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("Hel"); + }); - it("throw 409 error if destination file already exist", async function(){ - await vfs.writeDoc("Hello World\n", {...props, user_id: null, name: "baz.txt"}); - await expect(vfs.renameFile({...props, user_id: null}, "baz.txt")).to.be.rejectedWith("409"); - }); - it("file can be created back after rename", async function(){ - await vfs.renameFile({...props, user_id: null}, "bar.txt"); - await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null} ); - await expect(vfs.getFileProps({...props, name: "bar.txt"})).to.be.fulfilled; - //Check if it doesn't mess with the history - let hist = await vfs.getFileHistory(props); - expect(hist.map(f=>f.hash)).to.deep.equal([ - "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw", - null, - "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw" - ]); + it("get a document with end after end of file", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let start = 3; + let end = 100; + let {stream} = await vfs.getFile({...props,start,end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("lo World\n"); + }); + + it("get a document with start after end of file", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null}); + let start = 50; //getFile can sometimes be used to get a stream to an existing document. Its shouldn't care and do it. + + let end = 100; + let {stream} = await vfs.getFile({...props,start,end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal(""); + }); + + it("get a range of bytes of a document with start and end", async function(){ + // getFile can get start and end properties to read parts of a file + let start = 1; + let end = 3; + let {stream} = await vfs.getFile({...props, start, end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str.length).to.equal(end-start); + expect(str).to.equal("oo"); + }); + + it("get a range of bytes of a document with start and NO end", async function(){ + // When getting only a start, getFile goes from start property to end of the file + let start = 1; + let {stream} = await vfs.getFile({...props, start}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str.length).to.equal("foo\n".length-start); + expect(str).to.equal("oo\n"); + }); + + it("get a range of bytes of a document with NO start and end", async function(){ + // When getting only an end, getFile goes from the start of the file to end property + let end = 2; + let {stream} = await vfs.getFile({...props, end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str.length).to.equal(end); + expect(str).to.equal("fo"); + }); + + + it("get a range of bytes of a document with end after end of file", async function(){ + let start = 1; + let end = 50; + let {stream} = await vfs.getFile({...props, start, end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. 
Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal("oo\n"); + }); + + it("get a range of bytes of a document with start after end of file", async function(){ + let start = 20; + let end = 50; + let {stream} = await vfs.getFile({...props, start, end}); + let str = ""; + for await (let d of stream!){ + expect(Buffer.isBuffer(d), `chunk is a ${typeof d}. Expected a buffer`).to.be.true; + str += d.toString("utf8"); + } + expect(str).to.equal(""); + }); + + + it("throw 404 error if file doesn't exist", async function(){ + await expect(vfs.getFile({...props, name: "bar.html"})).to.be.rejectedWith("404"); + }); + + it("throw 404 error if file was deleted", async function(){ + await vfs.removeFile({...props, user_id: null}); + await expect(vfs.getFile(props)).to.be.rejectedWith("404"); + }); + + it("won't try to open a folder, just returns props", async function(){ + let file = await expect(vfs.getFile({scene: props.scene, name: "articles"})).to.be.fulfilled; + expect(file).to.have.property("mime", "text/directory"); + expect(file).to.not.have.property("stream"); + }); }); - it("can move to a deleted file", async function(){ - await vfs.renameFile({...props, user_id: null}, "bar.txt"); - //move it back in place after it was deleted - await vfs.renameFile({...props, name: "bar.txt", user_id: null}, props.name); - let hist = await vfs.getFileHistory(props); - expect(hist.map(f=>`${f.name}#${f.generation}: ${f.hash}`)).to.deep.equal([ - `articles/foo.txt#3: tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw`, - `articles/foo.txt#2: null`, - `articles/foo.txt#1: tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw` - ]); - await expect(vfs.getFile({...props, name: "bar.txt"})).to.be.rejectedWith(NotFoundError); + + describe("getFileById()", function(){ + it("gets file props using its id", async function(){ + const {scene_id:stored_scene_id, data, ...file} = await vfs.getFileById(r.id); + expect(file).to.deep.equal(r); + expect(data).to.be.null; + 
expect(stored_scene_id).to.equal(scene_id); + }); + + it("gets a document's data using its id", async function(){ + let doc = await vfs.writeDoc("Hello!", {...props, user_id: null}); + const {scene_id:stored_scene_id, data, ...file} = await vfs.getFileById(doc.id); + expect(file.id).to.equal(doc.id); + expect(data).to.equal("Hello!"); + expect(stored_scene_id).to.equal(scene_id); + }); + + it("throws 404 if id doesn't map to a file", async function(){ + await expect(vfs.getFileById(-1)).to.be.rejectedWith(NotFoundError); + }); }); - it("can move in a folder", async function(){ - await vfs.renameFile({...props, user_id: null}, "articles/bar.txt"); - await expect(vfs.getFileProps(props)).to.be.rejectedWith(NotFoundError); - expect(await vfs.getFileProps({...props, name: "articles/bar.txt"})).to.have.property("hash", "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw"); + + describe("getFileHistory()", function(){ + it("get previous versions of a file", async function(){ + let r2 = await vfs.writeFile(dataStream(["foo2","\n"]), {...props, user_id: null} ); + let r3 = await vfs.writeFile(dataStream(["foo3","\n"]), {...props, user_id: null} ); + await vfs.writeFile(dataStream(["bar","\n"]), {...props, name:"bar", user_id: null} ); //another file + let versions = await vfs.getFileHistory(props); + let fileProps = await vfs.getFileProps(props); + //Expect reverse order + expect(versions.map(v=>v.generation)).to.deep.equal([3, 2, 1]); + versions.forEach((version, i)=>{ + expect(Object.keys(version).sort(),`Bad file properties at index ${i}`).to.deep.equal(Object.keys(fileProps).sort()) + }); + }); + it("works using a scene's name", async function(){ + await expect(vfs.getFileHistory({...props, scene: "foo"})).to.be.fulfilled; + }); + it("throw a 404 if file doesn't exist", async function(){ + await expect(vfs.getFileHistory({...props, name: "missing"})).to.be.rejectedWith("404"); + }); + it("throw a 404 if scene doesn't exist (by name)", async function(){ + await 
expect(vfs.getFileHistory({...props, scene: "missing"})).to.be.rejectedWith("404"); + }); + it("throw a 404 if scene doesn't exist (by id)", async function(){ + await expect(vfs.getFileHistory({...props, scene: scene_id+1})).to.be.rejectedWith("404"); + }); }); - it("can move a document", async function(){ - const props = {scene: scene_id, user_id: null, name:"foo.json", mime: "application/json"}; - let doc = await vfs.writeDoc("{}",props); - expect(doc).to.have.property("hash").ok; - await expect(vfs.renameFile(props, "bar.json")).to.be.fulfilled; - expect(await vfs.getFileProps({...props, name: "bar.json"})).to.have.property("hash", doc.hash); + describe("removeFile()", function(){ + it("add an entry with state = REMOVED", async function(){ + await vfs.removeFile({...props, user_id: null}); + let files = await all(`SELECT * FROM files WHERE name = '${props.name}'`); + expect(files).to.have.property("length", 2); + expect(files[0]).to.include({ + hash: "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw", + generation: 1 + }); + expect(files[1]).to.include({ + hash: null, + generation: 2 + }); + }); + it("requires the file to actually exist", async function(){ + await expect(vfs.removeFile({...props, name: "bar.txt", user_id: null})).to.be.rejectedWith("404"); + }); + it("require file to be in active state", async function(){ + await expect(vfs.removeFile({...props, user_id: null})).to.be.fulfilled, + await expect(vfs.removeFile({...props, user_id: null})).to.be.rejectedWith("already deleted"); + }); }); - }); - }) - + + describe("renameFile()", function(){ + + it("rename a file", async function(){ + await vfs.renameFile({...props, user_id: null}, "bar.txt"); + await expect(vfs.getFileProps(props), "old file should not be reported anymore").to.be.rejectedWith("404"); + let file = await expect(vfs.getFileProps({...props, name: "bar.txt"})).to.be.fulfilled; + expect(file).to.have.property("mime", "text/html"); + }); - describe("writeDoc()", function(){ - it("insert a new 
document using scene_id", async function(){ - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await expect(all(`SELECT * FROM files WHERE name = 'scene.svx.json'`)).to.eventually.have.property("length", 1); - }) - it("insert a new document using scene_name", async function(){ - await vfs.writeDoc("{}", {scene: "foo", user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await expect(all(`SELECT * FROM files WHERE name = 'scene.svx.json'`)).to.eventually.have.property("length", 1); - }) - it("requires a scene to exist", async function(){ - await expect(vfs.writeDoc("{}", {scene: 125 /*arbitrary non-existent scene id */, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).to.be.rejectedWith("404"); - await expect(all(`SELECT * FROM files WHERE fk_scene_id = 125`)).to.eventually.have.property("length", 0); - }); - it("can provide an author", async function(){ - let user_id = Uid.make(); - await get(`INSERT INTO users ( user_id, username ) VALUES ($1, 'alice')`, [user_id]); - await expect(vfs.writeDoc("{}", {scene: scene_id, user_id: user_id, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).to.be.fulfilled; - let files = await all(`SELECT data, fk_author_id AS fk_author_id FROM files WHERE name = 'scene.svx.json'`); - expect(files).to.have.length(1); - expect(files[0]).to.deep.equal({ - data: "{}", - fk_author_id: user_id, - }); - }); - it("updates scene's current doc", async function(){ - for(let i = 1; i<=3; i++){ - let id = (await vfs.writeDoc(`{"i":${i}}`, {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; - await expect(vfs.getDoc(scene_id)).to.eventually.deep.include({id}); - } - }); + it("throw 404 error if scene doesn't exist", async function(){ + await expect(vfs.renameFile({...props, user_id: null, scene: "bar"}, 
"bar.txt")).to.be.rejectedWith("404"); + }); + + it("throw 404 error if file doesn't exist", async function(){ + await expect(vfs.renameFile({...props, user_id: null, name: "bar.html"}, "baz.html")).to.be.rejectedWith("404"); + }); - it("reports byte size, not character size", async function(){ - let str = `{"id":"你好"}`; - expect(str.length).not.to.equal(Buffer.byteLength(str)); - const doc = await vfs.writeDoc(str, {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - expect(doc).to.have.property("size", Buffer.byteLength(str)); - }) + it("throw 409 error if destination file already exist", async function(){ + await vfs.writeDoc("Hello World\n", {...props, user_id: null, name: "baz.txt"}); + await expect(vfs.renameFile({...props, user_id: null}, "baz.txt")).to.be.rejectedWith("409"); + }); - }); + it("file can be created back after rename", async function(){ + await vfs.renameFile({...props, user_id: null}, "bar.txt"); + await vfs.writeFile(dataStream(["foo","\n"]), {...props, user_id: null} ); + await expect(vfs.getFileProps({...props, name: "bar.txt"})).to.be.fulfilled; + //Check if it doesn't mess with the history + let hist = await vfs.getFileHistory(props); + expect(hist.map(f=>f.hash)).to.deep.equal([ + "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw", + null, + "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw" + ]); + }); + it("can move to a deleted file", async function(){ + await vfs.renameFile({...props, user_id: null}, "bar.txt"); + //move it back in place after it was deleted + await vfs.renameFile({...props, name: "bar.txt", user_id: null}, props.name); + let hist = await vfs.getFileHistory(props); + expect(hist.map(f=>`${f.name}#${f.generation}: ${f.hash}`)).to.deep.equal([ + `articles/foo.txt#3: tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw`, + `articles/foo.txt#2: null`, + `articles/foo.txt#1: tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw` + ]); + await expect(vfs.getFile({...props, name: 
"bar.txt"})).to.be.rejectedWith(NotFoundError); + }); + it("can move in a folder", async function(){ + await vfs.renameFile({...props, user_id: null}, "articles/bar.txt"); + await expect(vfs.getFileProps(props)).to.be.rejectedWith(NotFoundError); + expect(await vfs.getFileProps({...props, name: "articles/bar.txt"})).to.have.property("hash", "tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw"); + }); - - describe("getScene()", function(){ - this.beforeEach(async function(){ - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - }); + it("can move a document", async function(){ + const props = {scene: scene_id, user_id: null, name:"foo.json", mime: "application/json"}; + let doc = await vfs.writeDoc("{}",props); + expect(doc).to.have.property("hash").ok; + await expect(vfs.renameFile(props, "bar.json")).to.be.fulfilled; + expect(await vfs.getFileProps({...props, name: "bar.json"})).to.have.property("hash", doc.hash); + }); + }); + }) + - it("throw an error if not found", async function(){ - await expect(vfs.getScene("bar")).to.be.rejectedWith("scene_name"); - }); + describe("writeDoc()", function(){ + it("insert a new document using scene_id", async function(){ + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await expect(all(`SELECT * FROM files WHERE name = 'scene.svx.json'`)).to.eventually.have.property("length", 1); + }) + it("insert a new document using scene_name", async function(){ + await vfs.writeDoc("{}", {scene: "foo", user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await expect(all(`SELECT * FROM files WHERE name = 'scene.svx.json'`)).to.eventually.have.property("length", 1); + }) + it("requires a scene to exist", async function(){ + await expect(vfs.writeDoc("{}", {scene: 125 /*arbitrary non-existent scene id */, user_id: null, name: "scene.svx.json", mime: 
"application/si-dpo-3d.document+json"})).to.be.rejectedWith("404"); + await expect(all(`SELECT * FROM files WHERE fk_scene_id = 125`)).to.eventually.have.property("length", 0); + }); + it("can provide an author", async function(){ + let user_id = Uid.make(); + await get(`INSERT INTO users ( user_id, username ) VALUES ($1, 'alice')`, [user_id]); + await expect(vfs.writeDoc("{}", {scene: scene_id, user_id: user_id, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).to.be.fulfilled; + let files = await all(`SELECT data, fk_author_id AS fk_author_id FROM files WHERE name = 'scene.svx.json'`); + expect(files).to.have.length(1); + expect(files[0]).to.deep.equal({ + data: "{}", + fk_author_id: user_id, + }); + }); + it("updates scene's current doc", async function(){ + for(let i = 1; i<=3; i++){ + let id = (await vfs.writeDoc(`{"i":${i}}`, {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; + await expect(vfs.getDoc(scene_id)).to.eventually.deep.include({id}); + } + }); - it("get a valid scene", async function(){ - let scene = await vfs.getScene("foo"); + it("reports byte size, not character size", async function(){ + let str = `{"id":"你好"}`; + expect(str.length).not.to.equal(Buffer.byteLength(str)); + const doc = await vfs.writeDoc(str, {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + expect(doc).to.have.property("size", Buffer.byteLength(str)); + }) - let props = sceneProps(scene_id); - props.type = "voyager"; - let key:keyof Scene; - for(key in props){ - if(typeof props[key] ==="undefined"){ - expect(scene, `${(scene as any)[key]}`).not.to.have.property(key); - }else if(typeof props[key] === "function"){ - expect(scene).to.have.property(key).instanceof(props[key]); - }else{ - expect(scene).to.have.property(key).to.deep.equal(props[key]); - } - } }); - it("get an empty scene", async function(){ - let id = await vfs.createScene("empty"); - 
let scene = await vfs.getScene("empty"); - expect(scene).to.have.property("ctime").instanceof(Date); - expect(scene).to.have.property("mtime").instanceof(Date); - expect(scene).to.have.property("id", id).a("number"); - expect(scene).to.have.property("name", "empty"); - expect(scene).to.have.property("author", "default"); - }); + + describe("getScene()", function(){ + this.beforeEach(async function(){ + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + }); - it("get a scene's thumbnail if it exist (jpg)", async function(){ - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); - let s = await vfs.getScene(scene_id); - expect(s).to.have.property("thumb", "scene-image-thumb.jpg"); - }); + it("throw an error if not found", async function(){ + await expect(vfs.getScene("bar")).to.be.rejectedWith("scene_name"); + }); - it("get a scene's thumbnail if it exist (png)", async function(){ - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); - let s = await vfs.getScene(scene_id); - expect(s).to.have.property("thumb", "scene-image-thumb.png"); - }); + it("get a valid scene", async function(){ + let scene = await vfs.getScene("foo"); + + let props = sceneProps(scene_id); + props.type = "voyager"; + let key:keyof Scene; + for(key in props){ + if(typeof props[key] ==="undefined"){ + expect(scene, `${(scene as any)[key]}`).not.to.have.property(key); + }else if(typeof props[key] === "function"){ + expect(scene).to.have.property(key).instanceof(props[key]); + }else{ + expect(scene).to.have.property(key).to.deep.equal(props[key]); + } + } + }); - it("get a scene's thumbnail if it exist (prioritized)", async function(){ - let times = [ - new Date("2022-01-01"), - new Date("2023-01-01"), - new Date("2024-01-01") - ]; - const setDate = (i:number, d:Date)=>vfs._db.run(`UPDATE files SET ctime = 
$2 WHERE file_id = $1`, [ i, d ]); - let png = await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); - let jpg = await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); + it("get an empty scene", async function(){ + let id = await vfs.createScene("empty"); + let scene = await vfs.getScene("empty"); + expect(scene).to.have.property("ctime").instanceof(Date); + expect(scene).to.have.property("mtime").instanceof(Date); + expect(scene).to.have.property("id", id).a("number"); + expect(scene).to.have.property("name", "empty"); + expect(scene).to.have.property("author", "default"); + }); - let r = await setDate(jpg.id, times[1]); - await setDate(png.id, times[2]); - let s = await vfs.getScene(scene_id); - expect(s, `use PNG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.png"); + it("get a scene's thumbnail if it exist (jpg)", async function(){ + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); + let s = await vfs.getScene(scene_id); + expect(s).to.have.property("thumb", "scene-image-thumb.jpg"); + }); - await setDate(png.id, times[0]); - s = await vfs.getScene(scene_id); - expect(s, `use JPG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.jpg"); + it("get a scene's thumbnail if it exist (png)", async function(){ + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); + let s = await vfs.getScene(scene_id); + expect(s).to.have.property("thumb", "scene-image-thumb.png"); + }); - //If date is equal, prioritize jpg - await setDate(png.id, times[1]); - s = await vfs.getScene(scene_id); - expect(s, `With equal dates, alphanumeric order shopuld prioritize JPG over PNG file`).to.have.property("thumb", "scene-image-thumb.jpg"); - }); + it("get a scene's thumbnail if it exist (prioritized)", 
async function(){ + let times = [ + new Date("2022-01-01"), + new Date("2023-01-01"), + new Date("2024-01-01") + ]; + const setDate = (i:number, d:Date)=>vfs._db.run(`UPDATE files SET ctime = $2 WHERE file_id = $1`, [ i, d ]); + let png = await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.png", mime: "image/png"}); + let jpg = await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene-image-thumb.jpg", mime: "image/jpeg"}); + + let r = await setDate(jpg.id, times[1]); + await setDate(png.id, times[2]); + let s = await vfs.getScene(scene_id); + expect(s, `use PNG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.png"); + + await setDate(png.id, times[0]); + s = await vfs.getScene(scene_id); + expect(s, `use JPG thumbnail if it's the most recent`).to.have.property("thumb", "scene-image-thumb.jpg"); + + //If date is equal, prioritize jpg + await setDate(png.id, times[1]); + s = await vfs.getScene(scene_id); + expect(s, `With equal dates, alphanumeric order shopuld prioritize JPG over PNG file`).to.have.property("thumb", "scene-image-thumb.jpg"); + }); - it("get requester's access right", async function(){ - let userManager = new UserManager(vfs._db); - let alice = await userManager.addUser("alice", "xxxxxxxx", "create"); + it("get requester's access right", async function(){ + let userManager = new UserManager(vfs._db); + let alice = await userManager.addUser("alice", "xxxxxxxx", "create"); - let id = await vfs.createScene("alice's", alice.uid); - await vfs.writeDoc("{}", {scene: id, user_id: alice.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let scene = await vfs.getScene("alice's", alice.uid); - expect(scene).to.have.property("access").to.equal("admin"); - }); + let id = await vfs.createScene("alice's", alice.uid); + await vfs.writeDoc("{}", {scene: id, user_id: alice.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let scene = 
await vfs.getScene("alice's", alice.uid); + expect(scene).to.have.property("access").to.equal("admin"); + }); - it("get requester's group access right", async function(){ - let userManager = new UserManager(vfs._db); - let alice = await userManager.addUser("alice", "xxxxxxxx", "create"); - let group = await userManager.addGroup("My Group"); - await userManager.addMemberToGroup(alice.uid, group.groupUid); + it("get requester's group access right", async function(){ + let userManager = new UserManager(vfs._db); + let alice = await userManager.addUser("alice", "xxxxxxxx", "create"); + let group = await userManager.addGroup("My Group"); + await userManager.addMemberToGroup(alice.uid, group.groupUid); - let id = await vfs.createScene("foo2"); - await userManager.grantGroup(id, group.groupUid, "write"); + let id = await vfs.createScene("foo2"); + await userManager.grantGroup(id, group.groupUid, "write"); - await vfs.writeDoc("{}", {scene: id, user_id: alice.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let scene = await vfs.getScene("foo2", alice.uid); - expect(scene).to.have.property("access").to.equal("write"); - }); + await vfs.writeDoc("{}", {scene: id, user_id: alice.uid, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let scene = await vfs.getScene("foo2", alice.uid); + expect(scene).to.have.property("access").to.equal("write"); + }); - it("performs requests for default user", async function(){ - let scene = await vfs.getScene("foo", 0); - expect(scene).to.be.ok; - expect(scene).to.have.property("access").to.equal("read"); + it("performs requests for default user", async function(){ + let scene = await vfs.getScene("foo", 0); + expect(scene).to.be.ok; + expect(scene).to.have.property("access").to.equal("read"); + }); }); - }); - describe("getSceneHistory()", function(){ - let default_folders = 2 - describe("get an ordered history", function(){ - this.beforeEach(async function(){ - let fileProps 
:WriteFileParams = {user_id: null, scene:scene_id, mime: "model/gltf-binary", name:"models/foo.glb"} - await vfs.writeFile(dataStream(), fileProps); - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await vfs.writeFile(dataStream(), fileProps); - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - //Ensure all ctime are equal to prevent ordering issues - await vfs._db.run(`UPDATE files SET ctime = $1 WHERE fk_scene_id = $2`, [new Date(), scene_id]); - }); - - it("all events", async function(){ - let history = await vfs.getSceneHistory(scene_id); - expect(history).to.have.property("length", 4 + default_folders); - //Couln't easily test ctime sort - expect(history.map(e=>e.name)).to.deep.equal([ - "scene.svx.json", - "scene.svx.json", - "models/foo.glb", - "models/foo.glb", - "models", - "articles", - ]); - expect(history.map(e=>e.generation)).to.deep.equal([2,1,2,1,1,1]); + describe("getSceneHistory()", function(){ + let default_folders = 2 + describe("get an ordered history", function(){ + this.beforeEach(async function(){ + let fileProps :WriteFileParams = {user_id: null, scene:scene_id, mime: "model/gltf-binary", name:"models/foo.glb"} + await vfs.writeFile(dataStream(), fileProps); + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await vfs.writeFile(dataStream(), fileProps); + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + //Ensure all ctime are equal to prevent ordering issues + await vfs._db.run(`UPDATE files SET ctime = $1 WHERE fk_scene_id = $2`, [new Date(), scene_id]); + }); + + it("all events", async function(){ + let history = await vfs.getSceneHistory(scene_id); + expect(history).to.have.property("length", 4 + default_folders); + 
//Couln't easily test ctime sort + expect(history.map(e=>e.name)).to.deep.equal([ + "scene.svx.json", + "scene.svx.json", + "models/foo.glb", + "models/foo.glb", + "models", + "articles", + ]); + expect(history.map(e=>e.generation)).to.deep.equal([2,1,2,1,1,1]); + }); + + it("with limit", async function(){ + let history = await vfs.getSceneHistory(scene_id, {limit: 1}); + expect(history).to.have.property("length", 1); + //Couln't easily test ctime sort + expect(history.map(e=>e.name)).to.deep.equal([ + "scene.svx.json", + ]); + expect(history.map(e=>e.generation)).to.deep.equal([2]); + }); + it("with offset", async function(){ + let history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 1}); + expect(history).to.have.property("length", 2); + //Couln't easily test ctime sort + expect(history.map(e=>e.name)).to.deep.equal([ + "scene.svx.json", + "models/foo.glb", + ]); + expect(history.map(e=>e.generation)).to.deep.equal([1,2]); + }); }); - - it("with limit", async function(){ - let history = await vfs.getSceneHistory(scene_id, {limit: 1}); - expect(history).to.have.property("length", 1); - //Couln't easily test ctime sort - expect(history.map(e=>e.name)).to.deep.equal([ - "scene.svx.json", + + it("supports pagination", async function(){ + for(let i=0; i < 20; i++){ + await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + } + + let history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 0}); + expect(history.map(e=>e.generation)).to.deep.equal([ + 20, + 19, ]); - expect(history.map(e=>e.generation)).to.deep.equal([2]); - }); - it("with offset", async function(){ - let history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 1}); + history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 2}); expect(history).to.have.property("length", 2); - //Couln't easily test ctime sort - expect(history.map(e=>e.name)).to.deep.equal([ - "scene.svx.json", - 
"models/foo.glb", + expect(history.map(e=>e.generation)).to.deep.equal([ + 18, + 17, ]); - expect(history.map(e=>e.generation)).to.deep.equal([1,2]); }); }); - - it("supports pagination", async function(){ - for(let i=0; i < 20; i++){ - await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - } - - let history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 0}); - expect(history.map(e=>e.generation)).to.deep.equal([ - 20, - 19, - ]); - history = await vfs.getSceneHistory(scene_id, {limit: 2, offset: 2}); - expect(history).to.have.property("length", 2); - expect(history.map(e=>e.generation)).to.deep.equal([ - 18, - 17, - ]); - }); - }); - - describe("getSceneMeta()", function() { - it("can get default meta (0) data", async function(){ - await vfs.writeDoc( JSON.stringify({ - metas: [ - {collection: - {titles: { - "EN": "English title", - "FR": "French title" - }} - } - ] - }) - , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - const meta = await vfs.getSceneMeta("foo"); - expect(meta.titles).to.be.deep.equal({ - "EN": "English title", - "FR": "French title" - }) - }); - it("can get non-default meta data", async function(){ - await vfs.writeDoc( JSON.stringify({ - scenes: [{meta: 1}], - metas: [ - {}, - {collection: - {titles: { - "EN": "English title", - "FR": "French title" - }} - } - ] - }) - , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - const meta = await vfs.getSceneMeta("foo"); - expect(meta.titles).to.deep.equal({ - "EN": "English title", - "FR": "French title" - }) - }); - - it("can get primary title and intros", async function(){ - await vfs.writeDoc( JSON.stringify( - { - scenes: [{meta: 1}], - setups: [ {language: {language: "FR"}}], - metas: [ - {}, - {collection: - {titles: { - "EN": "English title", - "FR": "French title" - }, - intros: { - "EN": 
"English intro", - "FR": "French intro" - }} - }] - }) - , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - const meta = await vfs.getSceneMeta("foo"); - expect(meta.primary_title).to.equal("French title"); - expect(meta.primary_intro).to.equal("French intro"); - }); - - }); + describe("getSceneMeta()", function() { + it("can get default meta (0) data", async function(){ + await vfs.writeDoc( JSON.stringify({ + metas: [ + {collection: + {titles: { + "EN": "English title", + "FR": "French title" + }} + } + ] + }) + , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + const meta = await vfs.getSceneMeta("foo"); + expect(meta.titles).to.be.deep.equal({ + "EN": "English title", + "FR": "French title" + }) + }); + + it("can get non-default meta data", async function(){ + await vfs.writeDoc( JSON.stringify({ + scenes: [{meta: 1}], + metas: [ + {}, + {collection: + {titles: { + "EN": "English title", + "FR": "French title" + }} + } + ] + }) + , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + const meta = await vfs.getSceneMeta("foo"); + expect(meta.titles).to.deep.equal({ + "EN": "English title", + "FR": "French title" + }) + }); + + it("can get primary title and intros", async function(){ + await vfs.writeDoc( JSON.stringify( + { + scenes: [{meta: 1}], + setups: [ {language: {language: "FR"}}], + metas: [ + {}, + {collection: + {titles: { + "EN": "English title", + "FR": "French title" + }, + intros: { + "EN": "English intro", + "FR": "French intro" + }} + }] + }) + , {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + const meta = await vfs.getSceneMeta("foo"); + expect(meta.primary_title).to.equal("French title"); + expect(meta.primary_intro).to.equal("French intro"); + }); + + }); - describe("listFiles()", function(){ - let tref = new 
Date("2022-12-08T10:49:46.196Z"); + describe("listFiles()", function(){ + let tref = new Date("2022-12-08T10:49:46.196Z"); + + it("Get files created for a scene", async function(){ + let f1 = await vfs.writeFile(dataStream(), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); + let f2 = await vfs.writeFile(dataStream(), {user_id: null, scene:"foo", mime: "image/jpeg", name:"foo.jpg"}); + let d1 = await vfs.writeDoc('{}', {user_id: null, scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json"}); + await run(`UPDATE files SET ctime = $1`, [tref.toISOString()]); + let files = await collapseAsync(vfs.listFiles(scene_id)); + expect(files).to.deep.equal([ + { + size: 4, + hash: 'tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw', + generation: 1, + id: f2.id, + name: 'foo.jpg', + mime: "image/jpeg", + ctime: tref, + mtime: tref, + author_id: null, + author: "default", + },{ + size: 4, + hash: 'tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw', + generation: 1, + id: f1.id, + name: 'models/foo.glb', + mime: "model/gltf-binary", + ctime: tref, + mtime: tref, + author_id: null, + author: "default", + }, + { + size: 2, + hash: "RBNvo1WzZ4oRRq0W9-hknpT7T8If536DEMBg9hyq_4o", + generation: 1, + id: d1.id, + mime: "application/si-dpo-3d.document+json", + name: "scene.svx.json", + ctime: tref, + mtime: tref, + author_id: null, + author: "default", + } + ]); + }); - it("Get files created for a scene", async function(){ - let f1 = await vfs.writeFile(dataStream(), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); - let f2 = await vfs.writeFile(dataStream(), {user_id: null, scene:"foo", mime: "image/jpeg", name:"foo.jpg"}); - let d1 = await vfs.writeDoc('{}', {user_id: null, scene: "foo", mime: "application/si-dpo-3d.document+json", name: "scene.svx.json"}); - await run(`UPDATE files SET ctime = $1`, [tref.toISOString()]); - let files = await collapseAsync(vfs.listFiles(scene_id)); - 
expect(files).to.deep.equal([ - { - size: 4, - hash: 'tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw', - generation: 1, + it("Groups files versions", async function(){ + let tnext = new Date(tref.getTime()+8000); + let originalFiles = (await all("SELECT * FROM files")).length + let f1 = await vfs.writeFile(dataStream(["foo", "\n"]), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); + let del = await vfs.createFile({user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}, {hash: null, size: 0}); + let f2 = await vfs.writeFile(dataStream(["hello world", "\n"]), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); + await expect(all("SELECT * FROM files")).to.eventually.have.property("length", 3+originalFiles); + await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tref.toISOString(), f1.id]); + await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tref.toISOString(), del.id]); + await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tnext.toISOString(), f2.id]); + + let files = await collapseAsync(vfs.listFiles(scene_id)); + expect(files).to.have.property("length", 1); + expect(files).to.deep.equal([{ + size: 12, + hash: 'qUiQTy8PR5uPgZdpSzAYSw0u0cHNKh7A-4XSmaGSpEc', + generation: 3, id: f2.id, - name: 'foo.jpg', - mime: "image/jpeg", - ctime: tref, - mtime: tref, - author_id: null, - author: "default", - },{ - size: 4, - hash: 'tbudgBSg-bHWHiHnlteNzN8TUvI80ygS9IULh4rklEw', - generation: 1, - id: f1.id, name: 'models/foo.glb', mime: "model/gltf-binary", ctime: tref, - mtime: tref, + mtime: tnext, author_id: null, author: "default", - }, - { - size: 2, - hash: "RBNvo1WzZ4oRRq0W9-hknpT7T8If536DEMBg9hyq_4o", - generation: 1, - id: d1.id, - mime: "application/si-dpo-3d.document+json", - name: "scene.svx.json", - ctime: tref, - mtime: tref, - author_id: null, - author: "default", - } - ]); - }); + }]); + }); - it("Groups files versions", async function(){ - let tnext = new 
Date(tref.getTime()+8000); - let originalFiles = (await all("SELECT * FROM files")).length - let f1 = await vfs.writeFile(dataStream(["foo", "\n"]), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); - let del = await vfs.createFile({user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}, {hash: null, size: 0}); - let f2 = await vfs.writeFile(dataStream(["hello world", "\n"]), {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"}); - await expect(all("SELECT * FROM files")).to.eventually.have.property("length", 3+originalFiles); - await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tref.toISOString(), f1.id]); - await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tref.toISOString(), del.id]); - await run(`UPDATE files SET ctime = $1 WHERE file_id = $2`, [tnext.toISOString(), f2.id]); + it("returns only files that are not removed", async function(){ + let props :WriteFileParams = {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"} + let f1 = await vfs.writeFile(dataStream(), props); + await vfs.removeFile(props); + let files = await collapseAsync(vfs.listFiles(scene_id)); + expect(files).to.have.property("length", 0); + }); - let files = await collapseAsync(vfs.listFiles(scene_id)); - expect(files).to.have.property("length", 1); - expect(files).to.deep.equal([{ - size: 12, - hash: 'qUiQTy8PR5uPgZdpSzAYSw0u0cHNKh7A-4XSmaGSpEc', - generation: 3, - id: f2.id, - name: 'models/foo.glb', - mime: "model/gltf-binary", - ctime: tref, - mtime: tnext, - author_id: null, - author: "default", - }]); - }); + it("can get a list of archived files", async function(){ + await vfs.writeFile(dataStream(["foo", "\n"]), {user_id: null, scene: scene_id, mime: "text/html", name:"articles/hello.txt"}); + let del = await vfs.createFile({user_id: null, scene: scene_id, name:"articles/hello.txt"}, {hash: null, size: 0}); - it("returns only files that are not removed", 
async function(){ - let props :WriteFileParams = {user_id: null, scene:"foo", mime: "model/gltf-binary", name:"models/foo.glb"} - let f1 = await vfs.writeFile(dataStream(), props); - await vfs.removeFile(props); - let files = await collapseAsync(vfs.listFiles(scene_id)); - expect(files).to.have.property("length", 0); + let files = await collapseAsync(vfs.listFiles(scene_id, {withArchives: true})); + expect(files).to.have.property("length", 1); + expect(files[0]).to.have.property("hash", null); + expect(files[0]).to.have.property("id", del.id); + }); + + it("can get file data", async function(){ + await vfs.writeDoc(`{"foo":"bar"}`, {user_id: null, scene: scene_id, mime: "text/html", name: "foo.txt"}); + let files = await collapseAsync(vfs.listFiles(scene_id, {withData: true})); + expect(files).to.have.property("length", 1); + expect(files[0]).to.have.property("data", `{"foo":"bar"}`); + }); }); + + describe("getDoc()", function(){ + it("throw if not found", async function(){ + await expect(vfs.getDoc(scene_id)).to.be.rejectedWith("[404]"); + }); - it("can get a list of archived files", async function(){ - await vfs.writeFile(dataStream(["foo", "\n"]), {user_id: null, scene: scene_id, mime: "text/html", name:"articles/hello.txt"}); - let del = await vfs.createFile({user_id: null, scene: scene_id, name:"articles/hello.txt"}, {hash: null, size: 0}); + it("get document data as a string", async function(){ + await vfs.writeDoc(Buffer.from("{}"), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + let doc = await vfs.getDoc(scene_id); + expect(doc).to.have.property("data").a("string"); + }); - let files = await collapseAsync(vfs.listFiles(scene_id, {withArchives: true})); - expect(files).to.have.property("length", 1); - expect(files[0]).to.have.property("hash", null); - expect(files[0]).to.have.property("id", del.id); - }); + it("throws if file is not a document", async function(){ + await 
vfs.writeFile(dataStream(["{}"]), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); + await expect(vfs.getDoc(scene_id)).to.be.rejectedWith(BadRequestError); + }) - it("can get file data", async function(){ - await vfs.writeDoc(`{"foo":"bar"}`, {user_id: null, scene: scene_id, mime: "text/html", name: "foo.txt"}); - let files = await collapseAsync(vfs.listFiles(scene_id, {withData: true})); - expect(files).to.have.property("length", 1); - expect(files[0]).to.have.property("data", `{"foo":"bar"}`); - }); - }); - - describe("getDoc()", function(){ - it("throw if not found", async function(){ - await expect(vfs.getDoc(scene_id)).to.be.rejectedWith("[404]"); + it("fetch currently active document", async function(){ + let id = (await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; + let doc = await expect(vfs.getDoc(scene_id)).to.be.fulfilled; + expect(doc).to.have.property("id", id); + expect(doc).to.have.property("ctime").instanceof(Date); + expect(doc).to.have.property("mtime").instanceof(Date); + expect(doc).to.have.property("author_id", null); + expect(doc).to.have.property("author", "default"); + expect(doc).to.have.property("data", "{}"); + expect(doc).to.have.property("generation", 1); + }); }); - it("get document data as a string", async function(){ - await vfs.writeDoc(Buffer.from("{}"), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - let doc = await vfs.getDoc(scene_id); - expect(doc).to.have.property("data").a("string"); + describe("cleanLooseObjects()", function(){ + it("remove old dangling blobs", async function(){ + let file = await vfs.writeFile(dataStream(["Hello World\n"]), {scene: scene_id, name: "foo.txt", mime: "text/plain", user_id: null}); + //lie about the file's mtime: it is old enough + await fs.utimes(vfs.filepath(file), new Date(Date.now() - 3600*1000*2), 
new Date(Date.now()- 3600*1000*3)); + await vfs.removeScene(scene_id); + //Blob should still be here + await expect(fs.access(vfs.filepath(file as any), constants.R_OK)).to.be.fulfilled; + let report = await vfs.cleanLooseObjects(); + expect(report).to.equal(`Cleaned 1 loose object`); + await expect(fs.access(vfs.filepath(file as any), constants.R_OK)).to.be.rejectedWith("ENOENT"); + }); }); - it("throws if file is not a document", async function(){ - await vfs.writeFile(dataStream(["{}"]), {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"}); - await expect(vfs.getDoc(scene_id)).to.be.rejectedWith(BadRequestError); + describe("checkForMissingObjects()", function(){ + it("reports missing blobs", async function(){ + let file = await vfs.writeFile(dataStream(["Hello World\n"]), {scene: scene_id, name: "foo.txt", mime: "text/plain", user_id: null}); + //force: false, so it throws if file is not here + await expect(fs.rm(vfs.getPath(file as any), {force: false})).to.be.fulfilled; + let report = await vfs.checkForMissingObjects(); + expect(report).to.equal("File 0qhPS4tlCTfsj3PNi-LHSt1akRumTfJ0WO2CKdqASiY can't be read on disk (can't fix). 
Some data have been lost!") + }); }) - - it("fetch currently active document", async function(){ - let id = (await vfs.writeDoc("{}", {scene: scene_id, user_id: null, name: "scene.svx.json", mime: "application/si-dpo-3d.document+json"})).id; - let doc = await expect(vfs.getDoc(scene_id)).to.be.fulfilled; - expect(doc).to.have.property("id", id); - expect(doc).to.have.property("ctime").instanceof(Date); - expect(doc).to.have.property("mtime").instanceof(Date); - expect(doc).to.have.property("author_id", null); - expect(doc).to.have.property("author", "default"); - expect(doc).to.have.property("data", "{}"); - expect(doc).to.have.property("generation", 1); - }); }); - - describe("cleanLooseObjects()", function(){ - it("remove old dangling blobs", async function(){ - let file = await vfs.writeFile(dataStream(["Hello World\n"]), {scene: scene_id, name: "foo.txt", mime: "text/plain", user_id: null}); - //lie about the file's mtime: it is old enough - await fs.utimes(vfs.filepath(file), new Date(Date.now() - 3600*1000*2), new Date(Date.now()- 3600*1000*3)); - await vfs.removeScene(scene_id); - //Blob should still be here - await expect(fs.access(vfs.filepath(file as any), constants.R_OK)).to.be.fulfilled; - let report = await vfs.cleanLooseObjects(); - expect(report).to.equal(`Cleaned 1 loose object`); - await expect(fs.access(vfs.filepath(file as any), constants.R_OK)).to.be.rejectedWith("ENOENT"); - }); - }); - - describe("checkForMissingObjects()", function(){ - it("reports missing blobs", async function(){ - let file = await vfs.writeFile(dataStream(["Hello World\n"]), {scene: scene_id, name: "foo.txt", mime: "text/plain", user_id: null}); - //force: false, so it throws if file is not here - await expect(fs.rm(vfs.getPath(file as any), {force: false})).to.be.fulfilled; - let report = await vfs.checkForMissingObjects(); - expect(report).to.equal("File 0qhPS4tlCTfsj3PNi-LHSt1akRumTfJ0WO2CKdqASiY can't be read on disk (can't fix). 
Some data have been lost!") - }); - }) }); }); -}); +}) diff --git a/source/ui/MainView.ts b/source/ui/MainView.ts index 162af8116..c1c438606 100644 --- a/source/ui/MainView.ts +++ b/source/ui/MainView.ts @@ -5,7 +5,8 @@ import "./styles/globals.scss"; import "./screens/SceneHistory"; +import "./handlers/toggle"; + import "./composants/SubmitFragment"; -import "./composants/UploadForm"; -import "./handlers/toggle"; \ No newline at end of file +import "./composants/UploadManager"; diff --git a/source/ui/composants/Size.ts b/source/ui/composants/Size.ts index 2ab8a55b3..5effcac59 100644 --- a/source/ui/composants/Size.ts +++ b/source/ui/composants/Size.ts @@ -1,23 +1,23 @@ import { LitElement, html } from 'lit'; import { customElement, property } from 'lit/decorators.js'; +type BinaryUnit = 'B'|'kB'|'MB'|'GB'|'TB'|'PB'|'EB'|'ZB'|'YB'; +export function formatBytes(bytes: number, unit?: BinaryUnit){ - - -export function formatBytes(bytes, si=true){ - const thresh = si ? 1000 : 1024; - if(Math.abs(bytes) < thresh) { - return bytes + ' B'; + if(unit === "B" || Math.abs(bytes) < 1000) { + return bytes + (unit?'':' B'); } - let units = si - ? ['kB','MB','GB','TB','PB','EB','ZB','YB'] - : ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB']; + let units = ['kB','MB','GB','TB','PB','EB','ZB','YB']; let u = -1; do { - bytes /= thresh; + bytes /= 1000; ++u; - } while(Math.abs(bytes) >= thresh && u < units.length - 1); - return Math.round(bytes*100)/100 + ' '+units[u]; + } while(unit? units[u] !== unit : Math.abs(bytes) >= 1000 && u < units.length - 1); + return Math.round(bytes*100)/100 + (unit? 
'' : ' '+units[u]); +} + +export function binaryUnit(bytes: number) :BinaryUnit{ + return formatBytes(bytes).split(" ").pop() as any; } @@ -26,10 +26,8 @@ export default class Size extends LitElement{ @property({type: Number}) b :number; - @property({type: Boolean}) - i :boolean = false; render(){ - return html`${formatBytes(this.b, !this.i)}`; + return html`${formatBytes(this.b)}`; } } \ No newline at end of file diff --git a/source/ui/composants/UploadManager.ts b/source/ui/composants/UploadManager.ts new file mode 100644 index 000000000..c49cbcc2c --- /dev/null +++ b/source/ui/composants/UploadManager.ts @@ -0,0 +1,505 @@ +import { css, html, LitElement, PropertyValues, TemplateResult } from "lit"; +import { customElement, state } from "lit/decorators.js"; + +import Notification from "./Notification"; +import { formatBytes, binaryUnit } from "./Size"; +import HttpError from "../state/HttpError"; +import { SceneUploadResult, Uploader, UploadOperation } from "../state/uploader"; + + + + +@customElement("upload-manager") +export default class UploadManager extends LitElement{ + //static shadowRootOptions = {...LitElement.shadowRootOptions, delegatesFocus: true}; + private uploader = new Uploader(this); + @state() + busy: boolean = false; + + @state() + error:string|null = null; + + connectedCallback(): void { + super.connectedCallback(); + // We register a number of global events that might influence app behaviour + window.addEventListener("drop", this.handleGlobalDrop); + window.addEventListener("dragover", this.handleGlobalDragover); + } + + disconnectedCallback(): void { + super.disconnectedCallback(); + window.removeEventListener("drop", this.handleGlobalDrop); + window.removeEventListener("dragover", this.handleGlobalDragover); + } + + /** + * Prevent default action on drop at the window level when files are dropped in the page + */ + private handleGlobalDrop = (e: DragEvent)=>{ + if(e.defaultPrevented) return; + if([...e.dataTransfer.items].every((item) => 
item.kind !== "file")) return; + e.preventDefault(); + this.classList.remove("drag-active"); + } + + /** + * Prevent default action on drop at the window level when files are dragged through the page + */ + private handleGlobalDragover = (e: DragEvent)=>{ + if([...e.dataTransfer.items].every((item) => item.kind !== "file")) return; + e.preventDefault(); + if (!this.shadowRoot.contains(e.target as Node)) { + e.dataTransfer.dropEffect = "none"; + } + } + + + + createScene = (ev:MouseEvent)=>{ + ev.preventDefault(); + ev.stopPropagation(); + const form = this.uploadForm; + const data= new FormData(form); + if(!form.checkValidity()){ + Notification.show("Upload form is invalid", "warning", 1500); + return; + } + const tasks = this.uploader.uploads.map(u=>u.task_id) + console.log("Submit, form :", data, tasks); + this.busy = true; + this.error = null; + fetch("/tasks", { + method: "POST", + headers: { + "Content-Type": "application/json; charset=utf-8" + }, + body: JSON.stringify({ + type: "createSceneFromFiles", + data: { + tasks, + name: data.get("name"), + language: data.get("language")?.toString().toUpperCase(), + options: { + optimize: data.get("optimize") ?? 
false + } + } + }) + }).then(async (res)=>{ + await HttpError.okOrThrow(res); + let task = await res.json(); + if(!task.task_id) throw new Error(`Unexpected body shape: ${JSON.stringify(task)}`); + const u = new URL(window.location.href); + u.searchParams.append("task", task.task_id); + //Reload the page + window.location.href = u.toString(); + }).catch((e)=>{ + console.error(e); + this.error = e.message; + }).finally(()=> this.busy = false); + return false; + } + + + extractArchives = (ev:MouseEvent)=>{ + ev.preventDefault(); + ev.stopPropagation(); + this.busy = true; + this.error = null; + const tasks = this.uploader.uploads.map(u=>u.task_id); + fetch("/tasks", { + method: "POST", + headers: { + "Content-Type": "application/json; charset=utf-8" + }, + body: JSON.stringify({ + type: "extractScenesArchives", + data: { + tasks, + } + }) + }).then(async (res)=>{ + await HttpError.okOrThrow(res); + let task = await res.json(); + if(!task.task_id) throw new Error(`Unexpected body shape: ${JSON.stringify(task)}`); + const u = new URL(window.location.href); + //Remove any existing "task" parameters to avoid confusion about what was imported + u.searchParams.set("task", task.task_id); + //Reload the page + window.location.href = u.toString(); + }).catch((e)=>{ + console.error(e); + this.error = e.message; + }).finally(()=> this.busy = false); + } + + + + /** + * Handles files being dropped onto the upload zone + * @fixme files is empty? 
+ */ + public ondrop = (ev: DragEvent)=>{ + ev.preventDefault(); + this.classList.remove("drag-active"); + console.log("Drop :", ev.dataTransfer.files); + // @todo add the files + this.uploader.handleFiles(ev.dataTransfer.files); + } + + public ondragover = (ev: DragEvent)=>{ + ev.preventDefault(); + ev.dataTransfer.dropEffect = "copy"; + this.classList.add("drag-active"); + } + + public ondragleave = (ev: DragEvent)=>{ + this.classList.remove("drag-active"); + } + + /** + * Handles "change" events in the file input + * @param ev + */ + protected handleChange = (ev: Event)=>{ + ev.preventDefault(); + this.uploader.handleFiles((ev.target as HTMLInputElement).files); + } + + /** + * Proxy scene default title to prevent having to query slotted input repeatedly + */ + private _defaultTitle: string = ""; + /** + * slotted element selector to edit proposed scene title + */ + get nameInput() :HTMLInputElement|undefined{ + const slot = this.shadowRoot.querySelector('slot[name="upload-form"]'); + return slot?.assignedElements().map(e=> e.querySelector(`input[name="name"]`)).filter(n=>!!n)[0]; + } + + get uploadForm() :HTMLFormElement|undefined{ + const slot = this.shadowRoot.querySelector('slot[name="upload-form"]'); + return slot?.assignedElements().map(e=> e instanceof HTMLFormElement? e: e.querySelector(`FORM`)).filter(n=>!!n)[0]; + } + + + protected update(changedProperties: PropertyValues): void { + const models = this.uploader.uploads.filter(u=>u.isModel); + const defaultTitle = models[0]?.filename.split(".").slice(0, -1).join(".") ?? 
""; + + if(this._defaultTitle != defaultTitle && !this.uploader.has_pending_uploads){ + console.log("Assign default title %s to ", defaultTitle, ["", defaultTitle].indexOf(this.nameInput.value) != -1, this.nameInput) + if(this.nameInput){ + if(["", this._defaultTitle].indexOf(this.nameInput.value) != -1) this.nameInput.value = defaultTitle; + this._defaultTitle = defaultTitle + } + } + super.update(changedProperties); + } + + /** + * Renders an individual upload operation + */ + private renderUploadItem = (u:UploadOperation)=>{ + const onActionClick = (ev: MouseEvent)=>{ + ev.preventDefault(); + ev.stopPropagation(); + if(u.error || u.done){ + u.task_id && fetch(`/tasks/${u.task_id}`, {method: "DELETE"}) + .then(res=> HttpError.okOrThrow(res)) + .catch(err=>{ + Notification.show(`Failed to delete upload task for ${u.filename}. Data may remain on the server`, "warning", 10000); + console.warn(err) + }); + this.uploader.remove(u.id); + }else{ + console.log("Abort upload: ", u.id); + u.abort(); + } + } + let state = "pending"; + let stateText: TemplateResult|string = html``; + let filetype: TemplateResult|null = null; + let unit = binaryUnit(u.total); + let progress = `${formatBytes(u.progress, unit)}/${formatBytes(u.total)}`; + if(u.error){ + state = "error"; + stateText = "⚠"; + progress= `${u.error.message}`; + }else if(u.done){ + state = "done"; + stateText = "✓"; + progress = formatBytes(u.total); + } + if(u.isModel){ + filetype = html` + ` + }else if(u.scenes?.length){ + filetype = html` + + + + + + + ` + } + + return html` +
  • + ${stateText} + + ${filetype} + ${u.filename} + + ${progress} + 🗙 +
  • + `; + } + + + /** + * When it looks like the user is uploading model(s) for a scene creation, show this form. + * The submit button is shown only when all uploads have settled. + */ + private renderSceneCreationForm(){ + const can_submit = this.uploader.uploads.findIndex(u=>u.isModel) !== -1; + return html` +
    ${this.error}
    + ${(()=>{ + if(this.uploader.has_pending_uploads|| this.busy) return html`` + else if(this.uploader.has_errors) return html`Some uploads have failed` + else if(this.uploader.size && can_submit) return html`` + else if(this.uploader.size) return html`Provide at least one model` + else return null; + })()} +
    + `; + } + + /** + * Renders the details of zipfiles contents + */ + private renderScenesContentSummary(){ + const can_submit = this.uploader.uploads.findIndex(u=>u.scenes?.length) !== -1; + return html` + Scenes: +
      + ${this.uploader.uploads.map(u=>(u.scenes?.map(s=>html`
    • + [${s.action.toUpperCase()}] + ${s.name} +
    • `)) ?? null)} +
    +
    ${this.error}
    + ${(()=>{ + if(this.uploader.has_pending_uploads|| this.busy) return html`` + else if(this.uploader.has_errors) return html`Some uploads have failed` + else if(this.uploader.size && can_submit) return html`` + else return null; + })()} +
    + `; + } + + /** + * Prints a warning when mixed content prevents any action. + */ + private renderMixedContentWarning(){ + return html`Mixed content: can't proceed. Remove some of the uploaded files.`; + } + + protected render(): unknown { + const uploads = this.uploader.uploads; + const is_active = uploads.some(u=>!u.done && !u.error ); + const scene_archives = uploads.filter(u=>u.mime === "application/zip"); + let form_content :TemplateResult|null = null; + if(scene_archives.length && scene_archives.length == uploads.length){ + form_content = this.renderScenesContentSummary(); + }else if(scene_archives.length){ + form_content = this.renderMixedContentWarning(); + }else if(uploads.length){ + form_content = this.renderSceneCreationForm(); + }else{ + form_content = html`Start uploading files in the box above.` + } + return html` + Create or Update a scene +
    +
      + ${uploads.map(this.renderUploadItem)} +
    + +
    + ${form_content} + `; + } + + static styles = [css` + .dropzone{ + display: block; + max-width: 100%; + border: 1px solid #99999988; + border-radius: 2px; + transition: background-color .1s ease; + &:hover{ + background-color: #99999910; + } + + label { + display: block; + cursor: pointer; + padding: 1rem; + } + input[type=file] { + display: none; + } + } + + + :host(.drag-active) .dropzone{ + border: 1px dotted #99999988; + background-color: #99999905; + } + + + @keyframes l1 { + 0% {background-size: 20% 100%,20% 100%,20% 100%} + 33% {background-size: 20% 10% ,20% 100%,20% 100%} + 50% {background-size: 20% 100%,20% 10% ,20% 100%} + 66% {background-size: 20% 100%,20% 100%,20% 10% } + 100%{background-size: 20% 100%,20% 100%,20% 100%} + } + + .loader{ + display: block; + width: 24px; + height: 24px; + aspect-ratio: 1; + --c: no-repeat linear-gradient(var(--color-loader, var(--color-info)) 0 0); + background: + var(--c) 0% 50%, + var(--c) 50% 50%, + var(--c) 100% 50%; + background-size: 20% 100%; + animation: l1 1s infinite linear; + } + + .upload-list{ + max-width: 100%; + margin: .25rem; + padding: 0 .5rem; + display: flex; + flex-direction: column; + gap: 3px; + + .upload-line{ + display: flex; + justify-content: stretch; + gap: .5rem; + + &:not(:last-child){ + border-bottom: 2px solid #00000010; + } + + .upload-filetype{ + font-family: monospace; + font-weight: bold; + align-self: center; + &.filetype-source{ + color: var(--color-warning); + } + &.filetype-glb{ + color: var(--color-success); + } + } + + .upload-filename{ + flex-grow: 1; + } + + .upload-state{ + width: 24px; + } + + + .upload-action{ + cursor: pointer; + &.action-cancel{ + color: var(--color-error); + &:hover{ + filter: saturate(0.6); + } + } + } + + &.upload-done{ + .upload-state{ + color: var(--color-success); + } + } + + &.upload-error{ + color: var(--color-error); + } + } + } + .drop-label{ + opacity: 0; + transition: opacity 0.2s ease; + display: flex; + justify-content: center; + } + + 
.upload-list:empty ~ .drop-label, + .dropzone.empty .drop-label { + opacity: 1; + } + + .dropzone:hover, + :host(.drag-active) { + .drop-label{ + opacity: 0.7; + } + } + + .scenes-list-actions{ + list-style: none; + .scene-action{ + display: inline-block; + min-width: 4.6rem; + } + .scene-action-create{ + color: var(--color-success); + } + .scene-action-update{ + color: var(--color-info); + } + .scene-action-error{ + color: var(--color-error); + } + } + `]; +} \ No newline at end of file diff --git a/source/ui/state/uploader.ts b/source/ui/state/uploader.ts new file mode 100644 index 000000000..f7c829bc1 --- /dev/null +++ b/source/ui/state/uploader.ts @@ -0,0 +1,296 @@ +import { ReactiveController, ReactiveControllerHost } from "lit"; +import HttpError from "./HttpError"; +import Notification from "../composants/Notification"; + +export type SceneUploadResult = {name: string, action: "create"|"update"|"error"} + + +// @FIXME use more like 100MB in production +const CHUNK_SIZE = 10000000; + +export interface UploadOperation{ + //Unique ID of the upload. Might be different from "name" when we upload a scene zip + id: string; + filename: string; + mime?: string; + isModel?: boolean; + /** The file to upload. Should be required? 
*/ + file?: File; + /** When the file is an archive we will store the list of files it contains here */ + files?:string[]; + scenes?: SceneUploadResult[]; + //An array to be able to show a list of imported scenes in case of zip uploads + error ?:{code?:number, name?: string, message:string}; + done :boolean; + active ?:boolean; + task_id?:number; + total ?:number; + progress :number; + signal: AbortSignal; + abort: ()=>void; +} + +export interface ParsedUploadTaskOutput{ + mime: string, + scenes?: SceneUploadResult[], + files?: string[], +} + + +export class Uploader implements ReactiveController{ + host: ReactiveControllerHost; + /** + * List of upload operations + * Shouldn't be mutated as it will trigger an update when reassigned + */ + uploads: readonly UploadOperation[]; + has_pending_uploads: boolean = false; + has_errors: boolean = true; + + + get size(){ + return this.uploads.length; + } + + hostConnected() { + //Initialize + this.uploads = []; + window.addEventListener("online", this.handleGlobalOnline); + } + + hostDisconnected() { + // Clear uploads when host disconnects + for(let op of this.uploads){ + op.abort(); + } + this.uploads = []; + window.removeEventListener("online", this.handleGlobalOnline); + } + + constructor(host: ReactiveControllerHost){ + (this.host = host).addController(this); + //Make a closure around uploads list to prevent mutation + let _uploads : readonly UploadOperation[] = []; + const self = this; + Object.defineProperties(this,{ + "uploads": { + get() { + return _uploads; + }, + set(value){ + if(_uploads === value) return + _uploads = value; + this.has_pending_uploads = this.uploads.some(u=>!u.done && !u.error ); + this.has_errors = this.uploads.some(u=>!!u.error && u.error.name !== "AbortError" ); + this.processUploads(); + self.host?.requestUpdate(); + } + } + }) + } + + /** + * Amend a running download operation + * @param name scene name that uniquely identifies the operation + * @param changes partial object to merge into 
operation + */ + protected splice(id: string, changes?: Partial) { + this.uploads = this.uploads.map(current => { + if (current.id !== id) return current; + else if (changes && current) return { ...current, ...changes }; + else return undefined; + }).filter(u=>!!u); + } + + public remove(id: string){ + let size = this.uploads.length; + this.uploads = this.uploads.map(current => { + if (current.id !== id) return current; + else return undefined; + }).filter(u=>!!u); + return size != this.uploads.length; + } + + public reset(){ + this.uploads = []; + } + + /** + * Listen for the global "online" event to resume downloads that had a NETWORK_ERR + */ + private handleGlobalOnline = ()=>{ + this.uploads = this.uploads.map(u=>{ + if(u.error && u.error.code == DOMException.NETWORK_ERR) return {...u, error: undefined}; + else return u; + }); + this.processUploads(); + } + + /** + * Handles a list of files that was submitted through dragdrop or the file input + * @param files + */ + public handleFiles(files: Iterable): void{ + const uploads = new Map(this.uploads.map(u=>([u.id, u]))); + for(let file of files){ + console.log("Handle file : ", file); + const prev = uploads.get(file.name); + if(prev){ + if(!prev.done && ! prev.error) prev.abort(); + // @fixme remove from server? 
+ uploads.delete(file.name); + } + + uploads.set(file.name, this.createUploadOperation(file)); + } + this.uploads = Array.from(uploads.values()); + } + + protected createUploadOperation(file: File) :UploadOperation{ + const c = new AbortController(); + const task :UploadOperation = { + id: file.name, + filename: file.name, + file, + done: false, + active: false, + progress: 0, + total: file.size, + signal: c.signal, + abort: ()=> { + c.abort(); + this.splice(file.name, {error: {name: "AbortError", message: "Upload was aborted"}}); + } + }; + return task; + } + + + async initUpload(task: UploadOperation) :Promise{ + console.debug("Initializing upload task for %s", task.filename); + + const res = await fetch(`/tasks`, { + method: "POST", + signal: task.signal, + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + type: "parseUserUpload", + status: "initializing", + data:{ + filename: task.filename, + size: task.total, + } + }), + }); + await HttpError.okOrThrow(res); + const body = await res.json() + if(typeof body.task_id !== "number"){ + console.warn("Can't use body:", body); + throw new HttpError(500, "Server answered with an unreadable task identifier"); + } + return body.task_id; + } + + + async finalizeUpload(task: UploadOperation) :Promise{ + const res = await fetch(`/tasks/${task.task_id}`, { + method: "GET", + headers:{"Content-Type": "application/json"}, + }); + await HttpError.okOrThrow(res); + const body = await res.json(); + if(!body?.task?.output || typeof body.task.output !== "object" || !body.task.output.mime){ + console.warn("Unexpected format for scene output:", body); + throw new Error("Invalid response body"); + } + + return body.task.output; + } + + + /** + * Process queued uploads up to a max of 5 concurrent requests + */ + private processUploads(){ + const inFlight = this.uploads.filter(u=>u.active && (!u.done && !u.error)).length; + if(2 <= inFlight) return; + const task = this.uploads.find(u=>!u.active && !u.done 
&& !u.error); + if(!task) return; + + if(!task.file){ + Notification.show(`Can't upload : No file provided`, "error", 4000); + return; + } + + const starting_offset = task.progress; + const end_offset = Math.min(task.progress+CHUNK_SIZE, task.total) + const chunk = task.file.slice(task.progress, end_offset); + + const update :((changes :Partial)=>void) = this.splice.bind(this, task.id); + const setError = (err: Error|{code: number, message: string})=>{ + console.error("Upload request failed :", err); + update({active: false, progress: starting_offset, error: err}); + } + update({active: true}); + //Initialize upload + if(typeof task.task_id === "undefined"){ + this.initUpload(task).then( + (id)=> update({active: false, task_id: id}), + setError, + ); + return; + } + + let xhr = new XMLHttpRequest(); + task.signal.addEventListener("abort", xhr.abort.bind(xhr)) + xhr.onload = ()=>{ + if (299 < xhr.status) { + let message = xhr.statusText; + const contentType = xhr.getResponseHeader("Content-Type") || ""; + if (contentType.includes("application/json")) { + const fail_response = JSON.parse(xhr.responseText) as { message?: string }; + if (fail_response.message) message = fail_response.message; + } else if (xhr.responseText) { + message = xhr.responseText.slice(0, 300); + } + console.error("Upload Request failed :", message); + setError({code: xhr.status, message}); + }else if(xhr.status === 201){ + this.finalizeUpload(task).then((output)=>{ + console.debug("Finalized upload task. Parsed content :", output); + update({active: false, progress: task.total, done: true, ...output}); + }, setError); + }else{ + console.debug("Chunk uploaded. 
Set progress to :", end_offset); + update({active: false, progress: end_offset}); + } + } + + xhr.upload.onprogress = (evt)=>{ + if(evt.lengthComputable){ + console.debug("Progress event : %d (%d)", starting_offset + evt.loaded, evt.loaded); + update({progress: starting_offset + evt.loaded}); + } + } + xhr.ontimeout = function(ev){ + console.log("XHR Timeout", ev); + } + xhr.onerror = function onUploadError(ev){ + console.log("XHR Error", ev); + setError({ code: xhr.status ||DOMException.NETWORK_ERR, message: xhr.response.message || xhr.statusText || (navigator.onLine? "Server is unreachable": "Disconnected") }); + } + + xhr.onabort = function onUploadAbort(){ + setError({ code: 20, name: "AbortError", message: "Upload was aborted"}); + } + + let url = new URL(`/tasks/${task.task_id}/artifact`, window.location.href); + + + xhr.open('PUT', url); + xhr.setRequestHeader("Content-Range", `bytes ${starting_offset}-${end_offset-1}/${task.total}`); + xhr.send(chunk); + } +} \ No newline at end of file diff --git a/source/ui/styles/forms.scss b/source/ui/styles/forms.scss index 253cd5e99..d9c232ca3 100644 --- a/source/ui/styles/forms.scss +++ b/source/ui/styles/forms.scss @@ -19,6 +19,9 @@ display:flex; justify-content: stretch; flex-direction: row; + .form-item > select{ + width: auto; + } } &.column{ display:flex; diff --git a/source/ui/styles/main.scss b/source/ui/styles/main.scss index 00f67b89e..591ed8e17 100644 --- a/source/ui/styles/main.scss +++ b/source/ui/styles/main.scss @@ -3,6 +3,7 @@ @import "./landing.scss"; @import "./card.scss"; @import "./tags.scss"; +@import "./tasks.scss"; html { color: var(--color-text); diff --git a/source/ui/styles/tables.scss b/source/ui/styles/tables.scss index 81ef03bfa..b34c9f8ce 100644 --- a/source/ui/styles/tables.scss +++ b/source/ui/styles/tables.scss @@ -5,9 +5,22 @@ table.list-table { width: 100%; display: table; + @at-root section > &{ + --m: calc(var(--section-padding) * -1); + margin: 0 var(--m); + width: calc(-2 * 
var(--m) + 100%); + &:first-child{ + margin-top: var(--m); + } + &:last-child{ + margin-bottom: var(--m); + } + } + background: var(--color-element); color: var(--color-text); + tbody tr:nth-child(2n+1){ background: var(--color-section); } @@ -43,6 +56,12 @@ table.list-table { border-left: 0; border-right: 0; white-space: nowrap; + &.compact{ + width: 1%; + } + &.mono{ + font-family: var(--font-mono, monospace); + } } tbody tr:hover{ background: var(--color-dark); diff --git a/source/ui/styles/tasks.scss b/source/ui/styles/tasks.scss new file mode 100644 index 000000000..203cb4f9f --- /dev/null +++ b/source/ui/styles/tasks.scss @@ -0,0 +1,105 @@ +/* Shared styles for task views (compiled/copied to /dist/css/tasks.css by build) + Rules are chosen to prefer the definitions originally in `task.hbs` when conflicts arise. +*/ + +/* Task node/tree */ +.task-tree { + font-family: var(--font-mono, monospace); + container-type: inline-size; + > .task-node { + margin-left: 0; + border-left: none; + } +} +.task-node { + margin: 0; + padding: .25rem .5rem; + border-left: 2px solid var(--color-element, #444); + margin-left: .5rem; +} +.task-header { + display: flex; + flex-wrap: wrap; + align-items: baseline; + gap: .5rem; + padding: .25rem 0; +} +.task-id { color: var(--color-secondary, #aaa); } +.task-type { font-weight: bold; } +.task-children { margin-top: .25rem; } + +/* Status badges (preferred definitions from task.hbs) + Keep these synchronized with other UI components that use the same classes. 
*/ +.task-status { + padding: .1em .45em; + border-radius: .25em; + font-weight: bold; + text-transform: uppercase; + font-size: .8em; + &.task-status-large{ + padding: .5rem ; + width: 75px; + text-align: center; + } +} + +.status-pending, .status-initializing { background: var(--color-element, #444); color: var(--color-light, #eee); } +.status-running { background: #2a6baa; color: #fff; } +.status-success { background: #2a7a4b; color: #fff; } +.status-error { background: #a03030; color: #fff; } +.status-aborting { background: #7a5a20; color: #fff; } + +/* Tasks list table */ +.tasks-table { + width: 100%; + border-collapse: collapse; + font-size: .9em; +} + +.tasks-table th { + text-align: left; + padding: .4rem .6rem; + border-bottom: 2px solid var(--color-element, #444); + white-space: nowrap; +} + +.tasks-table td { + padding: .3rem .6rem; + vertical-align: middle; + border-bottom: 1px solid color-mix(in srgb, var(--color-element, #444) 40%, transparent); +} + +/* Logs table */ +.logs-table { + font-family: var(--font-mono, monospace); +} + +.logs-table th { + text-align: left; + padding: .3rem .5rem; + border-bottom: 1px solid var(--color-element, #444); + color: var(--color-primary, #aaa); + font-weight: bold; + font-size: 110%; +} +.logs-table td { + padding: .2rem .5rem; + vertical-align: top; + border-bottom: 1px solid color-mix(in srgb, var(--color-element, #444) 40%, transparent); +} + +.log-task-id { color: #aaa; white-space: nowrap; width: 1%; } +.log-time { color: #aaa; white-space: nowrap; width: 1%; } +.log-level { white-space: nowrap; width: 1%; } +.log-debug { color: var(--color-info, #aaa); } +.log-log { color: var(--color-light, #eee); } +.log-warn { color: #d4a030; } +.log-error { color: #e05050; } +.log-message { white-space: pre-wrap; word-break: break-all; width: 100%; } + +/* Small responsive helpers */ +@media (max-width: 640px) { + .task-header { gap: .25rem; } + .tasks-table th, .tasks-table td { padding: .25rem .4rem; } + .logs-table 
th, .logs-table td { padding: .15rem .4rem; } +} diff --git a/source/ui/styles/titles.scss b/source/ui/styles/titles.scss index 57f871643..6a1ad4687 100644 --- a/source/ui/styles/titles.scss +++ b/source/ui/styles/titles.scss @@ -5,6 +5,7 @@ h1, h2, h3{ } h1{ + position: relative; display: inline-block; color: var(--color-text); padding: .5rem 0.2rem 0rem 1.5rem; @@ -13,6 +14,15 @@ h1{ @include text-colors { border-bottom-color: currentColor; } + > .title-caret{ + position: absolute; + left: 0; + height: 100%; + text-decoration: none; + &:hover{ + color: var(--color-primary); + } + } } h2{