diff --git a/.env.development.sample b/.env.development.sample index 32f9d9b..5f42033 100644 --- a/.env.development.sample +++ b/.env.development.sample @@ -4,9 +4,12 @@ REACT_APP_RPC={} REACT_APP_PROVIDER_SOCKET=ws://127.0.0.1:8000 # These keys are read-only, no harm in exposing them -REACT_APP_PINATA_API_KEY="86c94c7482c7ea399726" -REACT_APP_PINATA_API_SECRET="13d5d71e42a838f13f8cafa2f8ebc21b24d4060bbf50a3330ae3d85a07c9c6a7" -REACT_APP_PINATA_GATEWAY="https://azure-above-clam-231.mypinata.cloud" +REACT_APP_APILLON_API_KEY=bf5d1512-8d0f-488b-ade0-96f4e2a6f42e +REACT_APP_APILLON_API_SECRET=tpnApjlsU74$ +REACT_APP_APILLON_BUCKET_UUID=7a4f9488-bd7b-4372-9fe8-a38fe03014f3 + +# Cloudflare Worker Upload Endpoint +REACT_APP_CLOUDFLARE_WORKER_URL=https://poi-upload-worker-prod.accounts-40a.workers.dev # Provider Socket Options # REACT_APP_PROVIDER_SOCKET=wss://rpc.ibp.network/kusama diff --git a/.env.production b/.env.production index f5d2da6..14a45be 100644 --- a/.env.production +++ b/.env.production @@ -3,7 +3,11 @@ REACT_APP_KEYRING_PREFIX=2 REACT_APP_RPC={} REACT_APP_PROVIDER_SOCKET=wss://sys.ibp.network/asset-hub-kusama -# These keys are read-only, no harm in exposing them -REACT_APP_PINATA_API_KEY="86c94c7482c7ea399726" -REACT_APP_PINATA_API_SECRET="13d5d71e42a838f13f8cafa2f8ebc21b24d4060bbf50a3330ae3d85a07c9c6a7" -REACT_APP_PINATA_GATEWAY="https://azure-above-clam-231.mypinata.cloud" +# Apillon Storage Configuration (Read-Only) +# Frontend uses read-only API credentials to list files +REACT_APP_APILLON_API_KEY=bf5d1512-8d0f-488b-ade0-96f4e2a6f42e +REACT_APP_APILLON_API_SECRET=tpnApjlsU74$ +REACT_APP_APILLON_BUCKET_UUID=7a4f9488-bd7b-4372-9fe8-a38fe03014f3 + +# Cloudflare Worker Upload Endpoint +REACT_APP_CLOUDFLARE_WORKER_URL=https://poi-upload-worker-prod.accounts-40a.workers.dev diff --git a/.env.test b/.env.test index addbebb..6dbf5c1 100644 --- a/.env.test +++ b/.env.test @@ -3,7 +3,10 @@ REACT_APP_KEYRING_PREFIX=2 REACT_APP_RPC={} REACT_APP_PROVIDER_SOCKET=wss://sys.ibp.network/asset-hub-kusama -# These keys are read-only, no harm in exposing them -REACT_APP_PINATA_API_KEY="6410248abd4b4babc313" -REACT_APP_PINATA_API_SECRET="f781dcdc8f87713609e047e7d97ea6ee395b68ea5b92abfa35caa92ed7d89919" -REACT_APP_PINATA_GATEWAY="https://peach-accurate-gull-904.mypinata.cloud" +# Apillon Storage Configuration (Read-Only) +REACT_APP_APILLON_API_KEY=test_apillon_api_key +REACT_APP_APILLON_API_SECRET=test_apillon_api_secret +REACT_APP_APILLON_BUCKET_UUID=test_apillon_bucket_uuid + +# Cloudflare Worker Upload Endpoint +REACT_APP_CLOUDFLARE_WORKER_URL=https://poi-upload-worker-prod.accounts-40a.workers.dev diff --git a/.gitignore b/.gitignore index f0a174d..b50ceed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,7 @@ # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. # dependencies -/node_modules +**/node_modules /.pnp .pnp.js /.yarn diff --git a/README.md b/README.md index fa5e58e..90cb28c 100644 --- a/README.md +++ b/README.md @@ -4,14 +4,15 @@ This is a dedicated interface for Kusama Society. ## Dependencies -* [Node v18.15.0](#) -* [yarn](https://yarnpkg.com) +- [Node v18.15.0](#) +- [yarn](https://yarnpkg.com) ## Setup - Clone this repository. 
- Copy the development sample config file on the root folder: + ```bash cp .env.development.sample .env.development ``` @@ -27,6 +28,7 @@ yarn install ## Running with Docker - Run: + ```bash docker-compose up ``` @@ -38,16 +40,19 @@ docker-compose up ### Using Chopsticks: - Copy the development sample config file on the config folder: + ```bash cp config/kusama.yml.sample config/kusama.yml ``` - Use [Chopsticks](https://github.com/AcalaNetwork/chopsticks) and set `"PROVIDER_SOCKET": "ws://127.0.0.1:8000"` on your `.env.development` to run a local fork of Kusama with predetermined Society storage and a custom runtime (uncomment the `wasm-override` parameter on the config file if you want a custom runtime): + ``` yarn chopsticks ``` - Private keys for development accounts. Change hard derivation key to switch from `Alice` to `Bob`, `Charlie`, etc. Use this private key to import these development accounts to a wallet of your choice. + ``` bottom drive obey lake curtain smoke basket hold race lonely fit walk//Alice ``` @@ -55,69 +60,61 @@ bottom drive obey lake curtain smoke basket hold race lonely fit walk//Alice ### Building and running your own custom runtime Chopsticks allows for custom runtimes to be used. You can build a custom runtime using our fork of the runtimes repository, this version changes the rotation periods from days to seconds, in order to facilitate tests and development. + - Follow [this guide](https://docs.substrate.io/install/) to install Rust and the necessary dependencies to build Substrate - Clone the [forked repository](https://github.com/KappaSigmaMu/custom-kusama-runtime) and checkout to this branch: + ``` git checkout customized-society-pallet ``` + - Change the code (if you need, if not you can skip this step and use our customized version) - In the root folder of the forked repository, browse to Kusama's runtime directory + ``` cd relay/kusama ``` + - Inside the directory, run: + ``` cargo build --release ``` + - After finishing the build, browse back to the root directory and copy the wasm blob to this repository, renaming it to `custom-kusama-runtime.wasm`: + ``` cp target/release/wbuild/staging-kusama-runtime/staging_kusama_runtime.wasm ../kappasigmamu.github.io/custom-kusama-runtime.wasm ``` + - Uncomment the `wasm-override` parameter on `config/kusama.yml` and run Chopsticks: + ``` yarn chopsticks ``` - You can also access [Polkadotjs pointing to your development node](https://polkadot.js.org/apps/?rpc=ws%3A%2F%2F127.0.0.1%3A8000#/society) to interact with it. -### Managing Proof-of-Ink images +### Submitting Proof-of-Ink -We use IPFS to host the images and Pinata to pin the folder. The images are optimized and renamed to `.jpg` before getting uploaded. The scripts can be found inside `scripts/poi`. 
+Society members can submit their Kappa Sigma Mu tattoo images directly through the web interface at: -#### Requirements: -- Python libraries: ``` -pip3 install Pillow pillow-heif python-dotenv +https://kappasigmamu.github.io/explore/poi/submit ``` -#### Optimizing images -- Optimize an entire folder: -``` -python3 optimize_multiple.py -``` -- Rename and optimize single image: -``` -python3 rename_and_optimize.py -``` +#### Submission Process: -#### Interacting with IPFS/Pinata -- PS: requires a `.env` inside `scripts/poi` with `PINATA_API_KEY` and `PINATA_API_SECRET` -- Install IPFS and run it: -``` -ipfs daemon -``` -- Upload folder to Pinata and pin it: -``` -python3 upload.py -``` -- Download pinned folder: -``` -python3 download.py -``` -- Full job - takes a new image, renames and optimizes it, uploads the new folder to Pinata and pins it, and finally unpins the old folder. The optional param `force` let's you overwrite an image that already exists. -``` -python3 job.py [optional=force] -``` +1. Navigate to the submit page +2. Upload your tattoo image (JPG, PNG, HEIC supported) +3. Submit for approval + +#### Automated Workflow: + +- **Society Members**: Automatically approved and uploaded to Apillon storage +- **Non-Members**: Submitted for manual review before approval +- Images are stored permanently on Apillon's decentralized storage +- Approval is handled by a Cloudflare Worker (`workers/poi-upload-worker`) ## Application: @@ -133,7 +130,6 @@ yarn lint You can automatically fix some issues with `yarn lint:fix` - ## Tests ``` diff --git a/scripts/apillon-bucket/.env.example b/scripts/apillon-bucket/.env.example new file mode 100644 index 0000000..fc7fa19 --- /dev/null +++ b/scripts/apillon-bucket/.env.example @@ -0,0 +1,10 @@ +# Apillon Configuration +# Get these from https://app.apillon.io + +APILLON_API_KEY=your_api_key_here +APILLON_API_SECRET=your_api_secret_here +APILLON_BUCKET_UUID=your_bucket_uuid_here + +# Optional: Matrix/Element Notifications +MATRIX_ROOM_ID=!YourRoomId:matrix.org +MATRIX_ACCESS_TOKEN=your_access_token diff --git a/scripts/apillon-bucket/README.md b/scripts/apillon-bucket/README.md new file mode 100644 index 0000000..1d96a87 --- /dev/null +++ b/scripts/apillon-bucket/README.md @@ -0,0 +1,61 @@ +# Apillon Bucket Setup Scripts + +Scripts for setting up and migrating Proof-of-Ink tattoo images to Apillon IPFS storage. + +## Quick Start + +```bash +cd scripts/apillon-bucket +npm install +cp .env.example .env +# Edit .env with your credentials +npm run setup +npm run migrate +``` + +## Commands + +### `npm run setup` + +Initial bucket setup (run once): +- Creates folder structure (`pending/`, `approved/`, `rejected/`) +- Verifies configuration + +### `npm run migrate` + +Migrate images from Pinata to Apillon: +1. Downloads images from Pinata IPFS +2. Uploads to Apillon's `approved/` folder +3. 
Automatically skips duplicates + +**Prerequisites:** +- Run `npm run setup` first +- Configure Pinata credentials in `.env` +- Optional: Create `filelist.txt` with one filename per line (156 images total) + +## Environment Variables + +Create `scripts/apillon-bucket/.env`: + +```env +# Apillon credentials +APILLON_API_KEY=your_api_key +APILLON_API_SECRET=your_api_secret +APILLON_BUCKET_UUID=your_bucket_uuid + +# Pinata credentials +PINATA_API_KEY=your_pinata_api_key +PINATA_API_SECRET=your_pinata_secret_api_key +PINATA_GATEWAY=https://your-gateway.mypinata.cloud +``` + +Get Apillon credentials from: https://app.apillon.io + +## Bucket Structure + +``` +kappa-sigma-mu-poi/ +├── pending/ # Awaiting approval +├── approved/ # Live in gallery +└── rejected/ # Archive +``` diff --git a/scripts/apillon-bucket/migrate-pinata-to-apillon.js b/scripts/apillon-bucket/migrate-pinata-to-apillon.js new file mode 100644 index 0000000..d35585b --- /dev/null +++ b/scripts/apillon-bucket/migrate-pinata-to-apillon.js @@ -0,0 +1,553 @@ +#!/usr/bin/env node + +/** + * Migrate existing Pinata tattoos to Apillon + * + * Downloads all images from Pinata folder and uploads to Apillon approved/ + */ + +import { Storage } from '@apillon/sdk'; +import fs from 'fs'; +import path from 'path'; +import fetch from 'node-fetch'; +import { fileURLToPath } from 'url'; +import 'dotenv/config'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const colors = { + reset: '\x1b[0m', + green: '\x1b[32m', + red: '\x1b[31m', + yellow: '\x1b[33m', + blue: '\x1b[34m', + cyan: '\x1b[36m' +}; + +function log(message, color = 'reset') { + console.log(`${colors[color]}${message}${colors.reset}`); +} + +async function getPinataFolderContents(folderHash) { + log(' Fetching folder contents using IPFS API...', 'blue'); + + // Try Pinata's own gateway API first + const pinataGateway = process.env.PINATA_GATEWAY; + if (pinataGateway) { + try { + log(` Trying Pinata gateway API...`, 'blue'); + const apiUrl = `${pinataGateway}/api/v0/ls?arg=${folderHash}`; + const response = await fetch(apiUrl, { method: 'POST' }); + + if (response.ok) { + const data = await response.json(); + if (data.Objects && data.Objects[0] && data.Objects[0].Links) { + const files = data.Objects[0].Links + .filter(link => (link.Name || '').match(/\.(jpg|jpeg|png|heic|webp)$/i)) + .map(link => link.Name); + + log(` ✓ Found ${files.length} files via Pinata API`, 'green'); + return files; + } + } + } catch (error) { + log(` ✗ Pinata API failed: ${error.message}`, 'yellow'); + } + } + + // Try public IPFS HTTP API endpoints + const ipfsApiEndpoints = [ + 'https://ipfs.io', + 'https://dweb.link', + 'https://gateway.pinata.cloud' + ]; + + for (const endpoint of ipfsApiEndpoints) { + try { + log(` Trying ${endpoint}...`, 'blue'); + const apiUrl = `${endpoint}/api/v0/ls?arg=${folderHash}`; + const response = await fetch(apiUrl, { method: 'POST' }); + + if (response.ok) { + const data = await response.json(); + if (data.Objects && data.Objects[0] && data.Objects[0].Links) { + const files = data.Objects[0].Links + .filter(link => (link.Name || '').match(/\.(jpg|jpeg|png|heic|webp)$/i)) + .map(link => link.Name); + + log(` ✓ Found ${files.length} files via IPFS API`, 'green'); + return files; + } + } + } catch (error) { + log(` ✗ ${endpoint} failed: ${error.message}`, 'yellow'); + continue; + } + } + + // Final fallback: Use Kubo RPC API format + try { + log(' Trying IPFS Kubo RPC format...', 'blue'); + const response = await 
fetch('https://ipfs.io/api/v0/ls', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
+      body: `arg=/ipfs/${folderHash}`
+    });
+
+    if (response.ok) {
+      const data = await response.json();
+      if (data.Objects && data.Objects[0] && data.Objects[0].Links) {
+        const files = data.Objects[0].Links
+          .filter(link => (link.Name || '').match(/\.(jpg|jpeg|png|heic|webp)$/i))
+          .map(link => link.Name);
+
+        log(`  ✓ Found ${files.length} files via Kubo RPC`, 'green');
+        return files;
+      }
+    }
+  } catch (error) {
+    log(`  ✗ Kubo RPC failed: ${error.message}`, 'yellow');
+  }
+
+  // Last resort: Parse gateway HTML (limited to ~100 files)
+  log('  Falling back to gateway HTML parsing...', 'yellow');
+  log('  ⚠ Warning: This method is limited and may not return all files', 'yellow');
+
+  const gateway = process.env.PINATA_GATEWAY || 'https://ipfs.io';
+  const gatewayUrl = `${gateway}/ipfs/${folderHash}`;
+
+  const gatewayResponse = await fetch(gatewayUrl);
+  if (!gatewayResponse.ok) {
+    throw new Error(`Failed to fetch folder: ${gatewayResponse.statusText}`);
+  }
+
+  const html = await gatewayResponse.text();
+  // Match <a href> links in the gateway's directory listing HTML;
+  // the captured group is the (URL-encoded) file path
+  const filePattern = /<a href="([^"]*)"[^>]*>/gi;
+  const files = [];
+  let match;
+
+  while ((match = filePattern.exec(html)) !== null) {
+    let filename = decodeURIComponent(match[1]);
+    filename = filename.replace(/^\/ipfs\/[^\/]+\//, '');
+    if (filename && !files.includes(filename)) {
+      files.push(filename);
+    }
+  }
+
+  log(`  ⚠ HTML parsing found ${files.length} files (may be incomplete)`, 'yellow');
+  log(`  💡 Consider manually creating a file list at: filelist.txt`, 'cyan');
+
+  return files;
+}
+
+async function getLatestPinataHash() {
+  const url = 'https://api.pinata.cloud/data/pinList?status=pinned&sort=date_pinned,desc';
+
+  const response = await fetch(url, {
+    headers: {
+      'pinata_api_key': process.env.PINATA_API_KEY,
+      'pinata_secret_api_key': process.env.PINATA_API_SECRET
+    }
+  });
+
+  if (!response.ok) {
+    throw new Error(`Pinata API error: ${response.statusText}`);
+  }
+
+  const data = await response.json();
+  return data.rows[0]?.ipfs_pin_hash;
+}
+
+async function downloadFromIPFS(hash, filename, tempDir) {
+  const gateway = process.env.PINATA_GATEWAY || 'https://ipfs.io';
+  const url = `${gateway}/ipfs/${hash}/${filename}`;
+
+  log(`  Downloading ${filename}...`, 'blue');
+
+  const response = await fetch(url);
+  if (!response.ok) {
+    throw new Error(`Failed to download ${filename}: ${response.statusText}`);
+  }
+
+  const buffer = await response.arrayBuffer();
+  const filepath = path.join(tempDir, filename);
+  fs.writeFileSync(filepath, Buffer.from(buffer));
+
+  return filepath;
+}
+
+async function getApillonFiles() {
+  try {
+    log('  Fetching existing files from Apillon (may take a while)...', 'cyan');
+
+    const auth = btoa(`${process.env.APILLON_API_KEY}:${process.env.APILLON_API_SECRET}`);
+    const bucketUuid = process.env.APILLON_BUCKET_UUID;
+
+    let allFiles = [];
+    let page = 1;
+    let hasMore = true;
+
+    while (hasMore) {
+      const response = await fetch(
+        `https://api.apillon.io/storage/buckets/${bucketUuid}/files?limit=100&page=${page}`,
+        {
+          headers: {
+            'Authorization': `Basic ${auth}`,
+            'Content-Type': 'application/json'
+          }
+        }
+      );
+
+      if (!response.ok) {
+        throw new Error(`API error: ${response.status}`);
+      }
+
+      const data = await response.json();
+      const items = data.data?.items || [];
+
+      allFiles.push(...items.filter(item => item.name !== '.gitkeep'));
+
+      const total = data.data?.total || 0;
+      hasMore = allFiles.length < total && items.length > 0;
+      
page++; + } + + // Extract just the filenames + const filenames = allFiles.map(item => item.name); + log(` Found ${filenames.length} files already in Apillon`, 'cyan'); + + return filenames; + } catch (error) { + log(`⚠ Could not list Apillon files: ${error.message}`, 'yellow'); + log(' Will attempt to upload all files', 'yellow'); + return []; + } +} + +async function uploadToApillon(bucket, filepath, filename) { + log(` Uploading ${filename} to Apillon...`, 'blue'); + + const auth = btoa(`${process.env.APILLON_API_KEY}:${process.env.APILLON_API_SECRET}`); + const bucketUuid = process.env.APILLON_BUCKET_UUID; + + // Step 1: Initiate upload session + const initiateUrl = `https://api.apillon.io/storage/buckets/${bucketUuid}/upload`; + const fileBuffer = fs.readFileSync(filepath); + + const initiateResponse = await fetch(initiateUrl, { + method: 'POST', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + files: [{ + fileName: filename, + contentType: 'image/jpeg', + path: 'approved' + }] + }) + }); + + if (!initiateResponse.ok) { + const error = await initiateResponse.text(); + throw new Error(`Failed to initiate upload: ${error}`); + } + + const initiateData = await initiateResponse.json(); + const sessionUuid = initiateData.data.sessionUuid; + const uploadUrl = initiateData.data.files[0].url; + + // Step 2: Upload file to signed URL + const uploadResponse = await fetch(uploadUrl, { + method: 'PUT', + body: fileBuffer, + headers: { + 'Content-Type': 'image/jpeg' + } + }); + + if (!uploadResponse.ok) { + throw new Error(`Failed to upload file: ${uploadResponse.statusText}`); + } + + // Step 3: Complete upload session + const completeUrl = `https://api.apillon.io/storage/buckets/${bucketUuid}/upload/${sessionUuid}/end`; + const completeResponse = await fetch(completeUrl, { + method: 'POST', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + } + }); + + if (!completeResponse.ok) { + const error = await completeResponse.text(); + throw new Error(`Failed to complete upload: ${error}`); + } + + log(` ✓ ${filename} uploaded`, 'green'); +} + +async function main() { + log('\n╔════════════════════════════════════════════╗', 'cyan'); + log('║ Pinata → Apillon Migration Tool ║', 'cyan'); + log('╚════════════════════════════════════════════╝\n', 'cyan'); + + // Step 1: Check environment variables + log('Step 1: Checking environment variables...', 'blue'); + + const requiredVars = [ + 'APILLON_API_KEY', + 'APILLON_API_SECRET', + 'APILLON_BUCKET_UUID', + 'PINATA_API_KEY', + 'PINATA_API_SECRET' + ]; + + const missing = requiredVars.filter(v => !process.env[v]); + + if (missing.length > 0) { + log('✗ Missing environment variables:', 'red'); + missing.forEach(v => log(` - ${v}`, 'red')); + log('\nEdit scripts/apillon-bucket/.env with required variables', 'yellow'); + process.exit(1); + } + + log('✓ All environment variables present\n', 'green'); + + // Step 2: Initialize Apillon + log('Step 2: Connecting to Apillon...', 'blue'); + + const storage = new Storage({ + key: process.env.APILLON_API_KEY, + secret: process.env.APILLON_API_SECRET + }); + + const bucket = storage.bucket(process.env.APILLON_BUCKET_UUID); + log('✓ Connected to Apillon\n', 'green'); + + // Step 3: Get current Pinata hash + log('Step 3: Finding current Pinata folder...', 'blue'); + + let folderHash; + try { + folderHash = await getLatestPinataHash(); + if (!folderHash) { + log('✗ No Pinata folder found', 'red'); + process.exit(1); + } + 
log(`✓ Found folder: ${folderHash}\n`, 'green'); + } catch (error) { + log('✗ Could not access Pinata API', 'red'); + log(`Error: ${error.message}`, 'red'); + process.exit(1); + } + + // Step 4: List all files in Pinata folder + log('Step 4: Listing files in Pinata folder...', 'blue'); + + let files; + + // Check for manual file list first + const filelistPath = path.join(__dirname, 'filelist.txt'); + if (fs.existsSync(filelistPath)) { + log(' 📄 Found filelist.txt, using manual file list', 'green'); + const filelistContent = fs.readFileSync(filelistPath, 'utf-8'); + files = filelistContent + .split('\n') + .map(line => line.trim()) + .filter(line => line && !line.startsWith('#')) + .filter(line => line.match(/\.(jpg|jpeg|png|heic|webp)$/i)); + + log(`✓ Loaded ${files.length} files from filelist.txt\n`, 'green'); + } else { + // Auto-discover from IPFS + try { + files = await getPinataFolderContents(folderHash); + if (files.length === 0) { + log('✗ No image files found in Pinata folder', 'red'); + process.exit(1); + } + log(`✓ Found ${files.length} image(s)\n`, 'green'); + + // Warn if it looks incomplete + if (files.length === 100) { + log('⚠ Warning: Found exactly 100 files, which suggests pagination limit', 'yellow'); + log('💡 Create a filelist.txt with all filenames (one per line) for complete migration\n', 'cyan'); + } + } catch (error) { + log('✗ Could not list folder contents', 'red'); + log(`Error: ${error.message}`, 'red'); + log('\n💡 Create a filelist.txt file with one filename per line', 'cyan'); + process.exit(1); + } + } + + // Step 5: Check existing files in Apillon + log('Step 5: Checking existing files in Apillon...', 'blue'); + + const existingFiles = await getApillonFiles(); + log(`✓ Found ${existingFiles.length} existing files in Apillon`, 'green'); + + if (existingFiles.length > 0) { + log(` Sample existing files:`, 'cyan'); + existingFiles.slice(0, 5).forEach(f => log(` - ${f}`, 'cyan')); + } + log('', 'reset'); + + // Step 6: Create temp directory for downloads + log('Step 6: Setting up temporary directory...', 'blue'); + + const tempDir = path.join(__dirname, 'temp-migration'); + if (!fs.existsSync(tempDir)) { + fs.mkdirSync(tempDir, { recursive: true }); + } + + log(`✓ Created: ${tempDir}\n`, 'green'); + + // Step 7: Download all images from Pinata + log('Step 7: Downloading images from Pinata...', 'blue'); + + const downloadedFiles = []; + let downloadFailed = 0; + const downloadErrors = []; + + for (let i = 0; i < files.length; i++) { + const filename = files[i]; + const filepath = path.join(tempDir, filename); + + // Skip if already downloaded + if (fs.existsSync(filepath)) { + log(`[${i + 1}/${files.length}] ${filename} - already downloaded, skipping`, 'cyan'); + downloadedFiles.push({ filename, filepath }); + continue; + } + + try { + log(`[${i + 1}/${files.length}] Downloading ${filename}...`, 'cyan'); + const downloadedPath = await downloadFromIPFS(folderHash, filename, tempDir); + downloadedFiles.push({ filename, filepath: downloadedPath }); + log(` ✓ Downloaded\n`, 'green'); + } catch (error) { + downloadFailed++; + downloadErrors.push({ filename, error: error.message }); + log(` ✗ Failed: ${error.message}\n`, 'red'); + } + } + + log(`\nDownload summary: ${downloadedFiles.length} successful, ${downloadFailed} failed\n`, 'cyan'); + + // Step 8: Upload all images to Apillon + log('Step 8: Uploading images to Apillon...', 'blue'); + + // Calculate how many files would be skipped + const filesToSkip = downloadedFiles.filter(({ filename }) => 
existingFiles.includes(filename)); + const filesToUpload = downloadedFiles.filter(({ filename }) => !existingFiles.includes(filename)); + + log(` Files to skip (already exist): ${filesToSkip.length}`, 'yellow'); + log(` Files to upload (new): ${filesToUpload.length}`, 'green'); + + if (filesToUpload.length === 0) { + log('\n✓ All files already exist in Apillon. Nothing to upload!\n', 'green'); + return; + } + + log(`\n⚠ About to upload ${filesToUpload.length} files. Press Ctrl+C to cancel, or wait 3 seconds...`, 'yellow'); + await new Promise(resolve => setTimeout(resolve, 3000)); + + let uploadSuccessful = 0; + let uploadFailed = 0; + let uploadSkipped = 0; + const uploadErrors = []; + + for (let i = 0; i < downloadedFiles.length; i++) { + const { filename, filepath } = downloadedFiles[i]; + + // Skip if already exists in Apillon + if (existingFiles.includes(filename)) { + uploadSkipped++; + if (uploadSkipped <= 5) { + log(`[${i + 1}/${downloadedFiles.length}] ${filename} - already exists, skipping`, 'yellow'); + } + continue; + } + + try { + log(`[${i + 1}/${downloadedFiles.length}] Uploading ${filename}...`, 'cyan'); + await uploadToApillon(bucket, filepath, filename); + uploadSuccessful++; + log(` ✓ Uploaded\n`, 'green'); + } catch (error) { + uploadFailed++; + uploadErrors.push({ filename, error: error.message }); + log(` ✗ Failed: ${error.message}\n`, 'red'); + } + } + + log(`\nUpload summary: ${uploadSuccessful} successful, ${uploadFailed} failed\n`, 'cyan'); + + // Step 9: Cleanup + log('Step 9: Cleaning up temporary files...', 'blue'); + + try { + // Remove all downloaded files + for (const { filepath } of downloadedFiles) { + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + } + } + // Remove temp directory + if (fs.existsSync(tempDir)) { + fs.rmdirSync(tempDir); + } + log('✓ Temporary files removed\n', 'green'); + } catch (error) { + log('⚠ Could not remove all temp files', 'yellow'); + } + + // Step 10: Final Summary + log('╔════════════════════════════════════════════╗', 'cyan'); + log('║ Migration Complete! ║', 'cyan'); + log('╚════════════════════════════════════════════╝\n', 'cyan'); + + log('Download Results:', 'blue'); + log(` ✓ Downloaded: ${downloadedFiles.length}`, 'green'); + if (downloadFailed > 0) { + log(` ✗ Failed: ${downloadFailed}`, 'red'); + } + + log('\nUpload Results:', 'blue'); + log(` ✓ Uploaded: ${uploadSuccessful}`, 'green'); + log(` ⊘ Skipped (already exists): ${uploadSkipped}`, 'yellow'); + if (uploadFailed > 0) { + log(` ✗ Failed: ${uploadFailed}`, 'red'); + } + + if (downloadErrors.length > 0) { + log('\nDownload Errors:', 'yellow'); + downloadErrors.forEach(({ filename, error }) => { + log(` - ${filename}: ${error}`, 'yellow'); + }); + } + + if (uploadErrors.length > 0) { + log('\nUpload Errors:', 'yellow'); + uploadErrors.forEach(({ filename, error }) => { + log(` - ${filename}: ${error}`, 'yellow'); + }); + } + + log('\nNext steps:', 'blue'); + log('1. Verify images in Apillon dashboard', 'yellow'); + log('2. Update frontend to use Apillon instead of Pinata', 'yellow'); + log('3. 
Test the gallery page\n', 'yellow'); +} + +main().catch(error => { + log('\n✗ Migration failed', 'red'); + log(error.message, 'red'); + console.error(error); + process.exit(1); +}); diff --git a/scripts/apillon-bucket/package.json b/scripts/apillon-bucket/package.json new file mode 100644 index 0000000..857bfcb --- /dev/null +++ b/scripts/apillon-bucket/package.json @@ -0,0 +1,15 @@ +{ + "name": "poi-scripts", + "version": "1.0.0", + "description": "Proof-of-Ink management scripts", + "type": "module", + "scripts": { + "setup": "node setup-apillon.js", + "migrate": "node migrate-pinata-to-apillon.js" + }, + "dependencies": { + "@apillon/sdk": "^2.0.0", + "dotenv": "^16.0.0", + "node-fetch": "^3.0.0" + } +} diff --git a/scripts/apillon-bucket/setup-apillon.js b/scripts/apillon-bucket/setup-apillon.js new file mode 100644 index 0000000..aa2a256 --- /dev/null +++ b/scripts/apillon-bucket/setup-apillon.js @@ -0,0 +1,195 @@ +#!/usr/bin/env node + +/** + * Apillon Proof-of-Ink Setup Script + * + * Sets up bucket structure and verifies configuration + * Run once before implementing direct uploads + */ + +import { Storage } from '@apillon/sdk'; +import fs from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import 'dotenv/config'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Colors for terminal output +const colors = { + reset: '\x1b[0m', + green: '\x1b[32m', + red: '\x1b[31m', + yellow: '\x1b[33m', + blue: '\x1b[34m', + cyan: '\x1b[36m' +}; + +function log(message, color = 'reset') { + console.log(`${colors[color]}${message}${colors.reset}`); +} + +async function main() { + log('\n╔════════════════════════════════════════════╗', 'cyan'); + log('║ Apillon Proof-of-Ink Setup Script ║', 'cyan'); + log('╚════════════════════════════════════════════╝\n', 'cyan'); + + // Step 1: Check environment variables + log('Step 1: Checking environment variables...', 'blue'); + + const requiredVars = [ + 'APILLON_API_KEY', + 'APILLON_API_SECRET', + 'APILLON_BUCKET_UUID' + ]; + + const missing = requiredVars.filter(v => !process.env[v]); + + if (missing.length > 0) { + log('✗ Missing environment variables:', 'red'); + missing.forEach(v => log(` - ${v}`, 'red')); + log('\nCreate scripts/.env with these variables', 'yellow'); + process.exit(1); + } + + log('✓ All environment variables present\n', 'green'); + + // Step 2: Initialize Apillon SDK + log('Step 2: Connecting to Apillon...', 'blue'); + + let storage, bucket; + try { + storage = new Storage({ + key: process.env.APILLON_API_KEY, + secret: process.env.APILLON_API_SECRET + }); + + bucket = storage.bucket(process.env.APILLON_BUCKET_UUID); + log('✓ Connected to Apillon\n', 'green'); + } catch (error) { + log('✗ Failed to connect to Apillon', 'red'); + log(error.message, 'red'); + process.exit(1); + } + + // Step 3: Create directory structure + log('Step 3: Setting up bucket directories...', 'blue'); + + const directories = ['pending', 'approved', 'rejected']; + + try { + // Create a dummy file in each directory to ensure they exist + // Apillon doesn't have explicit "create directory" - directories are created when files are uploaded + + const dummyContent = Buffer.from('# Placeholder file - safe to delete after first real upload'); + + for (const dir of directories) { + log(` Creating ${dir}/...`, 'blue'); + + await bucket.uploadFiles([{ + fileName: '.gitkeep', + content: dummyContent, + contentType: 'text/plain' + }], { + directoryPath: dir + }); + + log(` ✓ ${dir}/ created`, 
'green'); + } + + log('✓ Directory structure ready\n', 'green'); + } catch (error) { + log('✗ Failed to create directories', 'red'); + log(error.message, 'red'); + process.exit(1); + } + + // Step 4: Verify structure + log('Step 4: Verifying bucket structure...', 'blue'); + + try { + const rootFiles = await bucket.listObjects(); + + log(`\n Bucket contents:`, 'cyan'); + log(` - Total items: ${rootFiles.items.length}`, 'cyan'); + + for (const dir of directories) { + const files = await bucket.listObjects({ directoryPath: dir }); + log(` - ${dir}/: ${files.items.length} file(s)`, 'cyan'); + } + + log('\n✓ Structure verified\n', 'green'); + } catch (error) { + log('⚠ Could not verify structure (may be OK)', 'yellow'); + log(error.message, 'yellow'); + } + + // Step 5: Create .env.example if it doesn't exist + log('Step 5: Creating .env.example...', 'blue'); + + const envExamplePath = path.join(__dirname, '.env.example'); + const envExample = `# Apillon Configuration +# Get these from https://app.apillon.io + +APILLON_API_KEY=your_api_key_here +APILLON_API_SECRET=your_api_secret_here +APILLON_BUCKET_UUID=your_bucket_uuid_here + +# Optional: Matrix/Element Notifications +MATRIX_ROOM_ID=!YourRoomId:matrix.org +MATRIX_ACCESS_TOKEN=your_access_token +`; + + fs.writeFileSync(envExamplePath, envExample); + log('✓ Created .env.example\n', 'green'); + + // Step 6: Check for existing Pinata images + log('Step 6: Checking for Pinata migration...', 'blue'); + + const pinataEnvPath = path.resolve(__dirname, '../.env.production'); + + if (fs.existsSync(pinataEnvPath)) { + const envContent = fs.readFileSync(pinataEnvPath, 'utf-8'); + const hasPinata = envContent.includes('REACT_APP_PINATA_GATEWAY'); + + if (hasPinata) { + log('⚠ Pinata configuration detected', 'yellow'); + log(' Run migration script to move existing tattoos:', 'yellow'); + log(' npm run migrate-pinata-to-apillon\n', 'yellow'); + } else { + log('✓ No Pinata migration needed\n', 'green'); + } + } + + // Step 7: Summary + log('╔════════════════════════════════════════════╗', 'cyan'); + log('║ Setup Complete! ✓ ║', 'cyan'); + log('╚════════════════════════════════════════════╝\n', 'cyan'); + + log('Next steps:', 'blue'); + log('1. Update .env.production with frontend credentials:', 'yellow'); + log(' REACT_APP_APILLON_API_KEY=', 'yellow'); + log(' REACT_APP_APILLON_BUCKET_UUID=\n', 'yellow'); + + log('2. Build the upload form component', 'yellow'); + log(' File: src/pages/explore/ProofOfInkPage/SubmitPage.tsx\n', 'yellow'); + + log('3. Test the approval workflow:', 'yellow'); + log(' npm run approve-poi\n', 'yellow'); + + log('Bucket structure:', 'cyan'); + log(' kappa-sigma-mu-poi/', 'cyan'); + log(' ├── pending/ (awaiting approval)', 'cyan'); + log(' ├── approved/ (live in gallery)', 'cyan'); + log(' └── rejected/ (archive)\n', 'cyan'); + + log('Ready to implement! 
🚀\n', 'green'); +} + +main().catch(error => { + log('\n✗ Setup failed', 'red'); + log(error.message, 'red'); + console.error(error); + process.exit(1); +}); diff --git a/scripts/poi/download.py b/scripts/poi/download.py deleted file mode 100644 index 1673101..0000000 --- a/scripts/poi/download.py +++ /dev/null @@ -1,52 +0,0 @@ -import sys -import requests -import os -import json - - -def download(ipfs_hash, download_path): - try: - ls_response = requests.post( - f'http://127.0.0.1:5001/api/v0/ls?arg={ipfs_hash}') - if ls_response.status_code != 200: - raise Exception( - f"Error listing folder contents: {ls_response.text}") - - folder_contents = json.loads(ls_response.text)['Objects'][0]['Links'] - - if not os.path.exists(download_path): - os.makedirs(download_path) - - for item in folder_contents: - file_name = item['Name'] - file_hash = item['Hash'] - file_path = os.path.join(download_path, file_name) - - cat_response = requests.post( - f'http://127.0.0.1:5001/api/v0/cat?arg={file_hash}', stream=True) - if cat_response.status_code != 200: - raise Exception( - f"Error downloading file {file_name}: {cat_response.text}") - - with open(file_path, 'wb') as f: - for chunk in cat_response.iter_content(chunk_size=8192): - f.write(chunk) - - print(f"Downloaded {file_name} to {download_path}") - except Exception as e: - print(f"Error: {e}") - - -def main(): - if len(sys.argv) != 3: - print("Usage: python3 download.py ") - sys.exit(1) - - ipfs_hash = sys.argv[1] - download_path = sys.argv[2] - - download(ipfs_hash, download_path) - - -if __name__ == "__main__": - main() diff --git a/scripts/poi/job.py b/scripts/poi/job.py deleted file mode 100644 index 55b2ee4..0000000 --- a/scripts/poi/job.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import requests -import shutil -import sys -import traceback -from dotenv import load_dotenv -from download import download -from rename_and_optimize import rename_and_optimize -from upload import unpin, upload - -load_dotenv() - -API_KEY = os.getenv('PINATA_API_KEY') -API_SECRET = os.getenv('PINATA_API_SECRET') - - -def get_latest_pinned_hash(): - url = 'https://api.pinata.cloud/data/pinList' - - headers = { - 'pinata_api_key': API_KEY, - 'pinata_secret_api_key': API_SECRET - } - - try: - response = requests.get(url, headers=headers) - - if response.status_code == 200: - pinned_items = response.json().get('rows', []) - - if not pinned_items: - print("No pinned items found.") - return None - - latest_item = max( - pinned_items, key=lambda x: x.get('date_pinned', '')) - - print(f"Latest pinned item: {latest_item.get('ipfs_pin_hash')}") - return latest_item.get('ipfs_pin_hash') - else: - print(f"Error fetching pinned items: {response.text}") - return None - - except Exception as e: - print(f"Exception occurred: {e}") - return None - - -def move_file_to_folder(file_path, destination_folder, force): - if not os.path.exists(destination_folder): - os.makedirs(destination_folder) - - destination_file_path = os.path.join( - destination_folder, os.path.basename(file_path)) - - if os.path.exists(destination_file_path) and not force: - print( - f"Error: The file {file_path} already exists. 
Use `force` to overwrite.") - sys.exit(1) - - shutil.move(file_path, destination_file_path) - - print(f"File {file_path} moved to {destination_file_path}") - - -def job(image_path, member_hash, force): - try: - image_folder = 'images' - current_pinned_hash = get_latest_pinned_hash() - - download(current_pinned_hash, image_folder) - new_filename = rename_and_optimize(image_path, member_hash) - move_file_to_folder(new_filename, image_folder, force) - success, new_pinned_hash = upload(image_folder) - if success and new_pinned_hash != current_pinned_hash: - unpin(current_pinned_hash) - except Exception as e: - tb = traceback.format_exc() - print(f"An error occurred: {e}") - print("Traceback details:") - print(tb) - - -def main(): - if len(sys.argv) < 3 or len(sys.argv) > 4: - print( - "Usage: python3 job.py [optional=force]") - sys.exit(1) - - image_path = sys.argv[1] - member_hash = sys.argv[2] - force = False - - if len(sys.argv) == 4 and sys.argv[3] == 'force': - force = True - - job(image_path, member_hash, force) - - -if __name__ == "__main__": - main() diff --git a/scripts/poi/optimize_multiple.py b/scripts/poi/optimize_multiple.py deleted file mode 100644 index 6418f21..0000000 --- a/scripts/poi/optimize_multiple.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -import sys -from rename_and_optimize import resize_image - - -def process_images(folder_path): - for filename in os.listdir(folder_path): - if filename.endswith(('.png', '.jpg', '.jpeg', '.gif', '.bmp')): - file_path = os.path.join(folder_path, filename) - - new_filename = os.path.splitext(filename)[0] + '.jpg' - new_file_path = os.path.join(folder_path, new_filename) - - resize_image(file_path, new_file_path) - - if new_file_path != file_path: - os.remove(file_path) - print(f'Optimized {filename}') - else: - print(f'File not supported {filename}') - - -def main(): - if len(sys.argv) != 2: - print("Usage: python3 optimize_multiple.py ") - sys.exit(1) - - folder_path = sys.argv[1] - - process_images(folder_path) - - -if __name__ == "__main__": - main() diff --git a/scripts/poi/rename_and_optimize.py b/scripts/poi/rename_and_optimize.py deleted file mode 100644 index 066c677..0000000 --- a/scripts/poi/rename_and_optimize.py +++ /dev/null @@ -1,75 +0,0 @@ -import os -import sys -from PIL import Image, ExifTags -from pillow_heif import register_heif_opener - - -def resize_image(input_path, output_path): - try: - register_heif_opener() - - with Image.open(input_path) as img: - if img.height > 1000 or img.width > 1000: - img.thumbnail((1000, 1000)) - - try: - for orientation in ExifTags.TAGS.keys(): - if ExifTags.TAGS[orientation] == 'Orientation': - break - - exif = dict(img._getexif().items()) - if exif[orientation] == 3: - img = img.rotate(180, expand=True) - elif exif[orientation] == 6: - img = img.rotate(270, expand=True) - elif exif[orientation] == 8: - img = img.rotate(90, expand=True) - except (AttributeError, KeyError, IndexError): - pass - - if img.mode == 'RGBA': - background = Image.new('RGB', img.size, (255, 255, 255)) - background.paste(img, mask=img.split()[3]) - img = background - - if img.mode != 'RGB': - img = img.convert('RGB') - - img.save(output_path, format='JPEG', quality=85, optimize=True) - except Exception as e: - print(f"Exception occurred: {e}") - sys.exit(1) - - -def rename_and_optimize(image_path, member_hash): - try: - directory = os.path.dirname(image_path) - - new_filename = f"{member_hash}.jpg" - new_image_path = os.path.join(directory, new_filename) - - resize_image(image_path, image_path) - 
print(f'Optimized {image_path}') - - os.rename(image_path, new_image_path) - print(f"Image renamed to: {new_filename}") - - return new_filename - except Exception as e: - print(f"Exception occurred: {e}") - sys.exit(1) - - -def main(): - if len(sys.argv) != 3: - print("Usage: python3 rename_and_optimize.py ") - sys.exit(1) - - image_path = sys.argv[1] - member_hash = sys.argv[2] - - rename_and_optimize(image_path, member_hash) - - -if __name__ == "__main__": - main() diff --git a/scripts/poi/upload.py b/scripts/poi/upload.py deleted file mode 100644 index 5774732..0000000 --- a/scripts/poi/upload.py +++ /dev/null @@ -1,94 +0,0 @@ -import json -import sys -import requests -import os -from datetime import datetime -from dotenv import load_dotenv - -load_dotenv() - -API_KEY = os.getenv('PINATA_API_KEY') -API_SECRET = os.getenv('PINATA_API_SECRET') - - -def unpin(ipfs_hash): - endpoint = f'https://api.pinata.cloud/pinning/unpin/{ipfs_hash}' - - headers = { - 'pinata_api_key': API_KEY, - 'pinata_secret_api_key': API_SECRET - } - - try: - response = requests.delete(endpoint, headers=headers) - - if response.status_code == 200: - print( - f"File with IPFS CID {ipfs_hash} has been successfully unpinned.") - else: - print(f"Error unpinning file. Status code: {response.status_code}") - print(response.text) - - except Exception as e: - print(f"An error occurred: {e}") - - -def create_payload(folder_path, metadata): - payload = [] - - payload.append(('pinataMetadata', (None, json.dumps(metadata)))) - - for root, _, files in os.walk(os.path.abspath(folder_path)): - for f in files: - complete_path = os.path.join(root, f) - payload.append(('file', (os.sep.join(complete_path.split( - os.sep)[-2:]), open(complete_path, 'rb')))) - - return payload - - -def upload_and_pin(folder_path): - url = 'https://api.pinata.cloud/pinning/pinFileToIPFS' - - headers = { - 'pinata_api_key': API_KEY, - 'pinata_secret_api_key': API_SECRET, - } - - timestamp = datetime.now().strftime('%Y%m%d%H%M%S') - metadata = { - 'name': f'kappa-sigma-mu-poi-{timestamp}' - } - - payload = create_payload(folder_path, metadata) - - response = requests.post(url, files=payload, headers=headers) - - return response - - -def upload(folder_path): - response = upload_and_pin(folder_path) - if response.status_code == 200: - new_pinned_hash = response.json()['IpfsHash'] - print( - f"Folder pinned successfully! 
Pinata IPFS Hash: {new_pinned_hash}") - return (True, new_pinned_hash) - else: - print( - f"Error uploading folder to Pinata: {response.status_code} - {response.text}") - return (False, None) - - -def main(): - if len(sys.argv) != 2: - print("Usage: python3 upload.py ") - sys.exit(1) - - folder_path = sys.argv[1] - - upload(folder_path) - - -if __name__ == "__main__": - main() diff --git a/src/components/MemberOffcanvas.tsx b/src/components/MemberOffcanvas.tsx index af2e79a..7adc07a 100644 --- a/src/components/MemberOffcanvas.tsx +++ b/src/components/MemberOffcanvas.tsx @@ -1,8 +1,8 @@ import { useEffect, useState } from 'react' import { Col, Row, Offcanvas, Container, Badge, Spinner } from 'react-bootstrap' import styled from 'styled-components' -import { fastestGateway, getLatestPinnedHash, imageUrl } from '../helpers/ipfs' import { Identicon } from '../pages/explore/components/Identicon' +import { apillonClient } from '../services/apillonClient' const formatHash = (str: string) => { if (!str) return '' @@ -18,22 +18,44 @@ const MemberOffcanvas = (props: { show: boolean; handleClose: any; member: any } const { member } = props const [loading, setLoading] = useState(true) const [error, setError] = useState(false) - const [folderHash, setFolderHash] = useState('') - const [gateway, setGateway] = useState('') + const [imageUrl, setImageUrl] = useState(undefined) useEffect(() => { - const fetchPinnedHash = async () => { - const folderHash = await getLatestPinnedHash() - setFolderHash(folderHash) - const gateway = await fastestGateway(folderHash) - setGateway(gateway) - } + const fetchTattooImage = async () => { + if (!member?.hash) { + setError(true) + setLoading(false) + return + } - fetchPinnedHash() - }, []) + try { + setLoading(true) + setError(false) + const listData = await apillonClient.listFiles(1000) // Fetch up to 1000 files + const allFiles = listData.data?.items || [] - useEffect(() => { - setLoading(true) + // Find the approved tattoo for this member + const approvedFile = allFiles.find((file: any) => { + const fileAddress = file.name.replace(/\.(jpg|png|heic|webp)$/i, '') + return ( + (file.path === 'approved/' || file.path?.startsWith('approved/')) && fileAddress === member.hash + ) + }) + + if (approvedFile) { + setImageUrl(approvedFile.link) + } else { + setError(true) + } + } catch (error) { + console.error('Failed to fetch tattoo:', error) + setError(true) + } finally { + setLoading(false) + } + } + + fetchTattooImage() }, [member]) return ( @@ -68,17 +90,19 @@ const MemberOffcanvas = (props: { show: boolean; handleClose: any; member: any } <> -

Be patient. The proof-of-ink pictures are hosted on IPFS and might take a while to load.
+                Loading proof-of-ink from Apillon...
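+                {/* Spinner and status message shown while the approved proof-of-ink is fetched from Apillon */}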
)} - setError(true)} - onLoad={() => setLoading(false)} - style={loading ? { display: 'none' } : {}} - /> + {imageUrl && ( + setError(true)} + onLoad={() => setLoading(false)} + style={loading ? { display: 'none' } : {}} + /> + )} ) : ( diff --git a/src/helpers/ipfs.ts b/src/helpers/ipfs.ts deleted file mode 100644 index 087f9eb..0000000 --- a/src/helpers/ipfs.ts +++ /dev/null @@ -1,58 +0,0 @@ -interface PinataResponse { - rows: Array<{ ipfs_pin_hash: string; date_pinned: string }> -} - -async function getLatestPinnedHash(): Promise { - const pinataApiKey = process.env.REACT_APP_PINATA_API_KEY - const pinataApiSecret = process.env.REACT_APP_PINATA_API_SECRET - - if (!pinataApiKey || !pinataApiSecret) { - return '' - } - - const url = 'https://api.pinata.cloud/data/pinList?status=pinned&sort=date_pinned,desc' - - try { - const response = await fetch(url, { - method: 'GET', - headers: { - pinata_api_key: pinataApiKey, - pinata_secret_api_key: pinataApiSecret - } - }) - - if (!response.ok) { - throw new Error(`Error: ${response.status}`) - } - - const data: PinataResponse = await response.json() - if (data.rows.length > 0) { - return data.rows[0].ipfs_pin_hash - } - - return '' - } catch (error) { - console.error(error) - return '' - } -} - -async function fastestGateway(folderHash: string) { - const publicGateways = ['https://ipfs.rmrk.link', 'https://ipfs2.rmrk.link', 'https://dweb.link'] - - try { - const fetchPromises = publicGateways.map((gateway) => - fetch(`${gateway}/ipfs/${folderHash}`).then((response) => (response.ok ? gateway : Promise.reject())) - ) - return await Promise.race(fetchPromises) - } catch (error) { - return '' - } -} - -const imageUrl = ({ gateway, folderHash, member }: { gateway: string; folderHash: string; member: string }) => { - const ipfsGateway = gateway ? gateway : process.env.REACT_APP_PINATA_GATEWAY - return `${ipfsGateway}/ipfs/${folderHash}/${member}.jpg` -} - -export { getLatestPinnedHash, fastestGateway, imageUrl } diff --git a/src/pages/CyborgGuidePage.tsx b/src/pages/CyborgGuidePage.tsx index fcbb67f..f2d0685 100644 --- a/src/pages/CyborgGuidePage.tsx +++ b/src/pages/CyborgGuidePage.tsx @@ -13,9 +13,7 @@ const WikiLink = styled(ExternalLink).attrs(() => ({ text-decoration: none; ` -const SocietyElementLink = styled(ExternalLink).attrs(() => ({ - href: 'https://riot.im/app/#/room/#kappasigmamulounge:matrix.parity.io' -}))` +const SubmitPoiLink = styled(LinkWithQuery)` text-decoration: none; ` @@ -119,8 +117,9 @@ const CyborgGuidePage = () => {

                Your tattoo, the Proof of Ink (PoI)
-                After getting your PoI, it’s time to show it to the world! You can submit a photo or video of it
-                here on our Kappa Sigma Mu Lounge.
+                After getting your PoI, it's time to show it to the world! You can submit a photo of it
+                here on our submission page. Society members
+                get automatically approved!
                Getting votes on your Proof of Ink

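The `MembersPage` hunk below replaces a bare `unwrap()` on `society.defending` with an `isSome` guard. A minimal sketch of the pattern, assuming a polkadot.js `Option` wrapping a `(defender, skeptic)` tuple (the live storage item may carry additional fields such as a vote tally):

```ts
import type { Option } from '@polkadot/types'
import type { AccountId32 } from '@polkadot/types/interfaces'
import type { ITuple } from '@polkadot/types/types'

// unwrap() throws on a None value, which breaks the page whenever no
// defender round is active; checking isSome first avoids the crash.
function readDefending(defending: Option<ITuple<[AccountId32, AccountId32]>>) {
  if (!defending.isSome) return undefined
  const [defender, skeptic] = defending.unwrap()
  return { defender, skeptic }
}
```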
diff --git a/src/pages/explore/MembersPage/index.tsx b/src/pages/explore/MembersPage/index.tsx index 8c68206..0911397 100644 --- a/src/pages/explore/MembersPage/index.tsx +++ b/src/pages/explore/MembersPage/index.tsx @@ -30,11 +30,14 @@ const MembersPage = ({ api }: MembersPageProps): JSX.Element => { setTrigger(true) society?.info().then((info: ExtendedDeriveSociety) => { api?.query.society.defending().then((defending) => { - defender = defending.unwrap()[0] - skeptic = defending.unwrap()[1] - - info.defender = defender - info.skeptic = skeptic + // Safe unwrap: only set defender/skeptic if defending has a value + if (defending.isSome) { + const defendingValue = defending.unwrap() + defender = defendingValue[0] + skeptic = defendingValue[1] + info.defender = defender + info.skeptic = skeptic + } deriveMembersInfo(api).then((responseMembers: ExtendedDeriveSociety[]) => { setMembers(buildSocietyMembersArray(responseMembers, info, graceStrikes)) diff --git a/src/pages/explore/ProofOfInkPage/GalleryPage.tsx b/src/pages/explore/ProofOfInkPage/GalleryPage.tsx index c2929a4..4d61e51 100644 --- a/src/pages/explore/ProofOfInkPage/GalleryPage.tsx +++ b/src/pages/explore/ProofOfInkPage/GalleryPage.tsx @@ -3,19 +3,22 @@ import { StorageKey } from '@polkadot/types' import { AccountId32 } from '@polkadot/types/interfaces' import { useEffect, useState } from 'react' import { Container, Row, Col, Modal, Spinner } from 'react-bootstrap' +import toast from 'react-hot-toast' import styled from 'styled-components' import { AccountIdentity } from '../../../components/AccountIdentity' -import { getLatestPinnedHash, fastestGateway, imageUrl } from '../../../helpers/ipfs' +import { apillonClient } from '../../../services/apillonClient' import { Identicon } from '../components/Identicon' type ExamplesPageProps = { api: ApiPromise | null } +const WORKER_URL = process.env.REACT_APP_CLOUDFLARE_WORKER_URL + const GalleryPage = ({ api }: ExamplesPageProps): JSX.Element => { const [members, setMembers] = useState>([]) - const [folderHash, setFolderHash] = useState('') - const [gateway, setGateway] = useState('') + const [tattooMap, setTattooMap] = useState>(new Map()) + const [syncAttempted, setSyncAttempted] = useState(false) const society = api?.query?.society @@ -31,47 +34,123 @@ const GalleryPage = ({ api }: ExamplesPageProps): JSX.Element => { }, [society]) useEffect(() => { - const fetchPinnedHash = async () => { - const folderHash = await getLatestPinnedHash() - setFolderHash(folderHash) - const gateway = await fastestGateway(folderHash) - setGateway(gateway) + const fetchApprovedTattoos = async () => { + try { + const listData = await apillonClient.listFiles() + const allFiles = listData.data?.items || [] + + // Filter approved folder files + const approvedFiles = allFiles.filter( + (file: any) => file.path === 'approved/' || file.path?.startsWith('approved/') + ) + + // Build map: address -> CDN link + const map = new Map() + approvedFiles.forEach((file: any) => { + const address = file.name.replace(/\.(jpg|png|heic|webp)$/i, '') + map.set(address, file.link) + }) + + setTattooMap(map) + } catch (error) { + console.error('Failed to fetch tattoos:', error) + } } - fetchPinnedHash() + fetchApprovedTattoos() }, []) - return !folderHash && !gateway ? 
( + // Auto-sync: Move pending tattoos to approved for members + useEffect(() => { + if (members.length === 0 || syncAttempted) return + + const syncApprovedMembers = async () => { + try { + // Fetch pending files from Apillon + const listData = await apillonClient.listFiles() + const allFiles = listData.data?.items || [] + + // Extract addresses from pending files + const pendingAddresses = allFiles + .filter((file: any) => file.path === 'pending/' || file.path?.startsWith('pending/')) + .map((file: any) => file.name.replace(/\.(jpg|png|heic|webp)$/i, '')) + .filter((addr: string) => addr && addr.length > 0) + + // Cross-reference: find members with pending files + const toSync = members.filter((member) => pendingAddresses.includes(member)) + + if (toSync.length > 0) { + // Limit to 50 addresses per request (backend limit) + const addresses = toSync.slice(0, 50) + + // Call worker to move files + const response = await fetch(`${WORKER_URL}/sync-approved-members`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Origin: window.location.origin + }, + body: JSON.stringify({ addresses }) + }) + + const result = await response.json() + + if (result.success && result.moved && result.moved.length > 0) { + // Notify user and reload page to show new tattoos + toast.success( + `${result.moved.length} new tattoo${result.moved.length > 1 ? 's' : ''} approved! Reloading gallery...` + ) + setTimeout(() => { + window.location.reload() + }, 2000) + } + } + } catch (error) { + // Silent fail - will retry on next page load + } finally { + setSyncAttempted(true) + } + } + + syncApprovedMembers() + }, [members, syncAttempted]) + + return tattooMap.size === 0 ? ( ) : ( - {members.map((member) => ( - - ))} + {members.map((member) => { + const imageUrl = tattooMap.get(member) + return + })} ) } const ProofOfInkImage = ({ - gateway, - folderHash, member, + imageUrl, api }: { - gateway: string - folderHash: string member: string + imageUrl: string | undefined api: ApiPromise }): JSX.Element => { - const [error, setError] = useState(false) - const [loading, setLoading] = useState(true) + const [error, setError] = useState(!imageUrl) + const [loading, setLoading] = useState(!!imageUrl) const [selectedImage, setSelectedImage] = useState('') const [modalShow, setModalShow] = useState(false) const loadingTimeout = 10000 useEffect(() => { + if (!imageUrl) { + setError(true) + setLoading(false) + return + } + const timer = setTimeout(() => { if (loading && !error) { setError(true) @@ -80,7 +159,7 @@ const ProofOfInkImage = ({ }, loadingTimeout) return () => clearTimeout(timer) - }, [loading, error]) + }, [loading, error, imageUrl]) const handleImageClick = (image: string) => { if (loading || error) return @@ -92,24 +171,23 @@ const ProofOfInkImage = ({ <> - handleImageClick(imageUrl({ gateway, folderHash, member }))} - $clickable={!error && !loading} - > + imageUrl && handleImageClick(imageUrl)} $clickable={!error && !loading}> {loading && !error && ( )} {!loading && error &&

Missing Proof-of-Ink}
-            <img
-              src={imageUrl({ gateway, folderHash, member })}
-              onLoad={() => {
-                setError(false)
-                setLoading(false)
-              }}
-              style={loading || error ? { display: 'none' } : {}}
-            />
+            {imageUrl && (
+              <img
+                src={imageUrl}
+                onLoad={() => {
+                  setError(false)
+                  setLoading(false)
+                }}
+                style={loading || error ? { display: 'none' } : {}}
+              />
+            )}
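The gallery's auto-sync effect above posts pending member addresses to the worker's `/sync-approved-members` route and reloads once files are moved. A sketch of that request/response contract as the frontend uses it (the worker implementation lives in `workers/poi-upload-worker` and is not part of this diff, so the exact response shape is an assumption):

```ts
// Response fields inferred from the GalleryPage code; the worker may return more.
interface SyncResponse {
  success: boolean
  moved?: string[] // addresses whose files moved from pending/ to approved/
}

async function syncApprovedMembers(workerUrl: string, addresses: string[]): Promise<SyncResponse> {
  const response = await fetch(`${workerUrl}/sync-approved-members`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // The backend accepts at most 50 addresses per request, so slice before sending.
    body: JSON.stringify({ addresses: addresses.slice(0, 50) })
  })
  return (await response.json()) as SyncResponse
}
```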
diff --git a/src/pages/explore/ProofOfInkPage/SubmitPage.tsx b/src/pages/explore/ProofOfInkPage/SubmitPage.tsx new file mode 100644 index 0000000..e731ca3 --- /dev/null +++ b/src/pages/explore/ProofOfInkPage/SubmitPage.tsx @@ -0,0 +1,239 @@ +import { ApiPromise } from '@polkadot/api' +import { useState, useEffect } from 'react' +import { Container, Row, Col, Form, Button, Alert } from 'react-bootstrap' +import toast from 'react-hot-toast' +import styled from 'styled-components' +import { useAccount } from '../../../account/AccountContext' +import { apillonClient } from '../../../services/apillonClient' + +const MAX_FILE_SIZE = 2 * 1024 * 1024 // 2MB in bytes + +type SubmitPageProps = { + api: ApiPromise | null +} + +const SubmitPage = ({ api }: SubmitPageProps): JSX.Element => { + const { activeAccount } = useAccount() + const [selectedFile, setSelectedFile] = useState(null) + const [uploading, setUploading] = useState(false) + const [isCandidate, setIsCandidate] = useState(false) + const [isMember, setIsMember] = useState(false) + const [checkingStatus, setCheckingStatus] = useState(true) + + // Check if user is a candidate or member + useEffect(() => { + const checkMembershipStatus = async () => { + if (!api || !activeAccount) { + setCheckingStatus(false) + return + } + + try { + setCheckingStatus(true) + + // Check if user is a member + const memberEntry = await api.query.society.members(activeAccount.address) + const isMemberResult = memberEntry.isSome + setIsMember(isMemberResult) + + // Check if user is a candidate + const candidatesEntries = await api.query.society.candidates.entries() + const candidateAddresses = candidatesEntries.map(([key]) => key.args[0].toString()) + const isCandidateResult = candidateAddresses.includes(activeAccount.address) + setIsCandidate(isCandidateResult) + } catch (error) { + // Silent fail - user will see "not eligible" message + } finally { + setCheckingStatus(false) + } + } + + checkMembershipStatus() + }, [api, activeAccount]) + + const handleFileChange = (event: React.ChangeEvent) => { + const file = event.target.files?.[0] + if (file) { + // Validate file type + if (!file.type.startsWith('image/')) { + toast.error('Please select a valid image file (JPG or PNG)') + return + } + // Validate file size (2MB max) + if (file.size > MAX_FILE_SIZE) { + toast.error('File size must be less than 2MB. 
Please compress your image before uploading.')
+        return
+      }
+      setSelectedFile(file)
+    }
+  }
+
+  const handleSubmit = async (event: React.FormEvent) => {
+    event.preventDefault()
+
+    if (!selectedFile || !activeAccount) {
+      return
+    }
+
+    // Validate user is candidate or member
+    if (!isCandidate && !isMember) {
+      toast.error('Only Society candidates and members can submit proof-of-ink')
+      return
+    }
+
+    setUploading(true)
+
+    try {
+      // Step 1: Check for existing tattoo using read-only Apillon API
+      const memberHash = activeAccount.address
+      const listData = await apillonClient.listFiles()
+      const allFiles = listData.data?.items || []
+
+      const fileName = `${memberHash}.jpg`
+
+      // Check for existing tattoo in both pending and approved folders
+      const existingApproved = allFiles.find(
+        (file: any) => file.name === fileName && (file.path === 'approved/' || file.path?.startsWith('approved/'))
+      )
+
+      const existingPending = allFiles.find(
+        (file: any) => file.name === fileName && (file.path === 'pending/' || file.path?.startsWith('pending/'))
+      )
+
+      // Step 2: Determine upload path based on membership status and existing files
+      let directoryPath: 'pending' | 'approved'
+      let successMessage: string
+
+      if (existingApproved) {
+        // Replace existing approved tattoo
+        const confirmed = window.confirm('Replace your existing tattoo?')
+        if (!confirmed) {
+          setUploading(false)
+          setSelectedFile(null)
+          const fileInput = document.getElementById('file-input') as HTMLInputElement
+          if (fileInput) fileInput.value = ''
+          return
+        }
+        directoryPath = 'approved'
+        successMessage = 'Tattoo updated!'
+      } else if (existingPending) {
+        // Replace existing pending submission
+        const confirmed = window.confirm('You already have a pending submission. Replace it with this new image?')
+        if (!confirmed) {
+          setUploading(false)
+          setSelectedFile(null)
+          const fileInput = document.getElementById('file-input') as HTMLInputElement
+          if (fileInput) fileInput.value = ''
+          return
+        }
+        // Members upload directly to approved, candidates to pending
+        directoryPath = isMember ? 'approved' : 'pending'
+        successMessage = isMember ? 'Tattoo submitted to approved!' : 'Pending submission updated!'
+      } else {
+        // New submission: Members go to approved, candidates to pending
+        if (isMember) {
+          directoryPath = 'approved'
+          successMessage = 'Tattoo submitted to approved folder!'
+        } else {
+          directoryPath = 'pending'
+          successMessage = 'Submitted for approval! Your tattoo will be approved after you become a member.'
+        }
+      }
+
+      // Step 3: Upload via Cloudflare Worker
+      const result = await apillonClient.uploadFile(selectedFile, fileName, directoryPath)
+
+      if (!result.success) {
+        throw new Error(result.message || 'Upload failed')
+      }
+
+      // Step 4: Show success message
+      toast.success(successMessage)
+
+      // Step 5: Clear form
+      setSelectedFile(null)
+      const fileInput = document.getElementById('file-input') as HTMLInputElement
+      if (fileInput) fileInput.value = ''
+    } catch (error) {
+      toast.error(`Upload failed: ${(error as Error).message}`)
+    } finally {
+      setUploading(false)
+    }
+  }
+
+  const isFormDisabled = !activeAccount || uploading || checkingStatus || (!isCandidate && !isMember)
+
+  return (
+    <Container>
+      <Row>
+        <Col>
+          <StyledCard>
+            <h1>Submit Proof-of-Ink</h1>
+
+            {!activeAccount && (
+              <Alert>
+                <Alert.Heading>Wallet Not Connected</Alert.Heading>
+                Please connect your wallet to submit your tattoo.
+              </Alert>
+            )}
+
+            {activeAccount && !checkingStatus && !isCandidate && !isMember && (
+              <Alert>
+                <Alert.Heading>Not Eligible</Alert.Heading>
+                Only Society candidates and members can submit proof-of-ink. Please apply to join the Society first.
+              </Alert>
+            )}
+
+            {activeAccount && !checkingStatus && isCandidate && !isMember && (
+              <Alert>
+                <Alert.Heading>Candidate</Alert.Heading>
+                Your submission will be reviewed when you become a member.
+              </Alert>
+            )}
+
+            {activeAccount && !checkingStatus && isMember && (
+              <Alert>
+                <Alert.Heading>Member</Alert.Heading>
+                Your tattoo will appear in the gallery after upload.
+              </Alert>
+            )}
+
+            <Form onSubmit={handleSubmit}>
+              <Form.Group>
+                <Form.Label>Select Tattoo Image</Form.Label>
+                <Form.Control
+                  id="file-input"
+                  type="file"
+                  accept="image/jpeg, image/png"
+                  onChange={handleFileChange}
+                  disabled={isFormDisabled}
+                />
+                <Form.Text>
+                  Accepted formats: JPG, PNG. Maximum size: 2MB. Please compress your image if it exceeds this limit.
+                </Form.Text>
+              </Form.Group>
+
+              <Button type="submit" disabled={isFormDisabled || !selectedFile}>
+                {uploading ? 'Uploading...' : 'Submit'}
+              </Button>
+            </Form>
+          </StyledCard>
+        </Col>
+      </Row>
+    </Container>
+ ) +} + +const StyledCard = styled.div` + background-color: ${(props) => props.theme.colors.lightGrey}; + border-radius: 10px; + padding: 2rem; + margin-top: 2rem; + box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2); +` + +export { SubmitPage } diff --git a/src/pages/explore/ProofOfInkPage/index.tsx b/src/pages/explore/ProofOfInkPage/index.tsx index 3e0d151..106e755 100644 --- a/src/pages/explore/ProofOfInkPage/index.tsx +++ b/src/pages/explore/ProofOfInkPage/index.tsx @@ -4,6 +4,7 @@ import { ExamplesPage } from './ExamplesPage' import { GalleryPage } from './GalleryPage' import { NextHeadPage } from './NextHeadPage' import { RulesPage } from './RulesPage' +import { SubmitPage } from './SubmitPage' import { NavigateWithQuery } from '../../../components/NavigateWithQuery' type ProofOfInkPageProps = { @@ -19,6 +20,7 @@ const ProofOfInkPage = ({ api }: ProofOfInkPageProps): JSX.Element => { } /> } /> } /> + } /> ) diff --git a/src/services/apillonClient.ts b/src/services/apillonClient.ts new file mode 100644 index 0000000..68eda53 --- /dev/null +++ b/src/services/apillonClient.ts @@ -0,0 +1,163 @@ +/** + * Apillon Storage Client (Read-Only) + * + * This client uses a read-only API key to list files in the Apillon bucket. + * Upload operations are handled by the Cloudflare Worker to protect write credentials. + */ + +const APILLON_API_BASE = 'https://api.apillon.io' + +interface ApillonFile { + name: string + path: string + CID: string + link: string + size: number + dateCreated: string +} + +interface ApillonListResponse { + data: { + items: ApillonFile[] + } +} + +class ApillonClient { + private apiKey: string + private apiSecret: string + private bucketUuid: string + + constructor() { + this.apiKey = process.env.REACT_APP_APILLON_API_KEY || '' + this.apiSecret = process.env.REACT_APP_APILLON_API_SECRET || '' + this.bucketUuid = process.env.REACT_APP_APILLON_BUCKET_UUID || '' + + if (!this.apiKey || !this.apiSecret || !this.bucketUuid) { + console.warn('Apillon credentials not configured') + } + } + + /** + * List all files in the bucket (read-only operation) + * @param limit - Number of items per page (max 1000) + * @param page - Page number (default 1) + */ + public async listFiles(limit = 1000, page = 1): Promise { + const url = `${APILLON_API_BASE}/storage/buckets/${this.bucketUuid}/files?limit=${limit}&page=${page}` + + const response = await fetch(url, { + method: 'GET', + headers: { + Authorization: this.getAuthHeader(), + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Failed to list files: ${errorText}`) + } + + return await response.json() + } + + /** + * Upload file via Cloudflare Worker (write operation) + * + * This uses a two-step process to avoid worker timeouts: + * 1. Request signed S3 URL from worker + * 2. Upload file directly to S3 + * 3. 
Tell worker to complete the session + * + * @param file - File object to upload + * @param fileName - Target filename + * @param directoryPath - Target directory ('pending' or 'approved') + */ + public async uploadFile( + file: File, + fileName: string, + directoryPath: 'pending' | 'approved' + ): Promise<{ success: boolean; message: string }> { + const workerUrl = process.env.REACT_APP_CLOUDFLARE_WORKER_URL + + if (!workerUrl) { + throw new Error('Cloudflare Worker URL not configured') + } + + try { + // Step 1: Request signed URL from worker + const initiateResponse = await fetch(`${workerUrl}/initiate`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + fileName, + contentType: file.type || 'image/jpeg', + directoryPath + }) + }) + + if (!initiateResponse.ok) { + const errorData = await initiateResponse.json() + throw new Error(errorData.error || 'Failed to initiate upload') + } + + const { sessionUuid, uploadUrl } = await initiateResponse.json() + + // Step 2: Upload file directly to S3 using signed URL + const s3Response = await fetch(uploadUrl, { + method: 'PUT', + headers: { + 'Content-Type': file.type || 'image/jpeg' + }, + body: file + }) + + if (!s3Response.ok) { + throw new Error('Failed to upload to storage') + } + + // Step 3: Complete upload session via worker + // Add a small delay to allow S3 to process the file + await new Promise((resolve) => setTimeout(resolve, 2000)) + + const completeResponse = await fetch(`${workerUrl}/complete`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + sessionUuid + }) + }) + + if (!completeResponse.ok) { + // File was uploaded but Apillon hasn't detected it yet + // This is a timing issue - file will be processed eventually + return { + success: true, + message: 'Upload successful (processing...)' + } + } + + return { + success: true, + message: 'Upload successful' + } + } catch (error) { + throw error + } + } + + /** + * Get Basic Auth header + */ + private getAuthHeader(): string { + const credentials = btoa(`${this.apiKey}:${this.apiSecret}`) + return `Basic ${credentials}` + } +} + +// Export singleton instance +export const apillonClient = new ApillonClient() diff --git a/workers/poi-upload-worker/.gitignore b/workers/poi-upload-worker/.gitignore new file mode 100644 index 0000000..41f15d4 --- /dev/null +++ b/workers/poi-upload-worker/.gitignore @@ -0,0 +1,23 @@ +# Dependencies +node_modules/ +package-lock.json +yarn.lock + +# Wrangler +.wrangler/ +.dev.vars +.mf/ + +# Build +dist/ +*.log + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db diff --git a/workers/poi-upload-worker/README.md b/workers/poi-upload-worker/README.md new file mode 100644 index 0000000..77de5cd --- /dev/null +++ b/workers/poi-upload-worker/README.md @@ -0,0 +1,70 @@ +# POI Upload Worker + +Cloudflare Worker for Proof-of-Ink image uploads to Apillon Storage. + +## Features + +- Secure proxy for Apillon API (protects credentials) +- Two-phase upload workflow (initiate → upload → complete) +- Auto-sync: moves approved member images from `pending/` to `approved/` +- CORS-enabled for frontend access + +## Endpoints + +### POST /initiate + +Returns a signed upload URL. + +**Request:** +```json +{ + "fileName": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY.jpg", + "contentType": "image/jpeg", + "directoryPath": "pending" +} +``` + +### POST /complete + +Finalizes an upload session. + +**Request:** +```json +{ + "sessionUuid": "..." 
+} +``` + +### POST /sync-approved-members + +Moves images from `pending/` to `approved/` for provided addresses. + +**Request:** +```json +{ + "addresses": ["5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"] +} +``` + +**Note:** Frontend validates membership before calling this endpoint. + +## Environment Variables + +Set via `wrangler secret put`: +- `APILLON_API_KEY` +- `APILLON_API_SECRET` +- `APILLON_BUCKET_UUID` +- `ALLOWED_ORIGINS` + +## Development + +```bash +npm install +npm run dev # Run locally +npm run deploy:prod # Deploy to production +npm run tail:prod # View logs +``` + +## File Naming + +Format: `{address}.{ext}` (e.g., `5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY.jpg`) diff --git a/workers/poi-upload-worker/package.json b/workers/poi-upload-worker/package.json new file mode 100644 index 0000000..11c9e19 --- /dev/null +++ b/workers/poi-upload-worker/package.json @@ -0,0 +1,28 @@ +{ + "name": "poi-upload-worker", + "version": "1.0.0", + "description": "Cloudflare Worker for secure Proof-of-Ink image uploads to Apillon", + "main": "src/index.ts", + "scripts": { + "dev": "wrangler dev", + "deploy": "wrangler deploy", + "deploy:dev": "wrangler deploy --env development", + "deploy:prod": "wrangler deploy --env production", + "tail": "wrangler tail", + "tail:dev": "wrangler tail --env development", + "tail:prod": "wrangler tail --env production" + }, + "keywords": [ + "cloudflare", + "worker", + "apillon", + "upload", + "proxy" + ], + "author": "Kappa Sigma Mu", + "license": "MIT", + "devDependencies": { + "@cloudflare/workers-types": "^4.20241127.0", + "wrangler": "^4.43.0" + } +} diff --git a/workers/poi-upload-worker/src/index.ts b/workers/poi-upload-worker/src/index.ts new file mode 100644 index 0000000..e19f3b2 --- /dev/null +++ b/workers/poi-upload-worker/src/index.ts @@ -0,0 +1,497 @@ +/** + * Cloudflare Worker for Proof-of-Ink Image Uploads + * + * This worker acts as a secure proxy between the frontend and Apillon Storage API. + * It protects write-enabled API credentials while allowing the frontend to upload images. 
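+ *
+ * Expected call sequence from the frontend: POST /initiate to obtain a signed
+ * S3 URL, a direct PUT of the image to that URL, then POST /complete to close
+ * the Apillon upload session.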
+ * + * Environment Variables Required: + * - APILLON_API_KEY: Write-enabled Apillon API key + * - APILLON_API_SECRET: Apillon API secret + * - APILLON_BUCKET_UUID: Target bucket UUID + * - ALLOWED_ORIGINS: Comma-separated list of allowed CORS origins + */ + +export interface Env { + APILLON_API_KEY: string + APILLON_API_SECRET: string + APILLON_BUCKET_UUID: string + ALLOWED_ORIGINS: string +} + +interface InitiateUploadRequest { + fileName: string + contentType: string + directoryPath: 'pending' | 'approved' +} + +interface CompleteUploadRequest { + sessionUuid: string +} + +interface ApillonUploadResponse { + data: { + sessionUuid: string + files: Array<{ + url: string + fileUuid: string + }> + } +} + +interface ApillonCompleteResponse { + data: { + success: boolean + } +} + +export default { + async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { + const url = new URL(request.url) + const path = url.pathname + + // CORS preflight + if (request.method === 'OPTIONS') { + return handleCORS(request, env) + } + + // Validate Origin + const origin = request.headers.get('Origin') + const allowedOrigins = env.ALLOWED_ORIGINS.split(',').map(o => o.trim()) + if (!origin || !allowedOrigins.includes(origin)) { + return jsonResponse({ error: 'Unauthorized origin' }, 403) + } + + try { + // Route based on path + if (path === '/initiate') { + return handleInitiateUpload(request, env, origin) + } else if (path === '/complete') { + return handleCompleteUpload(request, env, origin) + } else if (path === '/sync-approved-members') { + return handleSyncApprovedMembers(request, env, origin) + } else { + return jsonResponse({ error: 'Invalid endpoint' }, 404, origin) + } + + } catch (error) { + return jsonResponse({ + error: 'Internal server error', + details: (error as Error).message + }, 500, origin) + } + } +} + +/** + * Handle initiate upload request - returns signed S3 URL to frontend + */ +async function handleInitiateUpload( + request: Request, + env: Env, + origin: string +): Promise { + const body = await request.json() as InitiateUploadRequest + + if (!body.fileName || !body.contentType || !body.directoryPath) { + return jsonResponse({ error: 'Missing required fields' }, 400, origin) + } + + // Validate directoryPath + if (body.directoryPath !== 'pending' && body.directoryPath !== 'approved') { + return jsonResponse({ error: 'Invalid directoryPath' }, 400, origin) + } + + // Initiate upload session with Apillon + const initiateResponse = await apillonInitiateUpload( + env, + body.fileName, + body.contentType, + body.directoryPath + ) + + if (!initiateResponse.data?.sessionUuid || !initiateResponse.data?.files?.[0]?.url) { + return jsonResponse({ error: 'Failed to initiate upload session' }, 500, origin) + } + + const { sessionUuid, files } = initiateResponse.data + const uploadUrl = files[0].url + const fileUuid = files[0].fileUuid + + // Return signed URL to frontend + return jsonResponse({ + success: true, + sessionUuid, + uploadUrl, + fileUuid + }, 200, origin) +} + +/** + * Handle complete upload request - marks session as complete in Apillon + */ +async function handleCompleteUpload( + request: Request, + env: Env, + origin: string +): Promise { + const body = await request.json() as CompleteUploadRequest + + if (!body.sessionUuid) { + return jsonResponse({ error: 'Missing sessionUuid' }, 400, origin) + } + + // Complete upload session with Apillon + const completeResponse = await apillonCompleteUpload(env, body.sessionUuid) + + if (!completeResponse.data?.success) { + 
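+      // Note: the file is already on S3 at this point; the frontend treats a
+      // failure here as non-fatal, since Apillon will still process the upload.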
return jsonResponse({ error: 'Failed to complete upload session' }, 500, origin) + } + + return jsonResponse({ + success: true, + message: 'Upload completed successfully' + }, 200, origin) +} + +/** + * Initiate upload session with Apillon + */ +async function apillonInitiateUpload( + env: Env, + fileName: string, + contentType: string, + directoryPath: string +): Promise { + const url = `https://api.apillon.io/storage/buckets/${env.APILLON_BUCKET_UUID}/upload` + const auth = btoa(`${env.APILLON_API_KEY}:${env.APILLON_API_SECRET}`) + + const requestBody = { + files: [{ + fileName: fileName, + contentType: contentType, + path: directoryPath + }], + directoryPath: directoryPath + } + + const response = await fetch(url, { + method: 'POST', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify(requestBody) + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Apillon initiate upload failed: ${errorText}`) + } + + return await response.json() +} + +/** + * Complete upload session with Apillon + */ +async function apillonCompleteUpload( + env: Env, + sessionUuid: string +): Promise { + const url = `https://api.apillon.io/storage/buckets/${env.APILLON_BUCKET_UUID}/upload/${sessionUuid}/end` + + const auth = btoa(`${env.APILLON_API_KEY}:${env.APILLON_API_SECRET}`) + + const response = await fetch(url, { + method: 'POST', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Apillon complete upload failed: ${errorText}`) + } + + return await response.json() +} + +/** + * Handle sync approved members request - moves images from pending to approved + */ +async function handleSyncApprovedMembers( + request: Request, + env: Env, + origin: string +): Promise { + if (request.method !== 'POST') { + return jsonResponse({ error: 'Method not allowed' }, 405, origin) + } + + try { + const body = await request.json() as { addresses?: string[] } + const addresses: string[] = body.addresses || [] + + if (!Array.isArray(addresses) || addresses.length === 0) { + return jsonResponse({ error: 'Invalid addresses array' }, 400, origin) + } + + // Limit to 50 addresses per request + const limitedAddresses = addresses.slice(0, 50) + + // First, list all files in pending folder to find which addresses have images + const pendingFiles = await listFilesInFolder(env, 'pending') + + // Extract address from each filename (format: {address}.{ext}) + const pendingMap = new Map() + for (const file of pendingFiles) { + const match = file.name.match(/^(.+)\.([^.]+)$/) + if (match) { + const [, address, extension] = match + pendingMap.set(address, { + fileName: file.name, + fileUuid: file.uuid, + extension, + link: file.link + }) + } + } + + const results: { + moved: Array<{ address: string; from: string; to: string }> + skipped: Array<{ address: string; reason: string }> + errors: Array<{ address: string; error: string }> + } = { + moved: [], + skipped: [], + errors: [] + } + + for (const address of limitedAddresses) { + try { + const fileInfo = pendingMap.get(address) + + if (!fileInfo) { + results.skipped.push({ + address, + reason: 'No pending image found' + }) + continue + } + + // Move file from pending to approved + await moveFile( + env, + `pending/${fileInfo.fileName}`, + `approved/${fileInfo.fileName}`, + fileInfo.fileUuid, + fileInfo.link + ) + + results.moved.push({ + address, + from: 
`pending/${fileInfo.fileName}`, + to: `approved/${fileInfo.fileName}` + }) + } catch (error) { + results.errors.push({ + address, + error: (error as Error).message + }) + } + } + + return jsonResponse( + { + success: true, + ...results + }, + 200, + origin + ) + } catch (error) { + return jsonResponse( + { + success: false, + error: 'Sync failed', + details: (error as Error).message + }, + 500, + origin + ) + } +} + +/** + * List files in a specific folder (pending or approved) + */ +async function listFilesInFolder( + env: Env, + folder: string +): Promise> { + const url = `https://api.apillon.io/storage/buckets/${env.APILLON_BUCKET_UUID}/files` + const auth = btoa(`${env.APILLON_API_KEY}:${env.APILLON_API_SECRET}`) + + const response = await fetch(url, { + method: 'GET', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Failed to list files: ${errorText}`) + } + + const data = await response.json() as { data: { items: Array<{ + name: string + fileUuid: string + path?: string + link?: string + CID?: string + CIDv1?: string + }> } } + + // Filter files by folder path (Apillon uses "pending/", "approved/" format) + return data.data.items + .filter(file => { + const filePath = file.path || '' + // Match "pending/" or "approved/" + return filePath === `${folder}/` + }) + .map(file => ({ + name: file.name, + uuid: file.fileUuid, + link: file.link, + cid: file.CIDv1 || file.CID + })) +} + +/** + * Move file from one location to another + * Note: Apillon doesn't have a native move operation, so we: + * 1. Download the file from its current IPFS location (using authenticated link) + * 2. Upload to new location (approved folder) + * 3. 
Delete from old location (pending folder) + */ +async function moveFile( + env: Env, + _fromPath: string, + toPath: string, + fileUuid: string, + fileLink?: string +): Promise { + // Step 1: Download the file content using the authenticated link from Apillon + if (!fileLink) { + throw new Error('File link is required to download file') + } + + const fileResponse = await fetch(fileLink) + if (!fileResponse.ok) { + throw new Error(`Failed to download file: ${fileResponse.statusText}`) + } + const fileBlob = await fileResponse.blob() + + // Step 2: Extract filename and content type + const fileName = toPath.split('/').pop() || 'unknown' + const contentType = fileResponse.headers.get('content-type') || 'application/octet-stream' + + // Step 3: Upload to new location + const targetFolder = toPath.split('/')[0] + const uploadSession = await apillonInitiateUpload(env, fileName, contentType, targetFolder) + + if (!uploadSession.data?.sessionUuid || !uploadSession.data?.files?.[0]?.url) { + throw new Error('Failed to initiate upload to new location') + } + + const uploadUrl = uploadSession.data.files[0].url + + // Step 4: Upload the file to the signed URL + const uploadResponse = await fetch(uploadUrl, { + method: 'PUT', + body: fileBlob, + headers: { + 'Content-Type': contentType + } + }) + + if (!uploadResponse.ok) { + throw new Error(`Failed to upload file: ${uploadResponse.statusText}`) + } + + // Step 5: Complete the upload session + await apillonCompleteUpload(env, uploadSession.data.sessionUuid) + + // Step 6: Delete from old location (pending folder) + await deleteApillonFile(env, fileUuid) +} + +/** + * Delete a file from Apillon storage + */ +async function deleteApillonFile(env: Env, fileUuid: string): Promise { + const url = `https://api.apillon.io/storage/buckets/${env.APILLON_BUCKET_UUID}/files/${fileUuid}` + const auth = btoa(`${env.APILLON_API_KEY}:${env.APILLON_API_SECRET}`) + + const response = await fetch(url, { + method: 'DELETE', + headers: { + 'Authorization': `Basic ${auth}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Failed to delete file: ${errorText}`) + } +} + +/** + * Handle CORS preflight requests + */ +function handleCORS(request: Request, env: Env): Response { + const origin = request.headers.get('Origin') + const allowedOrigins = env.ALLOWED_ORIGINS?.split(',').map(o => o.trim()) || [] + + if (!origin || !allowedOrigins.includes(origin)) { + return new Response(null, { status: 403 }) + } + + return new Response(null, { + status: 204, + headers: { + 'Access-Control-Allow-Origin': origin, + 'Access-Control-Allow-Methods': 'POST, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type', + 'Access-Control-Max-Age': '86400' + } + }) +} + +/** + * Helper to create JSON responses with CORS headers + */ +function jsonResponse(data: any, status: number, origin?: string): Response { + const headers: Record = { + 'Content-Type': 'application/json' + } + + if (origin) { + headers['Access-Control-Allow-Origin'] = origin + } + + return new Response(JSON.stringify(data), { + status, + headers + }) +} diff --git a/workers/poi-upload-worker/wrangler.toml b/workers/poi-upload-worker/wrangler.toml new file mode 100644 index 0000000..a01fee6 --- /dev/null +++ b/workers/poi-upload-worker/wrangler.toml @@ -0,0 +1,27 @@ +# Cloudflare Worker Configuration for POI Upload Proxy +name = "poi-upload-worker" +main = "src/index.ts" +compatibility_date = "2024-01-01" + +# Environment Variables (set via 
`wrangler secret put `) +# Required secrets: +# - APILLON_API_KEY +# - APILLON_API_SECRET +# - APILLON_BUCKET_UUID +# - ALLOWED_ORIGINS (comma-separated, e.g., "https://kappasigmamu.github.io,http://localhost:3000") + +# Development Configuration +[env.development] +name = "poi-upload-worker-dev" +vars = { ENVIRONMENT = "development" } + +# Production Configuration +[env.production] +name = "poi-upload-worker-prod" +vars = { ENVIRONMENT = "production" } + +[observability] +enabled = true + +[observability.logs] +enabled = true \ No newline at end of file
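
The gallery side of this flow only needs the read-only client. Below is a minimal sketch of turning `apillonClient.listFiles()` output into gallery entries; the `GalleryEntry` shape, the helper name, and the `approved/` filter wiring are illustrative assumptions, not part of this diff:

```typescript
import { apillonClient } from '../../../services/apillonClient'

type GalleryEntry = { address: string; url: string }

// List the bucket with the read-only key and keep only files in approved/
async function fetchApprovedTattoos(): Promise<GalleryEntry[]> {
  const listing = await apillonClient.listFiles()
  return (listing.data?.items || [])
    .filter((file) => (file.path || '').startsWith('approved/'))
    .map((file) => ({
      // File names follow {address}.{ext}, so stripping the extension recovers the address
      address: file.name.replace(/\.[^.]+$/, ''),
      url: file.link
    }))
}
```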
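The `/sync-approved-members` endpoint expects the caller to supply already-validated member addresses. A hedged sketch of how the frontend might drive it, reusing the Society queries seen in `SubmitPage`; when and where this actually runs is an assumption:

```typescript
import { ApiPromise } from '@polkadot/api'

async function syncApprovedMembers(api: ApiPromise): Promise<void> {
  const workerUrl = process.env.REACT_APP_CLOUDFLARE_WORKER_URL
  if (!workerUrl) throw new Error('Cloudflare Worker URL not configured')

  // Collect current member addresses from the Society pallet
  const entries = await api.query.society.members.entries()
  const addresses = entries.map(([key]) => key.args[0].toString())

  // The worker caps each request at 50 addresses, so batch accordingly
  for (let i = 0; i < addresses.length; i += 50) {
    await fetch(`${workerUrl}/sync-approved-members`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ addresses: addresses.slice(i, i + 50) })
    })
  }
}
```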
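The worker README documents request bodies only; for reference, the handlers in `workers/poi-upload-worker/src/index.ts` return these shapes on success. `/initiate` responds with the session and signed URL (placeholder values below), and `/complete` responds with `{ "success": true, "message": "Upload completed successfully" }`:

```json
{
  "success": true,
  "sessionUuid": "...",
  "uploadUrl": "https://...",
  "fileUuid": "..."
}
```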
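For local development, `wrangler dev` can read secrets from a `.dev.vars` file in the worker directory (already listed in the worker's `.gitignore`). A sample with placeholder values, mirroring the variable names in the `Env` interface:

```
APILLON_API_KEY=<write-enabled key>
APILLON_API_SECRET=<secret>
APILLON_BUCKET_UUID=<bucket uuid>
ALLOWED_ORIGINS=http://localhost:3000
```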