Cropping mostly done
.env.example (+13)
@@ -1,2 +1,15 @@
 # Your Google Cloud OAuth 2.0 Client ID
 VITE_GOOGLE_CLIENT_ID="YOUR_GOOGLE_CLIENT_ID_HERE"
+
+# Face Detection Crop Configuration
+# Crop aspect ratio (width:height) - e.g., 1.0 for square, 1.5 for 3:2 ratio
+VITE_CROP_RATIO=1.0
+
+# Face offset from center (as percentage of crop dimensions)
+# Positive values move the face toward bottom-right, negative toward top-left
+VITE_FACE_OFFSET_X=0.0
+VITE_FACE_OFFSET_Y=-0.1
+
+# Crop scale multiplier based on face width
+# 1.0 = crop width equals face width, 2.0 = crop is 2x face width
+VITE_CROP_SCALE=2.5
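How these four settings interact, as a minimal sketch; cropFromFace is an illustrative helper, not part of this commit, and the real logic lives in PhotoCard.svelte below:

// Sketch only: how the crop env vars map a detected face box to a crop rectangle.
interface Box { x: number; y: number; width: number; height: number }

function cropFromFace(face: Box, ratio = 1.0, offsetX = 0.0, offsetY = -0.1, scale = 2.5): Box {
	const width = face.width * scale;                             // VITE_CROP_SCALE
	const height = width / ratio;                                 // VITE_CROP_RATIO (width:height)
	const centerX = face.x + face.width / 2 + width * offsetX;    // VITE_FACE_OFFSET_X
	const centerY = face.y + face.height / 2 + height * offsetY;  // VITE_FACE_OFFSET_Y
	return { x: centerX - width / 2, y: centerY - height / 2, width, height };
}

// e.g. with the defaults above, a 100px-wide face yields a 250x250 crop
// whose center sits 25px above the face center (offsetY = -0.1).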
.github/copilot-instructions.md (vendored, +4)
@@ -2,7 +2,11 @@
 - You are a helpful AI assistant that helps developers write code.
 - This code is written in Svelte 5
 - It's important to only use modern Svelte 5 syntax, runes, and features.
+- Do not use $: or event dispatching; both are deprecated
+- Use $effect, $state, $derived
+- Pass functions as props instead of dispatching events
 - Use styling from ".github/styling.md" for any UI components.
 - Refer to the ".github/core-instructions.md" for the overall structure of the application.
 - Generate ".github/done.md" file to see what is done and what is not. Check it when you start and finish a task.
 - Remain consistent in styling and code structure.
+- Avoid unnecessary iterations. If a problem is mostly solved, stop.
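A minimal sketch of the component style these rules prescribe (illustrative example, not part of this commit): runes for state, and a callback prop in place of a dispatched event.

<!-- Counter.svelte: sketch of the rune/callback style the rules ask for -->
<script lang="ts">
	// Callback prop instead of createEventDispatcher
	let { onChange }: { onChange: (n: number) => void } = $props();

	let count = $state(0);
	let doubled = $derived(count * 2);

	$effect(() => {
		onChange(count); // runs whenever count changes
	});
</script>

<button onclick={() => count++}>{count} (doubled: {doubled})</button>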
package-lock.json (generated, +11)
@@ -8,6 +8,7 @@
       "name": "esn-card-generator",
       "version": "0.0.1",
       "dependencies": {
+        "@tensorflow-models/blazeface": "^0.1.0",
         "@tensorflow/tfjs": "^4.22.0",
         "@tensorflow/tfjs-backend-webgl": "^4.22.0",
         "@types/gapi": "^0.0.47",
@@ -1349,6 +1350,16 @@
         "vite": "^5.2.0 || ^6 || ^7"
       }
     },
+    "node_modules/@tensorflow-models/blazeface": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow-models/blazeface/-/blazeface-0.1.0.tgz",
+      "integrity": "sha512-Qc5Wii8/OE5beC7XfehkhF9SEFLaPbVKnxxalV0T9JXsUynXqvLommc9Eko7b8zXKy4SJ1BtVlcX2cmCzQrn9A==",
+      "license": "Apache-2.0",
+      "peerDependencies": {
+        "@tensorflow/tfjs-converter": "^4.10.0",
+        "@tensorflow/tfjs-core": "^4.10.0"
+      }
+    },
     "node_modules/@tensorflow/tfjs": {
       "version": "4.22.0",
       "resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-4.22.0.tgz",
package.json (+1)
@@ -31,6 +31,7 @@
   "dependencies": {
     "@tensorflow/tfjs": "^4.22.0",
     "@tensorflow/tfjs-backend-webgl": "^4.22.0",
+    "@tensorflow-models/blazeface": "^0.1.0",
     "@types/gapi": "^0.0.47",
     "@types/gapi.client.drive": "^3.0.15",
     "@types/gapi.client.sheets": "^4.0.20201031",
src/app.html (+1)
@@ -1,6 +1,7 @@
 <!doctype html>
 <html lang="en">
 	<head>
+		<title>ESN Card Generator</title>
 		<meta charset="utf-8" />
 		<link rel="icon" href="%sveltekit.assets%/favicon.svg" />
 		<meta name="viewport" content="width=device-width, initial-scale=1" />
src/lib/components/PhotoCard.svelte (new file, 394 lines)
@@ -0,0 +1,394 @@
<script lang="ts">
	import { onMount } from 'svelte';
	import { createEventDispatcher } from 'svelte';
	import PhotoCrop from './PhotoCrop.svelte';
	import * as tf from '@tensorflow/tfjs';
	import * as blazeface from '@tensorflow-models/blazeface';

	export let imageUrl: string;
	export let personName: string;
	export let isProcessing = false;

	const dispatch = createEventDispatcher<{
		cropUpdated: { x: number; y: number; width: number; height: number };
		faceDetectionStarted: void;
		faceDetectionCompleted: { success: boolean; hasAutoDetectedCrop: boolean };
	}>();

	let showCropEditor = false;
	let autoDetectedCrop: { x: number; y: number; width: number; height: number } | null = null;
	let currentCrop: { x: number; y: number; width: number; height: number } | null = null;
	let isDetectingFace = false;
	let faceDetectionError = false;
	let detector: any = null;
	let isModelLoading = false;
	let isDownloadingModel = false;

	let photoElement: HTMLImageElement;

	onMount(async () => {
		console.log('PhotoCard mounted, initializing face detection...');
		await initializeFaceDetection();
	});

	async function initializeFaceDetection() {
		try {
			isDownloadingModel = true;
			console.log('Downloading BlazeFace model...');

			// Initialize TensorFlow.js with WebGL backend for better performance
			await tf.setBackend('webgl');
			await tf.ready();
			console.log('TensorFlow.js WebGL backend initialized');

			isDownloadingModel = false;
			isModelLoading = true;
			console.log('Loading BlazeFace model...');

			// Load the BlazeFace model
			detector = await blazeface.load();

			isModelLoading = false;
			console.log('BlazeFace model loaded successfully with WebGL backend');
		} catch (error) {
			console.error('Failed to initialize BlazeFace with WebGL:', error);
			console.log('Falling back to CPU backend...');

			try {
				// Fallback to CPU if WebGL fails
				await tf.setBackend('cpu');
				await tf.ready();
				console.log('TensorFlow.js CPU backend initialized as fallback');

				detector = await blazeface.load();
				isModelLoading = false;
				console.log('BlazeFace model loaded successfully with CPU backend');
			} catch (fallbackError) {
				console.error('Failed to initialize BlazeFace with CPU fallback:', fallbackError);
				isDownloadingModel = false;
				isModelLoading = false;
				faceDetectionError = true;
			}
		}
	}

	// Simple face detection using BlazeFace
	async function detectFaceWithBlazeFace() {
		if (!photoElement || isDetectingFace || !detector) return;

		dispatch('faceDetectionStarted');
		isDetectingFace = true;
		faceDetectionError = false;

		try {
			console.log('Detecting faces with BlazeFace...');

			// Detect faces in the image
			const predictions = await detector.estimateFaces(photoElement, false);

			console.log(`BlazeFace found ${predictions.length} faces`);

			if (predictions.length > 0) {
				// Find the face with the highest probability
				let bestFace = predictions[0];
				let highestProbability = predictions[0].probability ? predictions[0].probability[0] : 0;

				for (let i = 1; i < predictions.length; i++) {
					const face = predictions[i];
					const probability = face.probability ? face.probability[0] : 0;
					if (probability > highestProbability) {
						bestFace = face;
						highestProbability = probability;
					}
				}

				console.log(`Selected face with probability: ${highestProbability}`);

				// Use the best detected face
				const face = bestFace;

				// BlazeFace returns topLeft and bottomRight coordinates
				// These coordinates are relative to the DISPLAYED image size, not natural size
				let [x1, y1] = face.topLeft;
				let [x2, y2] = face.bottomRight;

				console.log('BlazeFace detection (displayed coordinates):', { x1, y1, x2, y2 });
				console.log('Image dimensions:', {
					natural: { width: photoElement.naturalWidth, height: photoElement.naturalHeight },
					displayed: { width: photoElement.clientWidth, height: photoElement.clientHeight }
				});

				// Calculate scale factors to convert from displayed to natural coordinates
				const scaleX = photoElement.naturalWidth / photoElement.clientWidth;
				const scaleY = photoElement.naturalHeight / photoElement.clientHeight;

				console.log('Scale factors:', { scaleX, scaleY });

				// Scale coordinates to natural image size
				x1 = x1 * scaleX;
				y1 = y1 * scaleY;
				x2 = x2 * scaleX;
				y2 = y2 * scaleY;

				let faceWidth = x2 - x1;
				let faceHeight = y2 - y1;

				console.log('Scaled coordinates (natural size):', { x1, y1, x2, y2, faceWidth, faceHeight });

				// BlazeFace coordinates are relative to the input image size
				// Verify coordinates are within bounds and reasonable
				if (x1 < 0 || y1 < 0 || x2 > photoElement.naturalWidth || y2 > photoElement.naturalHeight) {
					console.warn('BlazeFace coordinates out of bounds, clamping:', { x1, y1, x2, y2 });
					// Clamp coordinates to image bounds
					x1 = Math.max(0, x1);
					y1 = Math.max(0, y1);
					x2 = Math.min(photoElement.naturalWidth, x2);
					y2 = Math.min(photoElement.naturalHeight, y2);

					// Recalculate dimensions
					faceWidth = x2 - x1;
					faceHeight = y2 - y1;

					console.log('Clamped coordinates:', { x1, y1, x2, y2, faceWidth, faceHeight });
				}

				// Final validation - ensure we have a reasonable face size
				if (faceWidth <= 0 || faceHeight <= 0) {
					console.error('Invalid face dimensions after clamping');
					throw new Error('Invalid face dimensions');
				}

				// Recalculate face dimensions after any clamping
				const finalFaceWidth = x2 - x1;
				const finalFaceHeight = y2 - y1;

				// // Validate face size - reject if too small
				// const faceArea = finalFaceWidth * finalFaceHeight;
				// const imageArea = photoElement.naturalWidth * photoElement.naturalHeight;
				// const faceRatio = faceArea / imageArea;

				// console.log('Face area ratio:', faceRatio);

				// // Only reject if smaller than 0.5% of image (very small noise)
				// if (faceRatio < 0.005) {
				//   console.log('Face rejected: too small');
				//   throw new Error('Face too small');
				// }

				// Create crop area with environment-based configuration
				const cropRatio = parseFloat(import.meta.env.VITE_CROP_RATIO || '1.0');
				const faceOffsetX = parseFloat(import.meta.env.VITE_FACE_OFFSET_X || '0.0');
				const faceOffsetY = parseFloat(import.meta.env.VITE_FACE_OFFSET_Y || '-0.1');
				const cropScale = parseFloat(import.meta.env.VITE_CROP_SCALE || '2.5');

				console.log('Crop configuration:', { cropRatio, faceOffsetX, faceOffsetY, cropScale });

				// Calculate face center
				const faceCenterX = x1 + finalFaceWidth / 2;
				const faceCenterY = y1 + finalFaceHeight / 2;

				// Calculate crop dimensions based on face width and scale
				const cropWidth = finalFaceWidth * cropScale;
				const cropHeight = cropWidth / cropRatio; // Maintain aspect ratio

				// Apply face offset to crop center (offset is percentage of crop dimensions)
				const cropCenterX = faceCenterX + (cropWidth * faceOffsetX);
				const cropCenterY = faceCenterY + (cropHeight * faceOffsetY);

				// Ensure crop fits within image bounds while maintaining aspect ratio
				let finalCropWidth = cropWidth;
				let finalCropHeight = cropHeight;

				// Check if crop exceeds image bounds and scale down proportionally if needed
				const maxWidth = photoElement.naturalWidth;
				const maxHeight = photoElement.naturalHeight;

				if (finalCropWidth > maxWidth || finalCropHeight > maxHeight) {
					// Scale down to fit within bounds while maintaining ratio
					const scaleToFitWidth = maxWidth / finalCropWidth;
					const scaleToFitHeight = maxHeight / finalCropHeight;
					const scaleToFit = Math.min(scaleToFitWidth, scaleToFitHeight);

					finalCropWidth = finalCropWidth * scaleToFit;
					finalCropHeight = finalCropHeight * scaleToFit;

					console.log('Scaled crop to fit bounds:', { scaleToFit, finalCropWidth, finalCropHeight });
				}

				// Calculate crop position (top-left corner) with properly sized crop
				const cropCenterXAdjusted = faceCenterX + (finalCropWidth * faceOffsetX);
				const cropCenterYAdjusted = faceCenterY + (finalCropHeight * faceOffsetY);

				const cropX = Math.max(0, Math.min(cropCenterXAdjusted - finalCropWidth / 2, photoElement.naturalWidth - finalCropWidth));
				const cropY = Math.max(0, Math.min(cropCenterYAdjusted - finalCropHeight / 2, photoElement.naturalHeight - finalCropHeight));

				console.log('Crop calculation:', {
					faceCenter: { x: faceCenterX, y: faceCenterY },
					cropDimensions: { width: cropWidth, height: cropHeight },
					cropCenter: { x: cropCenterX, y: cropCenterY },
					finalCrop: { x: cropX, y: cropY, width: finalCropWidth, height: finalCropHeight },
					aspectRatio: finalCropWidth / finalCropHeight
				});

				autoDetectedCrop = {
					x: Math.round(cropX),
					y: Math.round(cropY),
					width: Math.round(finalCropWidth),
					height: Math.round(finalCropHeight)
				};

				currentCrop = { ...autoDetectedCrop };
				dispatch('cropUpdated', currentCrop);
				dispatch('faceDetectionCompleted', { success: true, hasAutoDetectedCrop: true });
				console.log('BlazeFace detection successful!', autoDetectedCrop);
				return;
			}

			// No faces detected
			throw new Error('No faces detected by BlazeFace');

		} catch (error) {
			console.error('BlazeFace detection failed:', error);
			faceDetectionError = true;
			dispatch('faceDetectionCompleted', { success: false, hasAutoDetectedCrop: false });
			// Don't fall back to anything - just leave it as an error state
		} finally {
			isDetectingFace = false;
		}
	}

	function openCropEditor() {
		showCropEditor = true;
	}

	function handleCropSave(e: CustomEvent<{ x: number; y: number; width: number; height: number }>) {
		currentCrop = e.detail;
		showCropEditor = false;
		dispatch('cropUpdated', currentCrop);
	}

	function handleCropCancel() {
		showCropEditor = false;
	}

	// Try face detection when image and detector are ready
	$: if (imageUrl && photoElement && detector && !isDetectingFace && !autoDetectedCrop) {
		detectFaceWithBlazeFace();
	}
</script>

<div class="relative group">
	<div class="relative overflow-hidden rounded-lg border-2 border-gray-200">
		<img
			bind:this={photoElement}
			src={imageUrl}
			alt={personName}
			class="w-full h-full object-cover"
			on:load={detectFaceWithBlazeFace}
		/>

		<!-- Small notification bars for all states -->
		{#if isDownloadingModel}
			<div class="absolute top-2 left-2 right-2 bg-blue-500/95 text-white px-3 py-2 rounded text-xs font-medium flex items-center space-x-2 shadow-lg">
				<svg class="w-3 h-3 animate-spin" viewBox="0 0 24 24">
					<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4" fill="none"/>
					<path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"/>
				</svg>
				<span>Downloading AI Model...</span>
				<div class="flex-1 bg-white/20 rounded-full h-1 ml-2">
					<div class="bg-white h-1 rounded-full animate-pulse" style="width: 30%"></div>
				</div>
			</div>
		{:else if isModelLoading}
			<div class="absolute top-2 left-2 right-2 bg-purple-500/95 text-white px-3 py-2 rounded text-xs font-medium flex items-center space-x-2 shadow-lg">
				<svg class="w-3 h-3 animate-spin" viewBox="0 0 24 24">
					<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4" fill="none"/>
					<path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"/>
				</svg>
				<span>Loading AI Model...</span>
				<div class="flex-1 bg-white/20 rounded-full h-1 ml-2">
					<div class="bg-white h-1 rounded-full animate-pulse" style="width: 60%"></div>
				</div>
			</div>
		{:else if isDetectingFace}
			<div class="absolute top-2 left-2 right-2 bg-green-500/95 text-white px-3 py-2 rounded text-xs font-medium flex items-center space-x-2 shadow-lg">
				<svg class="w-3 h-3 animate-pulse" fill="none" viewBox="0 0 24 24" stroke="currentColor">
					<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z"/>
				</svg>
				<span>Detecting Face
					<span class="inline-flex ml-1">
						<span class="animate-pulse">.</span>
						<span class="animate-pulse" style="animation-delay: 0.2s">.</span>
						<span class="animate-pulse" style="animation-delay: 0.4s">.</span>
					</span>
				</span>
				<div class="flex-1 bg-white/20 rounded-full h-1 ml-2">
					<div class="bg-white h-1 rounded-full animate-pulse" style="width: 80%"></div>
				</div>
			</div>
		{/if}

		{#if currentCrop}
			<!-- Show crop preview overlay with proper masking -->
			<div class="absolute inset-0 pointer-events-none">
				<div class="relative w-full h-full">
					<!-- Create mask using box-shadow to darken only non-crop areas -->
					<div
						class="absolute border-2 border-blue-500 border-dashed"
						style="left: {(currentCrop.x / photoElement?.naturalWidth) * 100}%;
							top: {(currentCrop.y / photoElement?.naturalHeight) * 100}%;
							width: {(currentCrop.width / photoElement?.naturalWidth) * 100}%;
							height: {(currentCrop.height / photoElement?.naturalHeight) * 100}%;
							box-shadow: 0 0 0 9999px rgba(0, 0, 0, 0.3);"
					></div>
				</div>
			</div>
		{/if}

		<!-- Edit crop button -->
		<button
			on:click={openCropEditor}
			class="absolute top-2 right-2 bg-white bg-opacity-90 hover:bg-opacity-100 rounded-full p-2 shadow-lg transition-all duration-200 opacity-0 group-hover:opacity-100"
			title="Edit crop area"
		>
			<svg class="w-4 h-4 text-gray-700" fill="none" viewBox="0 0 24 24" stroke="currentColor">
				<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15.232 5.232l3.536 3.536m-2.036-5.036a2.5 2.5 0 113.536 3.536L6.5 21.036H3v-3.572L16.732 3.732z"/>
			</svg>
		</button>

		<!-- Status indicators -->
		<div class="absolute bottom-2 left-2 flex space-x-1">
			{#if faceDetectionError}
				<div class="bg-yellow-500 text-white px-2 py-1 rounded text-xs font-medium">
					Manual crop
				</div>
			{:else if currentCrop && autoDetectedCrop && JSON.stringify(currentCrop) !== JSON.stringify(autoDetectedCrop)}
				<div class="bg-blue-500 text-white px-2 py-1 rounded text-xs font-medium">
					Custom crop
				</div>
			{:else if autoDetectedCrop}
				<div class="bg-green-500 text-white px-2 py-1 rounded text-xs font-medium">
					Auto-cropped
				</div>
			{/if}
		</div>
	</div>

	<div class="mt-2">
		<p class="text-sm font-medium text-gray-900 truncate">{personName}</p>
		{#if isProcessing}
			<p class="text-xs text-gray-500">Processing...</p>
		{:else if faceDetectionError}
			<p class="text-xs text-yellow-600">Using center crop</p>
		{:else if autoDetectedCrop}
			<p class="text-xs text-green-600">Face detected</p>
		{/if}
	</div>
</div>

{#if showCropEditor}
	<PhotoCrop
		{imageUrl}
		{personName}
		initialCrop={currentCrop}
		on:save={handleCropSave}
		on:cancel={handleCropCancel}
	/>
{/if}
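A worked sketch of the displayed-to-natural scaling step in detectFaceWithBlazeFace above; all numbers are illustrative:

// Sketch: converting BlazeFace's displayed-size box to natural-size pixels.
const natural = { width: 1600, height: 1200 };   // photoElement.naturalWidth/Height
const displayed = { width: 400, height: 300 };   // photoElement.clientWidth/Height

const scaleX = natural.width / displayed.width;   // 4
const scaleY = natural.height / displayed.height; // 4

const topLeft = [100, 50];      // face.topLeft, in displayed coordinates
const bottomRight = [200, 175]; // face.bottomRight

const faceNatural = {
	x: topLeft[0] * scaleX,                          // 400
	y: topLeft[1] * scaleY,                          // 200
	width: (bottomRight[0] - topLeft[0]) * scaleX,   // 400
	height: (bottomRight[1] - topLeft[1]) * scaleY   // 500
};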
src/lib/components/PhotoCrop.svelte (new file, 360 lines)
@@ -0,0 +1,360 @@
<script lang="ts">
	import { createEventDispatcher, onMount } from 'svelte';

	export let imageUrl: string;
	export let personName: string;
	export let initialCrop: { x: number; y: number; width: number; height: number } | null = null;

	const dispatch = createEventDispatcher<{
		save: { x: number; y: number; width: number; height: number };
		cancel: void;
	}>();

	let canvas: HTMLCanvasElement;
	let ctx: CanvasRenderingContext2D;
	let image: HTMLImageElement;
	let isImageLoaded = false;

	// Crop rectangle state
	let crop = {
		x: 0,
		y: 0,
		width: 200,
		height: 200
	};

	// Interaction state
	let isDragging = false;
	let isResizing = false;
	let dragStart = { x: 0, y: 0 };
	let resizeHandle = '';

	// Canvas dimensions
	let canvasWidth = 600;
	let canvasHeight = 400;

	// Get crop ratio from environment
	const cropRatio = parseFloat(import.meta.env.VITE_CROP_RATIO || '1.0');

	onMount(() => {
		ctx = canvas.getContext('2d')!;
		loadImage();
	});

	async function loadImage() {
		image = new Image();
		image.onload = () => {
			isImageLoaded = true;

			// Calculate canvas size to fit image while maintaining aspect ratio
			const maxWidth = 600;
			const maxHeight = 400;
			const imageAspect = image.width / image.height;

			if (imageAspect > maxWidth / maxHeight) {
				canvasWidth = maxWidth;
				canvasHeight = maxWidth / imageAspect;
			} else {
				canvasHeight = maxHeight;
				canvasWidth = maxHeight * imageAspect;
			}

			canvas.width = canvasWidth;
			canvas.height = canvasHeight;

			// Initialize crop rectangle
			if (initialCrop) {
				// Scale initial crop to canvas dimensions
				const scaleX = canvasWidth / image.width;
				const scaleY = canvasHeight / image.height;
				crop = {
					x: initialCrop.x * scaleX,
					y: initialCrop.y * scaleY,
					width: initialCrop.width * scaleX,
					height: initialCrop.height * scaleY
				};
			} else {
				// Default crop: centered with correct aspect ratio
				const maxSize = Math.min(canvasWidth, canvasHeight) * 0.6;
				const cropWidth = maxSize;
				const cropHeight = cropWidth / cropRatio;

				// If height exceeds canvas, scale down proportionally
				if (cropHeight > canvasHeight * 0.8) {
					const scale = (canvasHeight * 0.8) / cropHeight;
					crop = {
						x: (canvasWidth - (cropWidth * scale)) / 2,
						y: (canvasHeight - (cropHeight * scale)) / 2,
						width: cropWidth * scale,
						height: cropHeight * scale
					};
				} else {
					crop = {
						x: (canvasWidth - cropWidth) / 2,
						y: (canvasHeight - cropHeight) / 2,
						width: cropWidth,
						height: cropHeight
					};
				}
			}

			drawCanvas();
		};
		image.src = imageUrl;
	}

	function drawCanvas() {
		if (!ctx || !isImageLoaded) return;

		// Clear canvas
		ctx.clearRect(0, 0, canvasWidth, canvasHeight);

		// Draw image
		ctx.drawImage(image, 0, 0, canvasWidth, canvasHeight);

		// Draw overlay (darken non-crop area)
		ctx.fillStyle = 'rgba(0, 0, 0, 0.5)';
		ctx.fillRect(0, 0, canvasWidth, canvasHeight);

		// Clear crop area
		ctx.globalCompositeOperation = 'destination-out';
		ctx.fillRect(crop.x, crop.y, crop.width, crop.height);
		ctx.globalCompositeOperation = 'source-over';

		// Draw crop rectangle border
		ctx.strokeStyle = '#3b82f6';
		ctx.lineWidth = 2;
		ctx.strokeRect(crop.x, crop.y, crop.width, crop.height);

		// Draw resize handles
		const handleSize = 12; // Increased from 8 for easier grabbing
		ctx.fillStyle = '#3b82f6';
		ctx.strokeStyle = '#ffffff';
		ctx.lineWidth = 1;

		// Corner handles with white borders for better visibility
		const handles = [
			{ x: crop.x - handleSize/2, y: crop.y - handleSize/2, cursor: 'nw-resize' },
			{ x: crop.x + crop.width - handleSize/2, y: crop.y - handleSize/2, cursor: 'ne-resize' },
			{ x: crop.x - handleSize/2, y: crop.y + crop.height - handleSize/2, cursor: 'sw-resize' },
			{ x: crop.x + crop.width - handleSize/2, y: crop.y + crop.height - handleSize/2, cursor: 'se-resize' },
		];

		handles.forEach(handle => {
			ctx.fillRect(handle.x, handle.y, handleSize, handleSize);
			ctx.strokeRect(handle.x, handle.y, handleSize, handleSize);
		});
	}

	function getMousePos(e: MouseEvent) {
		const rect = canvas.getBoundingClientRect();
		return {
			x: e.clientX - rect.left,
			y: e.clientY - rect.top
		};
	}

	function isInCropArea(x: number, y: number) {
		return x >= crop.x && x <= crop.x + crop.width &&
			y >= crop.y && y <= crop.y + crop.height;
	}

	function getResizeHandle(x: number, y: number) {
		const handleSize = 12; // Match the drawing size
		const tolerance = handleSize;

		if (Math.abs(x - crop.x) <= tolerance && Math.abs(y - crop.y) <= tolerance) return 'nw';
		if (Math.abs(x - (crop.x + crop.width)) <= tolerance && Math.abs(y - crop.y) <= tolerance) return 'ne';
		if (Math.abs(x - crop.x) <= tolerance && Math.abs(y - (crop.y + crop.height)) <= tolerance) return 'sw';
		if (Math.abs(x - (crop.x + crop.width)) <= tolerance && Math.abs(y - (crop.y + crop.height)) <= tolerance) return 'se';

		return '';
	}

	function handleMouseDown(e: MouseEvent) {
		const pos = getMousePos(e);
		const handle = getResizeHandle(pos.x, pos.y);

		if (handle) {
			isResizing = true;
			resizeHandle = handle;
			dragStart = pos;
		} else if (isInCropArea(pos.x, pos.y)) {
			isDragging = true;
			dragStart = { x: pos.x - crop.x, y: pos.y - crop.y };
		}
	}

	function handleMouseMove(e: MouseEvent) {
		const pos = getMousePos(e);

		if (isResizing) {
			const dx = pos.x - dragStart.x;
			const dy = pos.y - dragStart.y;

			const newCrop = { ...crop };

			// Use primary axis movement for more predictable resizing
			switch (resizeHandle) {
				case 'nw': {
					// Use the dominant movement direction
					const primaryDelta = Math.abs(dx) > Math.abs(dy) ? dx : dy * cropRatio;
					const newWidth = Math.max(20, crop.width - primaryDelta);
					const newHeight = newWidth / cropRatio;

					newCrop.x = Math.max(0, crop.x + crop.width - newWidth);
					newCrop.y = Math.max(0, crop.y + crop.height - newHeight);
					newCrop.width = newWidth;
					newCrop.height = newHeight;
					break;
				}
				case 'ne': {
					// For NE, primarily follow horizontal movement
					const newWidthNE = Math.max(20, crop.width + dx);
					const newHeightNE = newWidthNE / cropRatio;

					newCrop.width = newWidthNE;
					newCrop.height = newHeightNE;
					newCrop.y = Math.max(0, crop.y + crop.height - newHeightNE);
					break;
				}
				case 'sw': {
					// For SW, primarily follow horizontal movement
					const newWidthSW = Math.max(20, crop.width - dx);
					const newHeightSW = newWidthSW / cropRatio;

					newCrop.x = Math.max(0, crop.x + crop.width - newWidthSW);
					newCrop.width = newWidthSW;
					newCrop.height = newHeightSW;
					break;
				}
				case 'se': {
					// For SE, primarily follow horizontal movement
					const newWidthSE = Math.max(20, crop.width + dx);
					const newHeightSE = newWidthSE / cropRatio;

					newCrop.width = newWidthSE;
					newCrop.height = newHeightSE;
					break;
				}
			}

			// Ensure crop stays within canvas bounds
			if (newCrop.x + newCrop.width > canvasWidth) {
				newCrop.width = canvasWidth - newCrop.x;
				newCrop.height = newCrop.width / cropRatio;
			}
			if (newCrop.y + newCrop.height > canvasHeight) {
				newCrop.height = canvasHeight - newCrop.y;
				newCrop.width = newCrop.height * cropRatio;
			}

			// Adjust position if crop extends beyond bounds after resizing
			if (newCrop.x + newCrop.width > canvasWidth) {
				newCrop.x = canvasWidth - newCrop.width;
			}
			if (newCrop.y + newCrop.height > canvasHeight) {
				newCrop.y = canvasHeight - newCrop.height;
			}

			crop = newCrop;
			drawCanvas();
		} else if (isDragging) {
			crop.x = Math.max(0, Math.min(canvasWidth - crop.width, pos.x - dragStart.x));
			crop.y = Math.max(0, Math.min(canvasHeight - crop.height, pos.y - dragStart.y));
			drawCanvas();
		} else {
			// Update cursor based on hover state
			const handle = getResizeHandle(pos.x, pos.y);
			if (handle) {
				canvas.style.cursor = handle + '-resize';
			} else if (isInCropArea(pos.x, pos.y)) {
				canvas.style.cursor = 'move';
			} else {
				canvas.style.cursor = 'default';
			}
		}
	}

	function handleMouseUp() {
		isDragging = false;
		isResizing = false;
		resizeHandle = '';
		canvas.style.cursor = 'default';
	}

	function handleSave() {
		// Convert canvas coordinates back to image coordinates
		const scaleX = image.width / canvasWidth;
		const scaleY = image.height / canvasHeight;

		const imageCrop = {
			x: Math.round(crop.x * scaleX),
			y: Math.round(crop.y * scaleY),
			width: Math.round(crop.width * scaleX),
			height: Math.round(crop.height * scaleY)
		};

		dispatch('save', imageCrop);
	}

	function handleCancel() {
		dispatch('cancel');
	}
</script>

<div class="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50" on:click={handleCancel}>
	<div class="bg-white rounded-lg shadow-xl max-w-4xl w-full mx-4" on:click|stopPropagation>
		<div class="p-6">
			<div class="flex items-center justify-between mb-4">
				<h3 class="text-lg font-semibold text-gray-900">
					Crop Photo - {personName}
				</h3>

				<button
					on:click={handleCancel}
					class="text-gray-400 hover:text-gray-600"
				>
					<svg class="w-6 h-6" fill="none" viewBox="0 0 24 24" stroke="currentColor">
						<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
					</svg>
				</button>
			</div>

			<div class="flex flex-col items-center space-y-4">
				<div class="border border-gray-300 rounded-lg overflow-hidden">
					<canvas
						bind:this={canvas}
						on:mousedown={handleMouseDown}
						on:mousemove={handleMouseMove}
						on:mouseup={handleMouseUp}
						on:mouseleave={handleMouseUp}
						class="block"
					></canvas>
				</div>

				<p class="text-sm text-gray-600 text-center max-w-lg">
					Drag the crop area to move it, or drag the corner handles to resize.
					The selected area will be used for the member card.
					<br>
					<span class="font-medium">Aspect Ratio: {cropRatio.toFixed(1)}:1 {cropRatio === 1.0 ? '(Square)' : cropRatio === 1.5 ? '(3:2)' : ''}</span>
				</p>

				<div class="flex space-x-3">
					<button
						on:click={handleCancel}
						class="px-4 py-2 bg-gray-200 text-gray-700 rounded-lg font-medium hover:bg-gray-300"
					>
						Cancel
					</button>

					<button
						on:click={handleSave}
						class="px-4 py-2 bg-blue-600 text-white rounded-lg font-medium hover:bg-blue-700"
					>
						Save Crop
					</button>
				</div>
			</div>
		</div>
	</div>
</div>
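The editor keeps the crop in canvas coordinates and only handleSave converts back to image pixels; a worked sketch of that conversion, with illustrative numbers:

// Sketch: the canvas-to-image mapping used when saving a crop.
const image = { width: 3000, height: 2000 };
const canvas = { width: 600, height: 400 }; // fitted preview, same aspect ratio

const crop = { x: 150, y: 100, width: 180, height: 180 }; // canvas space

const scaleX = image.width / canvas.width;   // 5
const scaleY = image.height / canvas.height; // 5

const imageCrop = {
	x: Math.round(crop.x * scaleX),          // 750
	y: Math.round(crop.y * scaleY),          // 500
	width: Math.round(crop.width * scaleX),  // 900
	height: Math.round(crop.height * scaleY) // 900
};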
@@ -4,20 +4,23 @@
 	import StepSheetSearch from './wizard/StepSheetSearch.svelte';
 	import StepColumnMap from './wizard/StepColumnMap.svelte';
 	import StepRowFilter from './wizard/StepRowFilter.svelte';
+	import StepGallery from './wizard/StepGallery.svelte';
 	// Additional steps to be added as they are implemented

 	const steps = [
 		StepAuth,
 		StepSheetSearch,
 		StepColumnMap,
-		StepRowFilter
+		StepRowFilter,
+		StepGallery
 	];

 	const stepTitles = [
 		'Authenticate',
 		'Select Sheet',
 		'Map Columns',
-		'Filter Rows'
+		'Filter Rows',
+		'Review Photos'
 	];

 	function goToPreviousStep() {
src/lib/components/wizard/StepGallery.svelte
@@ -1,4 +1,402 @@
+<script lang="ts">
+	import { onMount } from 'svelte';
+	import { columnMapping, filteredSheetData, currentStep } from '$lib/stores';
+	import { downloadDriveImage, isGoogleDriveUrl, createImageObjectUrl } from '$lib/google';
+	import PhotoCard from '../PhotoCard.svelte';
+
+	interface PhotoInfo {
+		name: string;
+		url: string;
+		status: 'loading' | 'success' | 'error';
+		objectUrl?: string;
+		retryCount: number;
+		cropData?: { x: number; y: number; width: number; height: number };
+		faceDetectionStatus?: 'pending' | 'processing' | 'completed' | 'failed';
+	}
+
+	let photos: PhotoInfo[] = [];
+	let isProcessing = false;
+	let processedCount = 0;
+	let totalCount = 0;
+	let faceDetectionInProgress = false;
+	let faceDetectionCount = { started: 0, completed: 0 };
+
+	// Process photos when component mounts
+	onMount(() => {
+		console.log('StepGallery mounted, processing photos...');
+		if ($filteredSheetData.length > 0 && $columnMapping.pictureUrl !== undefined) {
+			console.log('Processing photos for gallery step');
+			processPhotos();
+		} else {
+			console.log('No data to process:', {
+				dataLength: $filteredSheetData.length,
+				pictureUrlMapping: $columnMapping.pictureUrl
+			});
+		}
+	});
+
+	async function processPhotos() {
+		if (isProcessing) return;
+
+		console.log('Starting processPhotos...');
+		isProcessing = true;
+		processedCount = 0;
+
+		// Get valid and included rows from filteredSheetData
+		const validRows = $filteredSheetData.filter(row => row._isValid);
+		console.log(`Found ${validRows.length} valid rows`);
+
+		// Get unique photos to process
+		const photoUrls = new Set<string>();
+		const photoMap = new Map<string, any[]>(); // url -> row data
+
+		validRows.forEach((row: any) => {
+			const photoUrl = row.pictureUrl;
+
+			if (photoUrl && photoUrl.trim()) {
+				photoUrls.add(photoUrl.trim());
+				if (!photoMap.has(photoUrl.trim())) {
+					photoMap.set(photoUrl.trim(), []);
+				}
+				photoMap.get(photoUrl.trim())!.push(row);
+			}
+		});
+
+		console.log(`Found ${photoUrls.size} unique photo URLs`);
+		totalCount = photoUrls.size;
+
+		// Initialize photos array
+		photos = Array.from(photoUrls).map(url => ({
+			name: photoMap.get(url)![0].name + ' ' + photoMap.get(url)![0].surname, // Use first person's name for display
+			url,
+			status: 'loading' as const,
+			retryCount: 0,
+			faceDetectionStatus: 'pending' as const
+		}));
+
+		// Process each photo
+		for (let i = 0; i < photos.length; i++) {
+			await loadPhoto(i);
+			processedCount++;
+		}
+
+		isProcessing = false;
+	}
+
+	async function loadPhoto(index: number, isRetry = false) {
+		const photo = photos[index];
+
+		if (!isRetry) {
+			photo.status = 'loading';
+			photos = [...photos]; // Trigger reactivity
+		}
+
+		try {
+			let objectUrl: string;
+
+			if (isGoogleDriveUrl(photo.url)) {
+				// Download from Google Drive
+				console.log(`Downloading from Google Drive: ${photo.name}`);
+				const blob = await downloadDriveImage(photo.url);
+				objectUrl = createImageObjectUrl(blob);
+			} else {
+				// Use direct URL
+				objectUrl = photo.url;
+			}
+
+			// Test if image loads properly
+			await new Promise<void>((resolve, reject) => {
+				const img = new Image();
+				img.onload = () => resolve();
+				img.onerror = (error) => {
+					console.error(`Failed to load image for ${photo.name}:`, error);
+					reject(new Error('Failed to load image'));
+				};
+				img.src = objectUrl;
+			});
+
+			photo.objectUrl = objectUrl;
+			photo.status = 'success';
+			console.log(`Photo loaded successfully: ${photo.name}`);
+		} catch (error) {
+			console.error(`Failed to load photo for ${photo.name}:`, error);
+			photo.status = 'error';
+		}
+
+		photos = [...photos]; // Trigger reactivity
+	}
+
+	async function retryPhoto(index: number) {
+		const photo = photos[index];
+
+		if (photo.retryCount >= 3) {
+			return; // Max retries reached
+		}
+
+		photo.retryCount++;
+		await loadPhoto(index, true);
+	}
+
+	function handleCropUpdate(index: number, cropData: { x: number; y: number; width: number; height: number }) {
+		photos[index].cropData = cropData;
+		photos = [...photos]; // Trigger reactivity
+	}
+
+	function handleFaceDetectionStarted(index: number) {
+		photos[index].faceDetectionStatus = 'processing';
+		faceDetectionCount.started++;
+		faceDetectionInProgress = true;
+		photos = [...photos]; // Trigger reactivity
+		console.log(`Face detection started for photo ${index + 1}, total started: ${faceDetectionCount.started}`);
+	}
+
+	function handleFaceDetectionCompleted(index: number, detail: { success: boolean; hasAutoDetectedCrop: boolean }) {
+		photos[index].faceDetectionStatus = detail.success ? 'completed' : 'failed';
+		faceDetectionCount.completed++;
+
+		console.log(`Face detection completed for photo ${index + 1}, total completed: ${faceDetectionCount.completed}`);
+
+		// Check if all face detections are complete
+		if (faceDetectionCount.completed >= faceDetectionCount.started) {
+			faceDetectionInProgress = false;
+			console.log('All face detections completed');
+		}
+
+		photos = [...photos]; // Trigger reactivity
+	}
+
+	// Reactive, so the navigation button and tip below update as photos load,
+	// crop, and finish face detection
+	$: canProceed =
+		photos.length > 0 &&
+		photos.every(photo => photo.status === 'success') &&
+		photos.every(photo => photo.cropData) &&
+		!faceDetectionInProgress;
+
+	// Cleanup object URLs when component is destroyed
+	function cleanupObjectUrls() {
+		photos.forEach(photo => {
+			if (photo.objectUrl && photo.objectUrl.startsWith('blob:')) {
+				URL.revokeObjectURL(photo.objectUrl);
+			}
+		});
+	}
+
+	// Cleanup on unmount or when photos change
+	$: {
+		// This will run when photos array changes
+		if (photos.length === 0) {
+			cleanupObjectUrls();
+		}
+	}
+</script>
+
 <div class="p-6">
-	<h2 class="text-xl font-semibold text-gray-900">Review Photos</h2>
-	<p class="text-sm text-gray-700">Photo gallery and review functionality will be implemented here.</p>
+	<div class="max-w-6xl mx-auto">
+		<div class="mb-6">
+			<h2 class="text-xl font-semibold text-gray-900 mb-2">
+				Review & Crop Photos
+			</h2>
+
+			<p class="text-sm text-gray-700 mb-4">
+				Photos are automatically cropped using face detection. Click the pen icon to manually adjust the crop area.
+			</p>
 		</div>
+
+		<!-- Processing Status -->
+		{#if isProcessing}
+			<div class="bg-blue-50 border border-blue-200 rounded-lg p-4 mb-6">
+				<div class="flex items-center justify-between">
+					<div class="flex items-center">
+						<div class="w-5 h-5 border-2 border-blue-600 border-t-transparent rounded-full animate-spin mr-3"></div>
+						<span class="text-sm text-blue-800">
+							Processing photos...
+						</span>
+					</div>
+					<span class="text-sm text-blue-600">
+						{processedCount} / {totalCount}
+					</span>
+				</div>
+
+				{#if totalCount > 0}
+					<div class="mt-3 w-full bg-blue-200 rounded-full h-2">
+						<div
+							class="bg-blue-600 h-2 rounded-full transition-all duration-300"
+							style="width: {(processedCount / totalCount) * 100}%"
+						></div>
+					</div>
+				{/if}
+			</div>
+		{:else if faceDetectionInProgress}
+			<div class="bg-green-50 border border-green-200 rounded-lg p-4 mb-6">
+				<div class="flex items-center justify-between">
+					<div class="flex items-center">
+						<div class="w-5 h-5 border-2 border-green-600 border-t-transparent rounded-full animate-spin mr-3"></div>
+						<span class="text-sm text-green-800">
+							Detecting faces and auto-cropping...
+						</span>
+					</div>
+					<span class="text-sm text-green-600">
+						{faceDetectionCount.completed} / {faceDetectionCount.started}
+					</span>
+				</div>
+
+				{#if faceDetectionCount.started > 0}
+					<div class="mt-3 w-full bg-green-200 rounded-full h-2">
+						<div
+							class="bg-green-600 h-2 rounded-full transition-all duration-300"
+							style="width: {(faceDetectionCount.completed / faceDetectionCount.started) * 100}%"
+						></div>
+					</div>
+				{/if}
+			</div>
+		{/if}
+
+		<!-- Summary Stats -->
+		{#if !isProcessing && !faceDetectionInProgress && photos.length > 0}
+			<div class="bg-gray-50 border border-gray-200 rounded-lg p-4 mb-6">
+				<h3 class="text-sm font-medium text-gray-700 mb-3">Processing Summary</h3>
+
+				<div class="grid grid-cols-2 md:grid-cols-5 gap-4 text-sm">
+					<div class="text-center">
+						<div class="text-2xl font-bold text-gray-900">{photos.length}</div>
+						<div class="text-gray-600">Total Photos</div>
+					</div>
+
+					<div class="text-center">
+						<div class="text-2xl font-bold text-green-600">
+							{photos.filter(p => p.status === 'success').length}
+						</div>
+						<div class="text-gray-600">Loaded</div>
+					</div>
+
+					<div class="text-center">
+						<div class="text-2xl font-bold text-blue-600">
+							{photos.filter(p => p.faceDetectionStatus === 'completed').length}
+						</div>
+						<div class="text-gray-600">Auto-cropped</div>
+					</div>
+
+					<div class="text-center">
+						<div class="text-2xl font-bold text-purple-600">
+							{photos.filter(p => p.cropData).length}
+						</div>
+						<div class="text-gray-600">Ready</div>
+					</div>
+
+					<div class="text-center">
+						<div class="text-2xl font-bold text-red-600">
+							{photos.filter(p => p.status === 'error').length}
+						</div>
+						<div class="text-gray-600">Failed</div>
+					</div>
+				</div>
+
+				{#if photos.filter(p => p.status === 'error').length > 0}
+					<div class="mt-4 p-3 bg-yellow-50 border border-yellow-200 rounded">
+						<p class="text-sm text-yellow-800">
+							<strong>Note:</strong> Cards will only be generated for photos that load successfully.
+						</p>
+					</div>
+				{/if}
+
+				{#if !canProceed && photos.filter(p => p.status === 'success').length > 0}
+					<div class="mt-4 p-3 bg-blue-50 border border-blue-200 rounded">
+						<p class="text-sm text-blue-800">
+							<strong>Tip:</strong> All photos need to be cropped before proceeding. Face detection runs automatically.
+						</p>
+					</div>
+				{/if}
+			</div>
+		{/if}
+
+		<!-- Photo Grid -->
+		<div class="bg-white border border-gray-200 rounded-lg overflow-hidden mb-6">
+			{#if photos.length === 0 && !isProcessing}
+				<div class="text-center py-12">
+					<svg class="mx-auto h-12 w-12 text-gray-400" fill="none" viewBox="0 0 24 24" stroke="currentColor">
+						<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2z"/>
+					</svg>
+					<h3 class="mt-2 text-sm font-medium text-gray-900">No photos found</h3>
+					<p class="mt-1 text-sm text-gray-500">
+						Go back to check your column mapping and selected rows.
+					</p>
+				</div>
+			{:else}
+				<div class="p-6">
+					<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 gap-6">
+						{#each photos as photo, index}
+							{#if photo.status === 'loading'}
+								<div class="border border-gray-200 rounded-lg overflow-hidden bg-white shadow-sm">
+									<div class="aspect-square bg-gray-100 flex items-center justify-center">
+										<div class="flex flex-col items-center">
+											<div class="w-8 h-8 border-2 border-blue-600 border-t-transparent rounded-full animate-spin mb-2"></div>
+											<span class="text-xs text-gray-600">Loading...</span>
+										</div>
+									</div>
+									<div class="p-3">
+										<h4 class="font-medium text-sm text-gray-900 truncate">{photo.name}</h4>
+										<span class="text-xs text-blue-600">Processing photo...</span>
+									</div>
+								</div>
+							{:else if photo.status === 'success' && photo.objectUrl}
+								<PhotoCard
+									imageUrl={photo.objectUrl}
+									personName={photo.name}
+									isProcessing={false}
+									on:cropUpdated={(e) => handleCropUpdate(index, e.detail)}
+									on:faceDetectionStarted={() => handleFaceDetectionStarted(index)}
+									on:faceDetectionCompleted={(e) => handleFaceDetectionCompleted(index, e.detail)}
+								/>
+							{:else if photo.status === 'error'}
+								<div class="border border-gray-200 rounded-lg overflow-hidden bg-white shadow-sm">
+									<div class="aspect-square bg-gray-100 flex items-center justify-center">
+										<div class="flex flex-col items-center text-center p-4">
+											<svg class="w-12 h-12 text-red-400 mb-2" fill="none" viewBox="0 0 24 24" stroke="currentColor">
+												<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
+											</svg>
+											<span class="text-xs text-red-600 mb-2">Failed to load</span>
+											<button
+												class="text-xs text-blue-600 hover:text-blue-800 underline"
+												on:click={() => retryPhoto(index)}
+												disabled={photo.retryCount >= 3}
+											>
+												{photo.retryCount >= 3 ? 'Max retries' : 'Retry'}
+											</button>
+										</div>
+									</div>
+									<div class="p-3">
+										<h4 class="font-medium text-sm text-gray-900 truncate">{photo.name}</h4>
+										<span class="text-xs text-red-600">Failed to load</span>
+									</div>
+								</div>
+							{/if}
+						{/each}
+					</div>
+				</div>
+			{/if}
+		</div>
+
+		<!-- Navigation -->
+		<div class="flex justify-between">
+			<button
+				on:click={() => currentStep.set(3)}
+				class="px-4 py-2 bg-gray-200 text-gray-700 rounded-lg font-medium hover:bg-gray-300"
+			>
+				← Back to Row Filter
+			</button>
+
+			<button
+				on:click={() => currentStep.set(5)}
+				disabled={!canProceed}
+				class="px-4 py-2 bg-blue-600 text-white rounded-lg font-medium hover:bg-blue-700 disabled:bg-gray-400 disabled:cursor-not-allowed"
+			>
+				{canProceed
+					? `Generate ${photos.filter(p => p.status === 'success' && p.cropData).length} Cards →`
+					: 'Waiting for photos to load and crop'}
+			</button>
+		</div>
+	</div>
+</div>
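The object-URL lifecycle StepGallery relies on, shown in isolation (sketch only; the component wires this through createImageObjectUrl and cleanupObjectUrls):

// Sketch: object-URL lifecycle for a downloaded photo blob.
const blob = new Blob([/* image bytes */], { type: 'image/jpeg' });

const objectUrl = URL.createObjectURL(blob); // e.g. "blob:http://localhost/..."
// ...bind it to an <img src={objectUrl}> while the gallery is visible...

URL.revokeObjectURL(objectUrl); // release the memory once the photo is dropped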
src/lib/components/wizard/StepRowFilter.svelte
@@ -1,5 +1,6 @@
 <script lang="ts">
-	import { selectedSheet, columnMapping, rawSheetData, filteredSheetData, currentStep } from '$lib/stores';
+	import { selectedSheet, columnMapping, rawSheetData, filteredSheetData, currentStep, sheetData } from '$lib/stores';
+	import type { RowData } from '$lib/stores';
 	import { onMount } from 'svelte';

 	let searchTerm = '';
@@ -41,6 +42,7 @@
 	}

 	onMount(() => {
+		console.log('StepRowFilter mounted');
 		processSheetData();
 	});

src/lib/components/wizard/StepSheetSearch.svelte
@@ -56,34 +56,6 @@
 		}
 	}

-	function saveToRecentSheets(sheet) {
-		// Create a copy of the sheet object with just the properties we need
-		const sheetToSave = {
-			id: sheet.id,
-			name: sheet.name,
-			url: sheet.webViewLink || sheet.url,
-			iconLink: sheet.iconLink
-		};
-
-		// Remove this sheet if it already exists in the list
-		recentSheets = recentSheets.filter(s => s.id !== sheetToSave.id);
-
-		// Add the sheet to the beginning of the list
-		recentSheets = [sheetToSave, ...recentSheets];
-
-		// Keep only up to 3 recent sheets
-		if (recentSheets.length > 3) {
-			recentSheets = recentSheets.slice(0, 3);
-		}
-
-		// Save to localStorage
-		try {
-			localStorage.setItem(RECENT_SHEETS_KEY, JSON.stringify(recentSheets));
-		} catch (err) {
-			console.error('Error saving recent sheets:', err);
-		}
-	}
-
 	function handleSelectSheet(sheet) {
 		const sheetData = {
 			id: sheet.id,
@@ -92,7 +64,6 @@
 		};

 		selectedSheet.set(sheetData);
-		saveToRecentSheets(sheet);
 	}

 	function handleContinue() {
src/lib/google.ts
@@ -126,3 +126,80 @@ export async function getSheetData(spreadsheetId: string, range: string) {
 	});
 	return response.result.values || [];
 }
+
+// Extract Google Drive file ID from various URL formats
+export function extractDriveFileId(url: string): string | null {
+	if (!url) return null;
+
+	// Handle different Google Drive URL formats
+	const patterns = [
+		/\/file\/d\/([a-zA-Z0-9-_]+)/,  // https://drive.google.com/file/d/FILE_ID/view
+		/id=([a-zA-Z0-9-_]+)/,          // https://drive.google.com/open?id=FILE_ID
+		/\/d\/([a-zA-Z0-9-_]+)/,        // https://drive.google.com/uc?id=FILE_ID&export=download
+		/^([a-zA-Z0-9-_]{25,})$/        // Direct file ID
+	];
+
+	for (const pattern of patterns) {
+		const match = url.match(pattern);
+		if (match) {
+			return match[1];
+		}
+	}
+
+	return null;
+}
+
+// Check if URL is a Google Drive URL
+export function isGoogleDriveUrl(url: string): boolean {
+	return url.includes('drive.google.com') || url.includes('googleapis.com');
+}
+
+// Download image from Google Drive using the API
+export async function downloadDriveImage(url: string): Promise<Blob> {
+	const fileId = extractDriveFileId(url);
+
+	if (!fileId) {
+		throw new Error('Could not extract file ID from Google Drive URL');
+	}
+
+	if (!gapi.client.drive) {
+		throw new Error('Google Drive API not loaded');
+	}
+
+	try {
+		// Get file metadata first to check if it exists and is accessible
+		const metadata = await gapi.client.drive.files.get({
+			fileId: fileId,
+			fields: 'id,name,mimeType,size'
+		});
+
+		if (!metadata.result.mimeType?.startsWith('image/')) {
+			throw new Error('File is not an image');
+		}
+
+		// Download the file content
+		const response = await gapi.client.drive.files.get({
+			fileId: fileId,
+			alt: 'media'
+		});
+
+		// The response body is already binary data, convert to blob
+		const binaryString = response.body;
+		const bytes = new Uint8Array(binaryString.length);
+		for (let i = 0; i < binaryString.length; i++) {
+			bytes[i] = binaryString.charCodeAt(i);
+		}
+
+		return new Blob([bytes], { type: metadata.result.mimeType });
+
+	} catch (error) {
+		console.error('Error downloading from Google Drive:', error);
+		const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+		throw new Error(`Failed to download image from Google Drive: ${errorMessage}`);
+	}
+}
+
+// Create an object URL from image data for display
+export function createImageObjectUrl(blob: Blob): string {
+	return URL.createObjectURL(blob);
+}
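For reference, the URL shapes extractDriveFileId is written to accept (FILE_ID is a placeholder):

// Sketch: inputs the patterns above resolve, and what they return.
extractDriveFileId('https://drive.google.com/file/d/FILE_ID/view');           // 'FILE_ID'
extractDriveFileId('https://drive.google.com/open?id=FILE_ID');               // 'FILE_ID'
extractDriveFileId('https://drive.google.com/uc?id=FILE_ID&export=download'); // 'FILE_ID'
extractDriveFileId('not-a-drive-url');                                        // null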