<script lang="ts">
  import { onMount } from 'svelte';
  import { env } from '$env/dynamic/public';
  import { columnMapping, filteredSheetData, currentStep, pictures, cropRects } from '$lib/stores';
  import { downloadDriveImage, isGoogleDriveUrl, createImageObjectUrl } from '$lib/google';
  import Navigator from './subcomponents/Navigator.svelte';
  import PhotoCard from './subcomponents/PhotoCard.svelte';
  import * as tf from '@tensorflow/tfjs';
  import * as blazeface from '@tensorflow-models/blazeface';

  let photos = $state<PhotoInfo[]>([]);
  let isProcessing = $state(false);
  let processedCount = $state(0);
  let totalCount = $state(0);
  let detector: blazeface.BlazeFaceModel | undefined;
  let detectorPromise: Promise<void> | undefined;

  interface PhotoInfo {
    name: string;
    url: string;
    status: 'loading' | 'success' | 'error';
    objectUrl?: string;
    retryCount: number;
    cropData?: { x: number; y: number; width: number; height: number };
    faceDetectionStatus?: 'pending' | 'processing' | 'completed' | 'failed' | 'manual';
  }

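  // Lazily load the BlazeFace model once; concurrent callers share the same promise.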
  function initializeDetector() {
    if (!detectorPromise) {
      detectorPromise = (async () => {
        console.log('Initializing face detector...');
        await tf.setBackend('webgl');
        await tf.ready();
        detector = await blazeface.load();
        console.log('BlazeFace model loaded');
      })();
    }
    return detectorPromise;
  }

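  // Collect the unique photo URLs from the valid rows and download them in batches of
  // `concurrencyLimit`, updating the progress counters as each photo finishes.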
  async function processPhotosInParallel() {
    if (isProcessing) return;

    console.log('Starting processPhotos in parallel...');
    isProcessing = true;
    processedCount = 0;

    const validRows = $filteredSheetData.filter((row) => row._isValid);
    const photoUrls = new Set<string>();
    const photoMap = new Map<string, any[]>();

    validRows.forEach((row: any) => {
      const photoUrl = row.pictureUrl;
      if (photoUrl && photoUrl.trim()) {
        const trimmedUrl = photoUrl.trim();
        photoUrls.add(trimmedUrl);
        if (!photoMap.has(trimmedUrl)) {
          photoMap.set(trimmedUrl, []);
        }
        photoMap.get(trimmedUrl)!.push(row);
      }
    });

    totalCount = photoUrls.size;
    console.log(`Found ${totalCount} unique photo URLs`);

    photos = Array.from(photoUrls).map((url) => ({
      name: photoMap.get(url)![0].name + ' ' + photoMap.get(url)![0].surname,
      url,
      status: 'loading' as const,
      retryCount: 0,
      faceDetectionStatus: 'pending' as const
    }));

    const concurrencyLimit = 5;
    const promises = [];

    for (let i = 0; i < photos.length; i++) {
      const promise = (async () => {
        await loadPhoto(i);
        processedCount++;
      })();
      promises.push(promise);

      if (promises.length >= concurrencyLimit) {
        await Promise.all(promises);
        promises.length = 0;
      }
    }

    await Promise.all(promises);

    isProcessing = false;
    console.log('All photos processed.');
  }

  // Initialize detector and process photos
  onMount(() => {
    console.log('StepGallery mounted');
    initializeDetector(); // Start loading model
    if ($filteredSheetData.length > 0 && $columnMapping.pictureUrl !== undefined) {
      console.log('Processing photos for gallery step');
      processPhotosInParallel();
    } else {
      console.log('No data to process:', {
        dataLength: $filteredSheetData.length,
        pictureUrlMapping: $columnMapping.pictureUrl
      });
    }
  });

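  // Fetch a single photo, either through the Google Drive helper or a plain fetch().
  // HEIC/HEIF files are handed off to convertHeicPhoto(); everything else goes straight
  // to processLoadedBlob().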
  async function loadPhoto(index: number, isRetry = false) {
    const photo = photos[index];

    if (!isRetry) {
      photo.status = 'loading';
      // No need to reassign photos array with $state reactivity
    }

    try {
      let blob: Blob;

      if (isGoogleDriveUrl(photo.url)) {
        // Download from Google Drive
        console.log(`Downloading from Google Drive: ${photo.name}`);
        blob = await downloadDriveImage(photo.url);
      } else {
        // For direct URLs, convert to blob
        const response = await fetch(photo.url);
        blob = await response.blob();
      }

      // Check for HEIC/HEIF format. If so, start conversion but don't block.
      if (
        blob.type === 'image/heic' ||
        blob.type === 'image/heif' ||
        photo.url.toLowerCase().endsWith('.heic')
      ) {
        console.log(`HEIC detected for ${photo.name}. Starting conversion in background.`);
        photo.status = 'loading'; // Visually indicate something is happening
        // Don't await this, let it run in the background
        convertHeicPhoto(index, blob);
        return; // End loadPhoto here for HEIC, conversion will handle the rest
      }

      // For non-HEIC images, proceed as normal
      await processLoadedBlob(index, blob);
    } catch (error) {
      console.error(`Failed to load photo for ${photo.name}:`, error);
      photo.status = 'error';
    }
  }

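  // Convert a HEIC/HEIF blob to JPEG in the browser using the dynamically imported
  // heic-convert browser build, then continue with processLoadedBlob().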
  async function convertHeicPhoto(index: number, blob: Blob) {
    const photo = photos[index];
    try {
      console.log(`Converting HEIC with heic-convert for ${photo.name}...`);
      // Dynamically import the browser-specific version of the library
      const { default: convert } = await import('heic-convert/browser');

      const inputBuffer = await blob.arrayBuffer();
      const outputBuffer = await convert({
        buffer: new Uint8Array(inputBuffer), // heic-convert expects a Uint8Array
        format: 'JPEG',
        quality: 0.9
      });

      const convertedBlob = new Blob([outputBuffer], { type: 'image/jpeg' });

      console.log(`Successfully converted HEIC for ${photo.name}`);

      // Now that it's converted, process it like any other image
      await processLoadedBlob(index, convertedBlob);
    } catch (e) {
      console.error(`Failed to convert HEIC image for ${photo.name}:`, e);
      photo.status = 'error';
    }
  }

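  // Verify the blob decodes as an image, expose it through an object URL, record it in the
  // pictures store, and kick off automatic face detection.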
  async function processLoadedBlob(index: number, blob: Blob) {
    const photo = photos[index];
    try {
      const objectUrl = createImageObjectUrl(blob);

      // Test if image loads properly
      await new Promise<void>((resolve, reject) => {
        const img = new Image();
        img.onload = () => resolve();
        img.onerror = (error) => {
          console.error(`Failed to load image for ${photo.name}:`, error);
          reject(new Error('Failed to load image'));
        };
        img.src = objectUrl;
      });

      photo.objectUrl = objectUrl;
      photo.status = 'success';
      console.log(`Photo loaded successfully: ${photo.name}`);

      // Save to pictures store
      pictures.update((pics) => ({
        ...pics,
        [photo.url]: {
          id: photo.url,
          blob: blob,
          url: objectUrl,
          downloaded: true,
          faceDetected: false,
          faceCount: 0
        }
      }));

      // Automatically run face detection to generate crop
      await detectFaceForPhoto(index);
    } catch (error) {
      console.error(`Failed to process blob for ${photo.name}:`, error);
      photo.status = 'error';
    }
  }

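  // Run BlazeFace on the loaded image and derive a crop box around the most confident face:
  // the face box is enlarged by PUBLIC_CROP_SCALE, shaped to the PUBLIC_CROP_RATIO aspect
  // ratio, shifted by the offset settings, clamped to the image bounds, and then saved to
  // the cropRects store.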
  async function detectFaceForPhoto(index: number) {
    try {
      await initializeDetector(); // Ensure detector is loaded
      if (!detector) {
        photos[index].faceDetectionStatus = 'failed';
        console.error('Face detector not available.');
        return;
      }

      photos[index].faceDetectionStatus = 'processing';
      const img = new Image();
      img.crossOrigin = 'anonymous';
      img.src = photos[index].objectUrl!;
      await new Promise((r, e) => {
        img.onload = r;
        img.onerror = e;
      });
      const predictions = await detector.estimateFaces(img, false);

      if (predictions.length > 0) {
        const getProbability = (p: number | tf.Tensor) =>
          typeof p === 'number' ? p : p.dataSync()[0];

        const face = predictions.sort(
          (a, b) => getProbability(b.probability!) - getProbability(a.probability!)
        )[0];
        // Coordinates in displayed image space
        let [x1, y1] = face.topLeft as [number, number];
        let [x2, y2] = face.bottomRight as [number, number];
        // Scale to natural image size
        const scaleX = img.naturalWidth / img.width;
        const scaleY = img.naturalHeight / img.height;
        const faceWidth = (x2 - x1) * scaleX;
        const faceHeight = (y2 - y1) * scaleY;
        const faceCenterX = (x1 + (x2 - x1) / 2) * scaleX;
        const faceCenterY = (y1 + (y2 - y1) / 2) * scaleY;
        // Load crop config from env
        const cropRatio = parseFloat(env.PUBLIC_CROP_RATIO || '1.0');
        const offsetX = parseFloat(env.PUBLIC_FACE_OFFSET_X || '0.0');
        const offsetY = parseFloat(env.PUBLIC_FACE_OFFSET_Y || '0.0');
        const cropScale = parseFloat(env.PUBLIC_CROP_SCALE || '2.5');
        // Compute crop size and center
        let cropWidth = faceWidth * cropScale;
        let cropHeight = cropWidth / cropRatio;

        // If crop is larger than image, scale it down while maintaining aspect ratio
        if (cropWidth > img.naturalWidth || cropHeight > img.naturalHeight) {
          const widthRatio = img.naturalWidth / cropWidth;
          const heightRatio = img.naturalHeight / cropHeight;
          const scale = Math.min(widthRatio, heightRatio);
          cropWidth *= scale;
          cropHeight *= scale;
        }

        let centerX = faceCenterX + cropWidth * offsetX;
        let centerY = faceCenterY + cropHeight * offsetY;

        // Clamp center to ensure crop fits
        centerX = Math.max(cropWidth / 2, Math.min(centerX, img.naturalWidth - cropWidth / 2));
        centerY = Math.max(cropHeight / 2, Math.min(centerY, img.naturalHeight - cropHeight / 2));

        const cropX = centerX - cropWidth / 2;
        const cropY = centerY - cropHeight / 2;

        const crop = {
          x: Math.round(Math.max(0, cropX)),
          y: Math.round(Math.max(0, cropY)),
          width: Math.round(cropWidth),
          height: Math.round(cropHeight)
        };
        photos[index].cropData = crop;
        photos[index].faceDetectionStatus = 'completed';

        // Save crop data to store
        cropRects.update((crops) => ({
          ...crops,
          [photos[index].url]: crop
        }));

        // Update pictures store with face detection info
        pictures.update((pics) => ({
          ...pics,
          [photos[index].url]: {
            ...pics[photos[index].url],
            faceDetected: true,
            faceCount: predictions.length
          }
        }));
      } else {
        photos[index].faceDetectionStatus = 'failed';
      }
    } catch (error) {
      console.error(`Face detection failed for ${photos[index].name}:`, error);
      photos[index].faceDetectionStatus = 'failed';
    }
    // No need to reassign photos array with $state reactivity
  }

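  // Manual helpers: retryPhoto re-runs loadPhoto up to three times, and handleCropUpdate
  // stores a crop that was adjusted by hand in the PhotoCard editor.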
  async function retryPhoto(index: number) {
    const photo = photos[index];

    if (photo.retryCount >= 3) {
      return; // Max retries reached
    }

    photo.retryCount++;
    await loadPhoto(index, true);
  }

  function handleCropUpdate(
    index: number,
    detail: { cropData: { x: number; y: number; width: number; height: number } }
  ) {
    photos[index].cropData = detail.cropData;
    photos[index].faceDetectionStatus = 'manual';

    // Save updated crop data to store
    cropRects.update((crops) => ({
      ...crops,
      [photos[index].url]: detail.cropData
    }));

    // No need to reassign photos array with $state reactivity
  }

  // Cleanup object URLs when component is destroyed
  function cleanupObjectUrls() {
    photos.forEach((photo) => {
      if (photo.objectUrl && photo.objectUrl.startsWith('blob:')) {
        URL.revokeObjectURL(photo.objectUrl);
      }
    });
  }

  const canProceed = $derived(() => {
    const hasPhotos = photos.length > 0;
    const allLoaded = photos.every((photo) => photo.status === 'success');
    const allCropped = photos.every((photo) => photo.cropData);

    return hasPhotos && allLoaded && allCropped;
  });

  // Cleanup on unmount using $effect
  $effect(() => {
    return () => {
      cleanupObjectUrls();
    };
  });
</script>

<div class="p-6">
  <div class="max-w-6xl mx-auto">
    <div class="mb-6">
      <h2 class="mb-2 text-xl font-semibold text-gray-900">Review & Crop Photos</h2>

      <p class="mb-4 text-sm text-gray-700">
        Photos are automatically cropped using face detection. Click the pen icon to manually adjust
        the crop area.
      </p>
    </div>

    <!-- Processing Status -->
    {#if isProcessing}
      <div class="mb-6 rounded-lg border border-blue-200 bg-blue-50 p-4">
        <div class="flex items-center justify-between">
          <div class="flex items-center">
            <div
              class="mr-3 h-5 w-5 animate-spin rounded-full border-2 border-blue-600 border-t-transparent"
            ></div>
            <span class="text-sm text-blue-800"> Processing photos... </span>
          </div>
          <span class="text-sm text-blue-600">
            {processedCount} / {totalCount}
          </span>
        </div>

        {#if totalCount > 0}
          <div class="mt-3 h-2 w-full rounded-full bg-blue-200">
            <div
              class="h-2 rounded-full bg-blue-600 transition-all duration-300"
              style="width: {(processedCount / totalCount) * 100}%"
            ></div>
          </div>
        {/if}
      </div>
    {/if}

    <!-- Summary Stats -->
    {#if !isProcessing && photos.length > 0}
      <div class="mb-6 rounded-lg border border-gray-200 bg-gray-50 p-4">
        <h3 class="mb-3 text-sm font-medium text-gray-700">Processing Summary</h3>

        <div class="grid grid-cols-2 gap-4 text-sm md:grid-cols-5">
          <div class="text-center">
            <div class="text-2xl font-bold text-gray-900">{photos.length}</div>
            <div class="text-gray-600">Total Photos</div>
          </div>

          <div class="text-center">
            <div class="text-2xl font-bold text-green-600">
              {photos.filter((p) => p.status === 'success').length}
            </div>
            <div class="text-gray-600">Loaded</div>
          </div>

          <div class="text-center">
            <div class="text-2xl font-bold text-blue-600">
              {photos.filter((p) => p.faceDetectionStatus === 'completed').length}
            </div>
            <div class="text-gray-600">Auto-cropped</div>
          </div>

          <div class="text-center">
            <div class="text-2xl font-bold text-purple-600">
              {photos.filter((p) => p.cropData).length}
            </div>
            <div class="text-gray-600">Ready</div>
          </div>

          <div class="text-center">
            <div class="text-2xl font-bold text-red-600">
              {photos.filter((p) => p.status === 'error').length}
            </div>
            <div class="text-gray-600">Failed</div>
          </div>
        </div>

        {#if photos.filter((p) => p.status === 'error').length > 0}
          <div class="mt-4 rounded border border-yellow-200 bg-yellow-50 p-3">
            <p class="text-sm text-yellow-800">
              <strong>Note:</strong> Cards will only be generated for photos that load successfully.
            </p>
          </div>
        {/if}

        {#if !canProceed() && photos.filter((p) => p.status === 'success').length > 0}
          <div class="mt-4 rounded border border-blue-200 bg-blue-50 p-3">
            <p class="text-sm text-blue-800">
              <strong>Tip:</strong> All photos need to be cropped before proceeding. Face detection runs
              automatically.
            </p>
          </div>
        {/if}
      </div>
    {/if}

    <!-- Photo Grid -->
    <div class="mb-6 overflow-hidden rounded-lg bg-white">
      {#if photos.length === 0 && !isProcessing}
        <div class="py-12 text-center">
          <svg
            class="mx-auto h-12 w-12 text-gray-400"
            fill="none"
            viewBox="0 0 24 24"
            stroke="currentColor"
          >
            <path
              stroke-linecap="round"
              stroke-linejoin="round"
              stroke-width="2"
              d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 002 2z"
            />
          </svg>
          <h3 class="mt-2 text-sm font-medium text-gray-900">No photos found</h3>
          <p class="mt-1 text-sm text-gray-500">
            Go back to check your column mapping and selected rows.
          </p>
        </div>
      {:else}
        <div class="grid grid-cols-1 gap-6 md:grid-cols-2 lg:grid-cols-2 xl:grid-cols-3">
          {#each photos as photo, index}
            <PhotoCard
              {photo}
              onCropUpdated={(e) => handleCropUpdate(index, e)}
              onRetry={() => retryPhoto(index)}
            />
          {/each}
        </div>
      {/if}
    </div>

    <!-- Navigation -->
    <Navigator
      canProceed={canProceed()}
      {currentStep}
      textBack="Back to Row Filter"
      textForwardDisabled="Waiting for photos"
      textForwardEnabled={`Generate ${photos.filter((p) => p.status === 'success' && p.cropData).length} Cards`}
    />
  </div>
</div>