Memory leak fixes

.github/copilot-instructions.md (vendored, +1)

@@ -14,3 +14,4 @@
 - Remain consistent in styling and code structure.
 - Avoid unncessary iterations. If problems is mostly solved, stop.
 - Split big components into subcomponents. Always create smaller subcomponents for better context management later.
+- Do not do what you're not being asked. Stick to scope of my request.

package-lock.json (generated, +7)

@@ -19,6 +19,7 @@
         "fontkit": "^2.0.4",
         "heic-convert": "^2.1.0",
         "idb": "^8.0.3",
+        "idb-keyval": "^6.2.2",
         "p-queue": "^8.1.0",
         "pdf-lib": "^1.17.1",
         "uuid": "^11.1.0"
@@ -1194,6 +1195,12 @@
       "version": "8.0.3",
       "license": "ISC"
     },
+    "node_modules/idb-keyval": {
+      "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/idb-keyval/-/idb-keyval-6.2.2.tgz",
+      "integrity": "sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg==",
+      "license": "Apache-2.0"
+    },
     "node_modules/is-core-module": {
       "version": "2.16.1",
       "dev": true,

package.json
@@ -40,6 +40,7 @@
     "fontkit": "^2.0.4",
     "heic-convert": "^2.1.0",
     "idb": "^8.0.3",
+    "idb-keyval": "^6.2.2",
     "p-queue": "^8.1.0",
     "pdf-lib": "^1.17.1",
     "uuid": "^11.1.0"

StepGallery.svelte
@@ -7,6 +7,8 @@
   import PhotoCard from './subcomponents/PhotoCard.svelte';
   import * as tf from '@tensorflow/tfjs';
   import * as blazeface from '@tensorflow-models/blazeface';
+  import PQueue from 'p-queue';
+  import { set, clear } from 'idb-keyval';

   let photos = $state<PhotoInfo[]>([]);
   let isProcessing = $state(false);
@@ -14,6 +16,8 @@
   let totalCount = $state(0);
   let detector: blazeface.BlazeFaceModel | undefined;
   let detectorPromise: Promise<void> | undefined;
+  let downloadQueue: PQueue;
+  let faceDetectionQueue: PQueue;

   interface PhotoInfo {
     name: string;
@@ -38,13 +42,59 @@
     return detectorPromise;
   }

+  // Force memory cleanup
+  async function forceMemoryCleanup() {
+    await tf.nextFrame(); // Wait for any pending GPU operations
+
+    // Log memory state without aggressive cleanup
+    const memInfo = tf.memory();
+    console.log('Memory status:', {
+      tensors: memInfo.numTensors,
+      dataBuffers: memInfo.numDataBuffers,
+      bytes: memInfo.numBytes
+    });
+
+    // Only run garbage collection if available, don't dispose variables
+    if (typeof window !== 'undefined' && 'gc' in window) {
+      (window as any).gc();
+    }
+  }
+
   async function processPhotosInParallel() {
     if (isProcessing) return;

-    console.log('Starting processPhotos in parallel...');
+    console.log('Starting processPhotos with queues...');
     isProcessing = true;
     processedCount = 0;

+    try {
+      // Clear previous session's images from IndexedDB
+      await clear();
+      console.log('Cleared IndexedDB.');
+    } catch (e) {
+      console.error('Could not clear IndexedDB:', e);
+    }
+
+    // Initialize queues with more conservative concurrency
+    downloadQueue = new PQueue({ concurrency: 3 }); // Reduced from 5
+    faceDetectionQueue = new PQueue({ concurrency: 1 }); // Keep at 1 for memory safety
+
+    // When both queues are idle, we're done
+    downloadQueue.on('idle', async () => {
+      if (faceDetectionQueue.size === 0 && faceDetectionQueue.pending === 0) {
+        await forceMemoryCleanup(); // Clean up memory when processing is complete
+        isProcessing = false;
+        console.log('All queues are idle. Processing complete.');
+      }
+    });
+    faceDetectionQueue.on('idle', async () => {
+      if (downloadQueue.size === 0 && downloadQueue.pending === 0) {
+        await forceMemoryCleanup(); // Clean up memory when processing is complete
+        isProcessing = false;
+        console.log('All queues are idle. Processing complete.');
+      }
+    });
+
     const validRows = $filteredSheetData.filter((row) => row._isValid);
     const photoUrls = new Set<string>();
     const photoMap = new Map<string, any[]>();
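
Note: the queue wiring above relies on two p-queue features, a per-queue `concurrency` limit and the `idle` event, which fires once a queue has nothing waiting (`size === 0`) and nothing running (`pending === 0`). A minimal standalone sketch of the same two-stage pattern (not code from this commit; the task bodies are placeholders):

    import PQueue from 'p-queue';

    // Stage 1 (downloads) may overlap; the memory-heavy stage 2 runs one task at a time.
    const stageOne = new PQueue({ concurrency: 3 });
    const stageTwo = new PQueue({ concurrency: 1 });

    const isDrained = (q: PQueue) => q.size === 0 && q.pending === 0;

    // 'idle' fires per queue, so each handler checks the other queue;
    // whichever queue finishes last reports overall completion.
    stageOne.on('idle', () => {
      if (isDrained(stageTwo)) console.log('all work finished');
    });
    stageTwo.on('idle', () => {
      if (isDrained(stageOne)) console.log('all work finished');
    });

    for (let i = 0; i < 10; i++) {
      stageOne.add(async () => {
        // ...download work for item i would go here...
        stageTwo.add(() => console.log(`heavy work for item ${i}`));
      });
    }
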
@@ -62,7 +112,7 @@
     });

     totalCount = photoUrls.size;
-    console.log(`Found ${totalCount} unique photo URLs`);
+    console.log(`Found ${totalCount} unique photo URLs to process.`);

     photos = Array.from(photoUrls).map((url) => ({
       name: photoMap.get(url)![0].name + ' ' + photoMap.get(url)![0].surname,
@@ -72,28 +122,12 @@
       faceDetectionStatus: 'pending' as const
     }));

-    const concurrencyLimit = 5;
-    const promises = [];
-
+    // Add all photos to the download queue
     for (let i = 0; i < photos.length; i++) {
-      const promise = (async () => {
-        await loadPhoto(i);
-        processedCount++;
-      })();
-      promises.push(promise);
-
-      if (promises.length >= concurrencyLimit) {
-        await Promise.all(promises);
-        promises.length = 0;
-      }
+      downloadQueue.add(() => loadPhoto(i));
     }
-
-    await Promise.all(promises);
-
-    isProcessing = false;
-    console.log('All photos processed.');
   }

   // Initialize detector and process photos
   onMount(() => {
     console.log('StepGallery mounted');
@@ -148,6 +182,8 @@
     } catch (error) {
       console.error(`Failed to load photo for ${photo.name}:`, error);
       photo.status = 'error';
+      // Since this step failed, we still count it as "processed" to not stall the progress bar
+      processedCount++;
     }
   }

@@ -175,6 +211,8 @@
     } catch (e) {
       console.error(`Failed to convert HEIC image for ${photo.name}:`, e);
       photo.status = 'error';
+      // Since this step failed, we still count it as "processed" to not stall the progress bar
+      processedCount++;
     }
   }

@@ -198,12 +236,14 @@
       photo.status = 'success';
       console.log(`Photo loaded successfully: ${photo.name}`);

-      // Save to pictures store
+      // Save blob to IndexedDB instead of the store
+      await set(photo.url, blob);
+
+      // Save to pictures store, but without the blob to save memory
       pictures.update((pics) => ({
         ...pics,
         [photo.url]: {
           id: photo.url,
-          blob: blob,
           url: objectUrl,
           downloaded: true,
           faceDetected: false,
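
Note: with this change the heavy `Blob` lives in IndexedDB keyed by the photo URL, while the reactive store keeps only the lightweight object URL. idb-keyval's `set`/`get` are thin promise wrappers over a single IndexedDB object store, and Blobs are structured-cloneable, so the round trip looks roughly like this sketch (function names are illustrative, not from the commit):

    import { get, set } from 'idb-keyval';

    async function stashPhoto(key: string, blob: Blob): Promise<string> {
      await set(key, blob);              // persist the Blob so the reactive store never has to hold it
      return URL.createObjectURL(blob);  // only a small object-URL string stays in component state
    }

    async function restorePhoto(key: string): Promise<Blob | undefined> {
      return get<Blob>(key);             // resolves to undefined if nothing was stored under the key
    }
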
@@ -211,32 +251,47 @@
         }
       }));

-      // Automatically run face detection to generate crop
-      await detectFaceForPhoto(index);
+      // Add face detection to its queue
+      faceDetectionQueue.add(() => detectFaceForPhoto(index));
     } catch (error) {
       console.error(`Failed to process blob for ${photo.name}:`, error);
       photo.status = 'error';
+      // Since this step failed, we still count it as "processed" to not stall the progress bar
+      processedCount++;
     }
   }

   async function detectFaceForPhoto(index: number) {
+    const photo = photos[index];
+    let imageTensor;
     try {
       await initializeDetector(); // Ensure detector is loaded
       if (!detector) {
-        photos[index].faceDetectionStatus = 'failed';
+        photo.faceDetectionStatus = 'failed';
         console.error('Face detector not available.');
         return;
       }

-      photos[index].faceDetectionStatus = 'processing';
+      photo.faceDetectionStatus = 'processing';
       const img = new Image();
       img.crossOrigin = 'anonymous';
-      img.src = photos[index].objectUrl!;
+      img.src = photo.objectUrl!;
       await new Promise((r, e) => {
         img.onload = r;
         img.onerror = e;
       });
-      const predictions = await detector.estimateFaces(img, false);
+      // Create tensor and manually dispose it after use
+      imageTensor = tf.browser.fromPixels(img);
+      const predictions = await detector.estimateFaces(imageTensor, false);
+
+      // Log memory usage for debugging
+      const memInfo = tf.memory();
+      console.log(`TensorFlow.js memory after face detection for ${photo.name}:`, {
+        numTensors: memInfo.numTensors,
+        numDataBuffers: memInfo.numDataBuffers,
+        numBytes: memInfo.numBytes
+      });
+
       if (predictions.length > 0) {
         const getProbability = (p: number | tf.Tensor) =>
@@ -245,26 +300,27 @@
         const face = predictions.sort(
           (a, b) => getProbability(b.probability!) - getProbability(a.probability!)
         )[0];
-        // Coordinates in displayed image space
-        let [x1, y1] = face.topLeft as [number, number];
-        let [x2, y2] = face.bottomRight as [number, number];
-        // Scale to natural image size
+        const topLeft = face.topLeft as [number, number];
+        const bottomRight = face.bottomRight as [number, number];
+        let [x1, y1] = topLeft;
+        let [x2, y2] = bottomRight;
         const scaleX = img.naturalWidth / img.width;
         const scaleY = img.naturalHeight / img.height;
         const faceWidth = (x2 - x1) * scaleX;
         const faceHeight = (y2 - y1) * scaleY;
         const faceCenterX = (x1 + (x2 - x1) / 2) * scaleX;
         const faceCenterY = (y1 + (y2 - y1) / 2) * scaleY;
-        // Load crop config from env
         const cropRatio = parseFloat(env.PUBLIC_CROP_RATIO || '1.0');
         const offsetX = parseFloat(env.PUBLIC_FACE_OFFSET_X || '0.0');
         const offsetY = parseFloat(env.PUBLIC_FACE_OFFSET_Y || '0.0');
         const cropScale = parseFloat(env.PUBLIC_CROP_SCALE || '2.5');
-        // Compute crop size and center
         let cropWidth = faceWidth * cropScale;
         let cropHeight = cropWidth / cropRatio;

-        // If crop is larger than image, scale it down while maintaining aspect ratio
         if (cropWidth > img.naturalWidth || cropHeight > img.naturalHeight) {
           const widthRatio = img.naturalWidth / cropWidth;
           const heightRatio = img.naturalHeight / cropHeight;
@@ -276,9 +332,11 @@
         let centerX = faceCenterX + cropWidth * offsetX;
         let centerY = faceCenterY + cropHeight * offsetY;

-        // Clamp center to ensure crop fits
         centerX = Math.max(cropWidth / 2, Math.min(centerX, img.naturalWidth - cropWidth / 2));
-        centerY = Math.max(cropHeight / 2, Math.min(centerY, img.naturalHeight - cropHeight / 2));
+        centerY = Math.max(
+          cropHeight / 2,
+          Math.min(centerY, img.naturalHeight - cropHeight / 2)
+        );
+
         const cropX = centerX - cropWidth / 2;
         const cropY = centerY - cropHeight / 2;
@@ -289,32 +347,40 @@
           width: Math.round(cropWidth),
           height: Math.round(cropHeight)
         };
-        photos[index].cropData = crop;
-        photos[index].faceDetectionStatus = 'completed';
+        photo.cropData = crop;
+        photo.faceDetectionStatus = 'completed';

-        // Save crop data to store
         cropRects.update((crops) => ({
           ...crops,
-          [photos[index].url]: crop
+          [photo.url]: crop
         }));

-        // Update pictures store with face detection info
         pictures.update((pics) => ({
           ...pics,
-          [photos[index].url]: {
-            ...pics[photos[index].url],
+          [photo.url]: {
+            ...pics[photo.url],
             faceDetected: true,
             faceCount: predictions.length
           }
         }));
       } else {
-        photos[index].faceDetectionStatus = 'failed';
+        photo.faceDetectionStatus = 'failed';
       }
     } catch (error) {
-      console.error(`Face detection failed for ${photos[index].name}:`, error);
-      photos[index].faceDetectionStatus = 'failed';
+      console.error(`Face detection failed for ${photo.name}:`, error);
+      photo.faceDetectionStatus = 'failed';
+    } finally {
+      // Manually dispose of the input tensor to prevent memory leaks
+      if (imageTensor) {
+        imageTensor.dispose();
+      }
+
+      // Add a small delay to allow GPU memory to be freed before next operation
+      await new Promise(resolve => setTimeout(resolve, 100));
+
+      // This is the final step for a photo, so we increment the processed count here.
+      processedCount++;
     }
-    // No need to reassign photos array with $state reactivity
   }

   async function retryPhoto(index: number) {
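
Note: `tf.browser.fromPixels` allocates a tensor whose backing memory (a WebGL texture on the GPU backend) is never reclaimed by JavaScript garbage collection, so it must be released with `dispose()`; `tf.memory().numTensors` is the usual way to verify nothing is leaking. `tf.tidy` is the more common cleanup tool, but it does not track tensors created across `await` boundaries, which is presumably why disposal here happens in a `finally` block. A standalone sketch of the pattern:

    import * as tf from '@tensorflow/tfjs';

    async function detectOn(image: HTMLImageElement): Promise<void> {
      let input: tf.Tensor3D | undefined;
      try {
        input = tf.browser.fromPixels(image); // allocates backend (GPU/CPU) memory
        // ...await model inference on `input` here...
      } finally {
        input?.dispose(); // release even if the async work threw
        console.log('live tensors:', tf.memory().numTensors);
      }
    }
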
@@ -325,7 +391,8 @@
     }

     photo.retryCount++;
-    await loadPhoto(index, true);
+    // Add the retry attempt back to the download queue
+    downloadQueue.add(() => loadPhoto(index, true));
   }

   function handleCropUpdate(
@@ -364,6 +431,13 @@
   // Cleanup on unmount using $effect
   $effect(() => {
     return () => {
+      // Clear queues on component unmount to stop any ongoing processing
+      if (downloadQueue) {
+        downloadQueue.clear();
+      }
+      if (faceDetectionQueue) {
+        faceDetectionQueue.clear();
+      }
       cleanupObjectUrls();
     };
   });

@@ -3,6 +3,7 @@
   import { filteredSheetData, currentStep, pictures, cropRects } from '$lib/stores';
   import { PDFDocument, StandardFonts, rgb } from 'pdf-lib';
   import * as fontkit from 'fontkit';
+  import { clear } from 'idb-keyval';
   import {
     BORDER_CONFIG,
     TEXT_CONFIG,
@@ -10,6 +11,7 @@
     calculateGrid,
     getAbsolutePositionPt,
     getAbsolutePhotoDimensionsPt,
+    getImageBlob,
     MM_TO_PT
   } from '$lib/pdfLayout';
   import {
@@ -52,10 +54,34 @@

   let files = $state<GeneratedFile[]>(JSON.parse(JSON.stringify(initialFiles)));

+  // Cleanup function to clear IndexedDB and sensitive data
+  async function clearSensitiveData() {
+    try {
+      await clear(); // Clear all data from IndexedDB
+      console.log('IndexedDB cleared for security');
+    } catch (error) {
+      console.error('Failed to clear IndexedDB:', error);
+    }
+  }
+
+  // Handle tab close or page unload
+  function handleBeforeUnload() {
+    clearSensitiveData();
+  }
+
   onMount(() => {
+    // Add event listener for page unload
+    window.addEventListener('beforeunload', handleBeforeUnload);
+
     // Start generation automatically when the component mounts
     handleGenerate('people_data.pdf');
     handleGenerate('people_photos.pdf');
+
+    // Cleanup function when component unmounts
+    return () => {
+      window.removeEventListener('beforeunload', handleBeforeUnload);
+      clearSensitiveData();
+    };
   });

   // Load Roboto font
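
Note: one caveat with this pattern is that a `beforeunload` handler has to return synchronously, so the asynchronous `clear()` is started but never awaited and may not finish before the tab is gone; it is best-effort cleanup. A stripped-down sketch of the register/unregister flow (the cleanup body is a placeholder):

    // Best-effort cleanup on tab close: the handler itself must be synchronous.
    async function clearSensitiveData(): Promise<void> {
      // placeholder for the real IndexedDB clear()
    }

    function handleBeforeUnload(): void {
      void clearSensitiveData(); // fire-and-forget; the browser will not wait for the promise
    }

    window.addEventListener('beforeunload', handleBeforeUnload);
    // ...and symmetrically on teardown:
    // window.removeEventListener('beforeunload', handleBeforeUnload);
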
@@ -150,6 +176,13 @@
       fileToUpdate.url = URL.createObjectURL(blob);
       fileToUpdate.size = pdfBytes.length;
       fileToUpdate.state = 'done';
+
+      // Check if both PDFs are done, then clear sensitive data
+      const allDone = files.every((f) => f.state === 'done' || f.state === 'error');
+      if (allDone) {
+        console.log('All PDFs generated, clearing sensitive data...');
+        await clearSensitiveData();
+      }
     } catch (error: any) {
       console.error(`PDF generation failed for ${fileName}:`, error);
       fileToUpdate.state = 'error';
@@ -310,7 +343,10 @@

       if (pictureInfo && cropData) {
         try {
-          const croppedImageBytes = await cropImage(pictureInfo.blob, cropData);
+          // Get blob from IndexedDB instead of the store
+          const imageBlob = await getImageBlob(pictureUrl);
+          if (imageBlob) {
+            const croppedImageBytes = await cropImage(imageBlob, cropData);
           const embeddedImage = await pdfDoc.embedJpg(croppedImageBytes);

           const imageAspectRatio = embeddedImage.width / embeddedImage.height;
@@ -334,6 +370,15 @@
             width: imageWidth,
             height: imageHeight
           });
+          } else {
+            console.warn(`Image blob not found in IndexedDB for ${pictureUrl}`);
+            // Draw placeholder when blob not found
+            page.drawRectangle({
+              ...photoDimsPt,
+              borderColor: rgb(BORDER_CONFIG.color.r, BORDER_CONFIG.color.g, BORDER_CONFIG.color.b),
+              borderWidth: BORDER_CONFIG.width
+            });
+          }
         } catch (error: any) {
           console.error(`Failed to embed photo for ${row.name}:`, error);
           // Draw placeholder on error
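
Note: pdf-lib's `embedJpg` takes raw JPEG bytes (`Uint8Array`/`ArrayBuffer`), so a `Blob` coming back from IndexedDB has to be converted before embedding, and the returned image object carries its intrinsic `width`/`height` for the aspect-ratio math. A self-contained sketch of that flow (page size and placement numbers are arbitrary, not from this project):

    import { PDFDocument } from 'pdf-lib';

    async function pdfWithJpeg(blob: Blob): Promise<Uint8Array> {
      const pdfDoc = await PDFDocument.create();
      const page = pdfDoc.addPage([595, 842]); // A4 in points

      const jpgBytes = await blob.arrayBuffer();     // Blob -> ArrayBuffer for pdf-lib
      const image = await pdfDoc.embedJpg(jpgBytes); // embed once, draw as many times as needed
      const width = 200;
      const height = width * (image.height / image.width); // preserve aspect ratio

      page.drawImage(image, { x: 50, y: 400, width, height });
      return pdfDoc.save(); // Uint8Array ready to turn into a download Blob
    }
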
@@ -399,6 +444,10 @@
       }
     });
     files = JSON.parse(JSON.stringify(initialFiles));
+
+    // Clear sensitive data when starting over
+    clearSensitiveData();
+
     currentStep.set(0);
   }


@@ -107,7 +107,7 @@
     return processedRow;
   });

-  // Initially select rows based on validity and "Already Printed" status, up to 200
+  // Initially select rows based on validity and "Already Printed" status
   const rowsToConsider = processedData.filter((row) => {
     if (!row._isValid) return false;
     const alreadyPrinted = row.alreadyPrinted;
@@ -118,7 +118,7 @@
     return true;
   });

-  const initialSelection = rowsToConsider.slice(0, 200).map((row) => row._rowIndex);
+  const initialSelection = rowsToConsider.map((row) => row._rowIndex);
   selectedRows = new Set(initialSelection);

   updateSelectAllState();
@@ -131,10 +131,6 @@
     if (selectedRows.has(rowIndex)) {
       selectedRows.delete(rowIndex);
     } else {
-      if (selectedRows.size >= 200) {
-        alert('You can only select a maximum of 200 rows at a time.');
-        return;
-      }
       selectedRows.add(rowIndex);
     }
     selectedRows = new Set(selectedRows); // Trigger reactivity
@@ -150,20 +146,13 @@
         }
       });
     } else {
-      // Select all visible valid rows that aren't already printed, up to the limit
+      // Select all visible valid rows that aren't already printed
       const rowsToSelect = filteredData.filter(
         (row) => row._isValid && !isRowAlreadyPrinted(row) && !selectedRows.has(row._rowIndex)
       );

-      const availableSlots = 200 - selectedRows.size;
-      if (rowsToSelect.length > availableSlots) {
-        alert(
-          `You can only select up to 200 rows. Only the first ${availableSlots} available rows will be selected.`
-        );
-      }
-
-      for (let i = 0; i < Math.min(rowsToSelect.length, availableSlots); i++) {
-        selectedRows.add(rowsToSelect[i]._rowIndex);
+      for (const row of rowsToSelect) {
+        selectedRows.add(row._rowIndex);
       }
     }
     selectedRows = new Set(selectedRows);

src/lib/pdfLayout.ts
@@ -6,6 +6,7 @@ import {
   TEXT_FIELD_LAYOUT,
   PHOTO_FIELD_LAYOUT
 } from './pdfSettings';
+import { get } from 'idb-keyval';

 // Conversion factor from millimeters to points (1 inch = 72 points, 1 inch = 25.4 mm)
 export const MM_TO_PT = 72 / 25.4;
@@ -17,6 +18,11 @@ export interface GridLayout {
   cellHeight: number; // mm
 }

+// Function to retrieve a blob from IndexedDB
+export async function getImageBlob(url: string): Promise<Blob | undefined> {
+  return await get(url);
+}
+
 // Calculate how many cards can fit on a page.
 export function calculateGrid(
   pageWidth: number,
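
Note: idb-keyval's `get` accepts a type parameter and resolves to `undefined` when a key has never been written, so callers of a helper like `getImageBlob` have to handle the miss explicitly; the library also ships `del` and `clear` for removing one key or wiping the whole store, which is what the components above use for cleanup. A compact lifecycle sketch (keys are illustrative):

    import { set, get, del, clear } from 'idb-keyval';

    async function demo(): Promise<void> {
      await set('photo:1', new Blob(['...'])); // store a value
      const hit = await get<Blob>('photo:1');  // the stored Blob
      const miss = await get<Blob>('photo:2'); // undefined on a cache miss
      console.log(hit?.size, miss);

      await del('photo:1'); // remove a single key
      await clear();        // wipe the entire default store
    }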