4 Commits

Author         SHA1         Message                                 Date
Roman Krček    e616165a77   Fix service worker mess                 2025-09-03 10:38:30 +02:00
Roman Krček    238d2eebc5   Fix worker reloads                      2025-09-03 10:22:59 +02:00
Roman Krček    aedf260551   Fixed problem where auth is bypassed    2025-09-03 10:17:20 +02:00
Roman Krček    f1179ddc09   Fix when people order multiple times    2025-09-03 08:34:22 +02:00
3 changed files with 92 additions and 78 deletions

View File

@@ -1,7 +1,7 @@
 {
 	"name": "scan-wave",
 	"private": true,
-	"version": "0.0.1",
+	"version": "0.0.2",
 	"type": "module",
 	"scripts": {
 		"dev": "vite dev",

View File

@@ -138,35 +138,64 @@
 		if (rows.length === 0) throw new Error('No data found in sheet');
 
-		// Extract participant data based on column mapping
-		const names: string[] = [];
-		const surnames: string[] = [];
-		const emails: string[] = [];
+		// --- Start of new logic to handle duplicates ---
 
-		// Skip header row (start from index 1)
+		// First, extract all potential participants from the sheet
+		const potentialParticipants = [];
 		for (let i = 1; i < rows.length; i++) {
 			const row = rows[i];
 			if (row.length > 0) {
 				const name = row[event.name_column - 1] || '';
 				const surname = row[event.surname_column - 1] || '';
-				const email = row[event.email_column - 1] || '';
+				const email = (row[event.email_column - 1] || '').trim();
 				const confirmation = row[event.confirmation_column - 1] || '';
 
-				// Only add if the row has meaningful data (not all empty) AND confirmation is TRUE
 				const isConfirmed =
 					confirmation.toString().toLowerCase() === 'true' ||
 					confirmation.toString().toLowerCase() === 'yes' ||
 					confirmation === '1' ||
 					confirmation === 'x';
 
-				if ((name.trim() || surname.trim() || email.trim()) && isConfirmed) {
-					names.push(name.trim());
-					surnames.push(surname.trim());
-					emails.push(email.trim());
+				if ((name.trim() || surname.trim() || email) && isConfirmed) {
+					potentialParticipants.push({ name: name.trim(), surname: surname.trim(), email });
 				}
 			}
 		}
 
+		// Create a map to count occurrences of each unique participant combination
+		const participantCounts = new Map<string, number>();
+		for (const p of potentialParticipants) {
+			const key = `${p.name}|${p.surname}|${p.email}`.toLowerCase(); // Create a unique key
+			participantCounts.set(key, (participantCounts.get(key) || 0) + 1);
+		}
+
+		// Create final arrays, modifying duplicate surnames to be unique
+		const names: string[] = [];
+		const surnames: string[] = [];
+		const emails: string[] = [];
+		const processedParticipants = new Map<string, number>();
+
+		for (const p of potentialParticipants) {
+			const key = `${p.name}|${p.surname}|${p.email}`.toLowerCase();
+			let finalSurname = p.surname;
+
+			// If this participant is a duplicate
+			if (participantCounts.get(key)! > 1) {
+				const count = (processedParticipants.get(key) || 0) + 1;
+				processedParticipants.set(key, count);
+
+				// If it's not the first occurrence, append a counter to the surname
+				if (count > 1) {
+					finalSurname = `${p.surname} (${count})`;
+				}
+			}
+
+			names.push(p.name);
+			surnames.push(finalSurname);
+			emails.push(p.email); // Keep the original email
+		}
+		// --- End of new logic ---
+
 		// Call database function to add participants
 		const { error: syncError } = await data.supabase.rpc('participants_add_bulk', {
 			p_event: eventId,

View File

@@ -1,86 +1,71 @@
+/// <reference lib="webworker" />
 /// <reference types="@sveltejs/kit" />
 import { build, files, version } from '$service-worker';
 
-// Create a unique cache name for this deployment
+declare const self: ServiceWorkerGlobalScope;
+
 const CACHE = `cache-${version}`;
 
 const ASSETS = [
-	...build, // the app itself
-	...files // everything in `static`
+	...build,
+	...files
 ];
 
-self.addEventListener('install', (event) => {
-	// Create a new cache and add all files to it
-	async function addFilesToCache() {
+self.addEventListener('install', (event: ExtendableEvent) => {
+	const addFilesToCache = async () => {
 		const cache = await caches.open(CACHE);
 		await cache.addAll(ASSETS);
-	}
+	};
 
+	console.log("[SW] Installing new service worker");
 	event.waitUntil(addFilesToCache());
+	self.skipWaiting();
 });
 
-self.addEventListener('activate', (event) => {
-	// Remove previous cached data from disk
-	async function deleteOldCaches() {
+self.addEventListener('activate', (event: ExtendableEvent) => {
+	const deleteOldCaches = async () => {
 		for (const key of await caches.keys()) {
 			if (key !== CACHE) await caches.delete(key);
+			console.log("[SW] Removing old service worker")
 		}
-	}
+	};
 
 	event.waitUntil(deleteOldCaches());
+	self.clients.claim();
 });
 
-self.addEventListener('fetch', (event) => {
-	// ignore POST requests etc
+self.addEventListener('fetch', (event: FetchEvent) => {
 	if (event.request.method !== 'GET') return;
 
-	async function respond() {
-		const url = new URL(event.request.url);
+	const url = new URL(event.request.url);
 
-		// Skip caching for auth routes
-		if (url.pathname.startsWith('/auth/')) {
-			return fetch(event.request);
-		}
+	// Never cache private routes
+	if (url.pathname.startsWith('/private')) {
+		event.respondWith(fetch(event.request));
+		return;
+	}
 
+	const respond = async () => {
 		const cache = await caches.open(CACHE);
 
-		// `build`/`files` can always be served from the cache
 		if (ASSETS.includes(url.pathname)) {
-			const response = await cache.match(url.pathname);
-
-			if (response) {
-				return response;
-			}
+			const cached = await cache.match(url.pathname);
+			if (cached) return cached;
 		}
 
-		// for everything else, try the network first, but
-		// fall back to the cache if we're offline
 		try {
 			const response = await fetch(event.request);
-
-			// if we're offline, fetch can return a value that is not a Response
-			// instead of throwing - and we can't pass this non-Response to respondWith
-			if (!(response instanceof Response)) {
-				throw new Error('invalid response from fetch');
-			}
-
-			if (response.status === 200) {
+			if (response.status === 200 && build.length > 0 && url.pathname.startsWith(`/${build[0]}/`)) {
 				cache.put(event.request, response.clone());
 			}
-
 			return response;
-		} catch (err) {
-			const response = await cache.match(event.request);
-
-			if (response) {
-				return response;
-			}
-
-			// if there's no cache, then just error out
-			// as there is nothing we can do to respond to this request
-			throw err;
+		} catch {
+			const cached = await cache.match(event.request);
+			if (cached) return cached;
 		}
-	}
+
+		return new Response('Not found', { status: 404 });
+	};
 
 	event.respondWith(respond());
 });
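
Note on the lifecycle change: self.skipWaiting() activates the freshly installed worker without waiting for old tabs to close, and self.clients.claim() lets it take over pages that are already open, which is what makes the reload fix take effect immediately. A minimal client-side sketch, assumed rather than taken from this diff, that reloads a page once the new worker takes control:

	// Hypothetical client-side companion (not in this repo): reload once when the
	// updated service worker becomes the controller, guarding against reload loops.
	if ('serviceWorker' in navigator) {
		let reloaded = false;
		navigator.serviceWorker.addEventListener('controllerchange', () => {
			if (reloaded) return;
			reloaded = true;
			window.location.reload();
		});
	}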