Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit. Hold shift + click to select a range.
f0c7d82
fix: increase memory limit for data upload functions to 512MiB
jodeleeuw Mar 30, 2026
3631a34
Merge pull request #144 from jspsych/fix/increase-function-memory
jodeleeuw Mar 30, 2026
8033e69
perf: skip metadata processing when metadata is not active
jodeleeuw Mar 30, 2026
77f1184
test: add emulator test for skipping metadata when inactive
jodeleeuw Mar 30, 2026
07996fe
ci: run test workflow on PRs against test branch
jodeleeuw Mar 30, 2026
639356b
ci: run test workflow on PRs against test branch
jodeleeuw Mar 30, 2026
cf8c957
Merge pull request #145 from jspsych/fix/skip-metadata-when-inactive
jodeleeuw Mar 30, 2026
081c0f6
feat: persist data to Cloud Storage before heavy processing to preven…
jodeleeuw Mar 30, 2026
6ea7665
feat: add scheduled recovery function for orphaned pending data
jodeleeuw Mar 30, 2026
de234a2
fix: add PUT handler to mock server and fix test assertions
jodeleeuw Mar 30, 2026
1c44709
fix: use separate port for early-persist mock server to avoid EADDRINUSE
jodeleeuw Mar 31, 2026
2d045f1
fix: increase waitForLog timeout to 30s for CI reliability
jodeleeuw Mar 31, 2026
dc2c36d
Simplify pending recovery to promote orphaned files into upload queue
jodeleeuw Mar 31, 2026
2c4b08a
Add user-friendly failure explanations to upload queue dashboard
jodeleeuw Mar 31, 2026
8a6771c
Redesign queue panel: hide pending, surface failed, add success state
jodeleeuw Mar 31, 2026
c6b7356
Show all queued uploads immediately with download access
jodeleeuw Mar 31, 2026
2873cae
Merge pull request #146 from jspsych/fix/early-persist-data-loss-prev…
jodeleeuw Mar 31, 2026
f67c812
fix: address code review findings from PR #147
jodeleeuw Mar 31, 2026
ea4274e
debug: add memory usage logging to apiData for OOM threshold testing
jodeleeuw Apr 1, 2026
058a1db
perf: set concurrency: 1 on data upload functions
jodeleeuw Apr 1, 2026
d412c79
chore: remove temporary memory logging from apiData
jodeleeuw Apr 1, 2026
23e0b54
fix: replace polling with direct read in data-emulator log tests
jodeleeuw Apr 1, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/node.js.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ name: Test

on:
pull_request:
branches: ["main"]
branches: ["main", "test"]
push:
branches: ["test"]

Expand Down
232 changes: 137 additions & 95 deletions components/dashboard/QueuePanel.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,29 @@ import {
import { Download } from "lucide-react";
import { auth } from "../../lib/firebase";

/**
 * Translate a raw upload-failure reason string into a researcher-friendly
 * explanation. Returns null when no reason is present, and falls back to
 * the raw reason text when no known pattern matches.
 */
function friendlyReason(reason) {
  if (!reason) return null;
  // Ordered list of [substring patterns, friendly message]; the first
  // entry whose patterns match wins.
  const translations = [
    [["interrupted upload", "memory limit"], "Upload was interrupted by a server restart or memory limit."],
    [["Upload exception", "fetch failed"], "Could not connect to OSF."],
    [["OSF error 503", "OSF error 502"], "OSF was temporarily unavailable."],
    [["OSF error 429"], "OSF rate-limited the request."],
    [["OSF error 401", "OSF error 403"], "Authentication error. Your OSF token may need to be refreshed."],
  ];
  for (const [patterns, message] of translations) {
    if (patterns.some((pattern) => reason.includes(pattern))) {
      return message;
    }
  }
  return reason;
}

function statusBadge(status) {
const labels = {
pending: { color: "orange", text: "Waiting to retry" },
pending: { color: "orange", text: "Retrying" },
processing: { color: "blue", text: "Retrying now" },
failed: { color: "red", text: "Failed" },
};
Expand All @@ -27,6 +47,20 @@ function statusBadge(status) {
);
}

/**
 * Human-readable countdown until the next automatic retry, e.g. "soon",
 * "in 12m", or "in 1h 30m". Accepts a Firestore Timestamp (anything with
 * a .toDate()) or a value the Date constructor understands. Returns null
 * when no retry time is set.
 */
function nextRetryText(nextRetryAt) {
  if (!nextRetryAt) return null;
  const retryTime = nextRetryAt.toDate
    ? nextRetryAt.toDate()
    : new Date(nextRetryAt);
  const remainingMs = retryTime.getTime() - Date.now();
  if (remainingMs <= 0) return "soon";
  // Round up so "59.2 minutes" reads as "in 1h" rather than "in 59m".
  const totalMinutes = Math.ceil(remainingMs / 60000);
  if (totalMinutes < 60) return `in ${totalMinutes}m`;
  const hours = Math.floor(totalMinutes / 60);
  const minutes = totalMinutes % 60;
  return minutes > 0 ? `in ${hours}h ${minutes}m` : `in ${hours}h`;
}

function timeRemaining(createdAt) {
if (!createdAt) return null;
const created = createdAt.toDate ? createdAt.toDate() : new Date(createdAt);
Expand All @@ -36,9 +70,9 @@ function timeRemaining(createdAt) {
const hoursLeft = Math.floor(msLeft / (60 * 60 * 1000));
if (hoursLeft >= 24) {
const days = Math.floor(hoursLeft / 24);
return `${days}d ${hoursLeft % 24}h remaining`;
return `${days}d ${hoursLeft % 24}h`;
}
return `${hoursLeft}h remaining`;
return `${hoursLeft}h`;
}

async function fetchFile(experimentId, entryId) {
Expand All @@ -51,7 +85,12 @@ async function fetchFile(experimentId, entryId) {
);
}

export default function QueuePanel({ entries, experimentId, errorLog }) {
/**
* QueuePanel — shows all queued uploads (pending + failed) with immediate
* download access. Pending items are being retried automatically but the
* researcher can download them right away without waiting.
*/
export default function QueuePanel({ entries, experimentId }) {
const [downloading, setDownloading] = useState(null);
const [downloadingAll, setDownloadingAll] = useState(false);

Expand Down Expand Up @@ -113,44 +152,57 @@ export default function QueuePanel({ entries, experimentId, errorLog }) {

const plural = (n, word) => `${n} ${word}${n !== 1 ? "s" : ""}`;

let alertTitle = "";
if (pendingCount > 0 && failedCount > 0) {
alertTitle = `${plural(pendingCount, "file")} waiting to upload, ${plural(failedCount, "file")} failed.`;
} else if (pendingCount > 0) {
alertTitle = `${plural(pendingCount, "file")} waiting to upload to OSF.`;
} else {
let alertTitle;
let alertDescription;

if (allFailed) {
alertTitle = `${plural(failedCount, "file")} could not be uploaded to OSF.`;
alertDescription = "All retries were exhausted. Download these files and upload them to your OSF project manually to prevent data loss.";
} else if (failedCount > 0) {
alertTitle = `${plural(entries.length, "file")} did not upload to OSF.`;
alertDescription = `${plural(pendingCount, "file")} still being retried. ${plural(failedCount, "file")} failed permanently. You can download all files below.`;
} else {
alertTitle = `${plural(pendingCount, "file")} did not upload to OSF.`;
alertDescription = "DataPipe is retrying automatically. You can also download the files now.";
}

return (
<Alert.Root status={allFailed ? "error" : "warning"} variant="solid">
<Alert.Indicator />
<Box flex="1">
<Alert.Title mb={1}>{alertTitle}</Alert.Title>
<Text fontSize="sm" mb={4}>
{allFailed
? "These files could not be delivered after multiple attempts. Download them to avoid data loss."
: "DataPipe will keep retrying automatically. Files are stored for up to 1 week. You can also download them below."}
</Text>
<Text fontSize="sm" mb={4}>{alertDescription}</Text>
<Accordion.Root collapsible size="sm" mb={4}>
<Accordion.Item value="why">
<Accordion.ItemTrigger>
<Box as="span" flex="1" textAlign="left" fontSize="sm">
Why am I seeing this?
Why did these uploads fail?
</Box>
<Accordion.ItemIndicator />
</Accordion.ItemTrigger>
<Accordion.ItemContent>
<Text fontSize="sm" pb={3}>
When a participant submits data, DataPipe tries to upload it to
your OSF project immediately. If that transfer fails — for
example, because OSF is temporarily unavailable, rate-limiting
requests, or there is a configuration issue with your project —
DataPipe saves a copy of the data and retries automatically over
the next several days. The files listed here are those saved
copies. Once a retry succeeds the file will disappear from this
list. If all retries are exhausted, you can still download the
data and upload it to OSF manually.
your OSF project immediately. If that fails, DataPipe saves a
copy and retries automatically. Common reasons include:
</Text>
<Box as="ul" fontSize="sm" pl={5} pb={3} listStyleType="disc">
<Box as="li" mb={1}>
<strong>Server memory limit</strong> — Large data submissions
can occasionally exceed the server&apos;s memory capacity.
</Box>
<Box as="li" mb={1}>
<strong>OSF unavailable</strong> — OSF may be temporarily
down or rate-limiting requests.
</Box>
<Box as="li" mb={1}>
<strong>Configuration issue</strong> — There may be a problem
with your OSF project settings or authentication token.
</Box>
</Box>
<Text fontSize="sm" pb={3}>
Files are stored for up to 7 days. If retries don&apos;t succeed,
download the files and upload them to OSF manually.
</Text>
</Accordion.ItemContent>
</Accordion.Item>
Expand All @@ -167,78 +219,68 @@ export default function QueuePanel({ entries, experimentId, errorLog }) {
Download all as ZIP
</Button>
</HStack>
<Accordion.Root collapsible defaultValue={allFailed ? ["queue-list"] : []}>
<Accordion.Item value="queue-list">
<Accordion.ItemTrigger>
<Box as="span" flex="1" textAlign="left">
View file details
</Box>
<Accordion.ItemIndicator />
</Accordion.ItemTrigger>
<Accordion.ItemContent pb={4}>
<Table.Root variant="line" size="sm">
<Table.Header>
<Table.Row>
<Table.ColumnHeader>FILENAME</Table.ColumnHeader>
<Table.ColumnHeader>STATUS</Table.ColumnHeader>
<Table.ColumnHeader>EXPIRES</Table.ColumnHeader>
<Table.ColumnHeader>ATTEMPTS</Table.ColumnHeader>
<Table.ColumnHeader>DOWNLOAD</Table.ColumnHeader>
</Table.Row>
</Table.Header>
<Table.Body>
{entries.map((entry) => (
<Table.Row key={entry.id}>
<Table.Cell>
{entry.filename}
{entry.failureReason && (
<Text fontSize="xs" color="red.300" mt={1}>
{entry.failureReason}
</Text>
)}
</Table.Cell>
<Table.Cell>{statusBadge(entry.status)}</Table.Cell>
<Table.Cell>{timeRemaining(entry.createdAt)}</Table.Cell>
<Table.Cell>
{entry.retryCount}/{entry.maxRetries}
</Table.Cell>
<Table.Cell>
<IconButton
aria-label="Download file"
size="xs"
variant="ghost"
loading={downloading === entry.id}
onClick={() => handleDownload(entry)}
>
<Download size={14} />
</IconButton>
</Table.Cell>
</Table.Row>
))}
{errorLog && errorLog.map((error, index) => (
<Table.Row key={`error-${index}`}>
<Table.Cell>
<Text>{error.error}</Text>
<Text fontSize="xs" color="red.300" mt={1}>
{error.time}
</Text>
</Table.Cell>
<Table.Cell>
<Badge colorPalette="red" variant="solid" px={2}>
Error
</Badge>
</Table.Cell>
<Table.Cell>-</Table.Cell>
<Table.Cell>-</Table.Cell>
<Table.Cell>-</Table.Cell>
</Table.Row>
))}
</Table.Body>
</Table.Root>
</Accordion.ItemContent>
</Accordion.Item>
</Accordion.Root>
<Table.Root variant="line" size="sm">
<Table.Header>
<Table.Row>
<Table.ColumnHeader>FILENAME</Table.ColumnHeader>
<Table.ColumnHeader>STATUS</Table.ColumnHeader>
<Table.ColumnHeader>REASON</Table.ColumnHeader>
<Table.ColumnHeader>STORED FOR</Table.ColumnHeader>
<Table.ColumnHeader></Table.ColumnHeader>
</Table.Row>
</Table.Header>
<Table.Body>
{entries.map((entry) => (
<Table.Row key={entry.id}>
<Table.Cell>{entry.filename}</Table.Cell>
<Table.Cell>
{statusBadge(entry.status)}
{(entry.status === "pending" || entry.status === "processing") &&
entry.nextRetryAt && (
<Text fontSize="xs" color="gray.400" mt={1}>
Next retry {nextRetryText(entry.nextRetryAt)}
</Text>
)}
</Table.Cell>
<Table.Cell>
<Text fontSize="xs">
{friendlyReason(entry.failureReason) || "\u2014"}
</Text>
</Table.Cell>
<Table.Cell>
<Text fontSize="xs">
{timeRemaining(entry.createdAt) || "\u2014"}
</Text>
</Table.Cell>
<Table.Cell>
<IconButton
aria-label={`Download ${entry.filename}`}
size="xs"
variant="ghost"
loading={downloading === entry.id}
onClick={() => handleDownload(entry)}
>
<Download size={14} />
</IconButton>
</Table.Cell>
</Table.Row>
))}
</Table.Body>
</Table.Root>
</Box>
</Alert.Root>
);
}

/**
* UploadsResolvedNotice — brief success confirmation shown when
* previously pending/failed uploads have all been resolved.
*/
export function UploadsResolvedNotice() {
return (
<Alert.Root status="success" variant="subtle" size="sm">
<Alert.Indicator />
<Alert.Title fontSize="sm">All queued uploads completed successfully.</Alert.Title>
</Alert.Root>
);
}
4 changes: 4 additions & 0 deletions firebase.json
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,10 @@
"port": 5000,
"host": "localhost"
},
"storage": {
"port": 9199,
"host": "localhost"
},
"ui": {
"enabled": true
}
Expand Down
28 changes: 13 additions & 15 deletions functions/src/__tests__/data-emulator.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,17 +30,6 @@ const config = {

jest.setTimeout(30000);

/**
 * Poll the "logs" collection until the document `docId` has `field` equal
 * to `expectedValue`, checking every 250ms for up to `timeoutMs`.
 * Resolves with the matching snapshot, or with one final read of the
 * document once the deadline has passed (so callers can still assert on
 * whatever state the document ended up in).
 */
async function waitForLog(db, docId, field, expectedValue, timeoutMs = 10000) {
  const logRef = db.collection("logs").doc(docId);
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const snapshot = await logRef.get();
    if (snapshot.exists && snapshot.data()?.[field] === expectedValue) {
      return snapshot;
    }
    await new Promise((resolve) => setTimeout(resolve, 250));
  }
  return logRef.get();
}

beforeAll(async () => {
initializeApp(config);
Expand Down Expand Up @@ -78,20 +67,26 @@ describe("apiData", () => {
it("should increment the write request log for the experiment when there is a complete request", async () => {
const db = getFirestore();
await db.collection("logs").doc("testlog").delete();
// writeLog is awaited inside apiData before the response is sent,
// so the log document should exist by the time we get the response.
await saveData({
experimentID: "testlog",
data: "test",
filename: "test",
});
let doc = await waitForLog(db, "testlog", "saveData", 1);
// Small delay to allow Firestore emulator to sync
await new Promise((resolve) => setTimeout(resolve, 500));
let doc = await db.collection("logs").doc("testlog").get();
expect(doc.exists).toBe(true);
expect(doc.data().saveData).toBe(1);

await saveData({
experimentID: "testlog",
data: "test",
filename: "test",
});
doc = await waitForLog(db, "testlog", "saveData", 2);
await new Promise((resolve) => setTimeout(resolve, 500));
doc = await db.collection("logs").doc("testlog").get();
expect(doc.data().saveData).toBe(2);
});

Expand All @@ -106,7 +101,9 @@ describe("apiData", () => {
filename: "test",
});

let doc = await waitForLog(db, "data-testexp", "logError", 1);
await new Promise((resolve) => setTimeout(resolve, 500));
let doc = await db.collection("logs").doc("data-testexp").get();
expect(doc.exists).toBe(true);
expect(doc.data().logError).toBe(1);

await db.collection("experiments").doc("data-testexp").set(
Expand All @@ -124,7 +121,8 @@ describe("apiData", () => {
filename: "test",
});

doc = await waitForLog(db, "data-testexp", "logError", 2);
await new Promise((resolve) => setTimeout(resolve, 500));
doc = await db.collection("logs").doc("data-testexp").get();
expect(doc.data().logError).toBe(2);

});
Expand Down
Loading