Skip to content

Commit

Permalink
fix: refactor Tus parallel logic (#795)
Browse files Browse the repository at this point in the history
* fix: misc for tus-sender parallel improvements wip
* fix: chunk-sender respect parallel value while including in-progress
* finish all tus e2e tests
* fix: finalize tus parallel refactor
* fix: add logs to bundle size report
* fix: incorrect calc of diff in bundle report

fixes #777
  • Loading branch information
yoavniran authored Dec 8, 2024
1 parent a1c6063 commit 3eb997e
Show file tree
Hide file tree
Showing 57 changed files with 2,580 additions and 1,657 deletions.
20 changes: 15 additions & 5 deletions .github/actions/bundle-size-report/index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ const saveUpdatedMasterData = (data, masterData, core) => {
} else {
core.info("not saving updated master bundle size report - no change found");
}
} else {
core.info("skipping saving updated master bundle size report because we're not on MASTER");
}
};

Expand All @@ -71,15 +73,21 @@ const getBundleSizeReportMasterData = (core) => {
return JSON.parse(str);
};

const getWithPreviousBundleSizeReport = async (data, masterData, core) => {
const getWithPreviousBundleSizeReport = (data, masterData, core) => {
let updatedData = data;

if (!BRANCH.includes("master")) {
updatedData = data.map((row) => {
const masterRow = masterData.find((mr) => mr.name === row.name);

const diff = masterRow ?
(parseFileSize(row.size) - parseFileSize(masterRow.size)) : "N/A";
(Math.round(parseFileSize(row.size) - parseFileSize(masterRow.size))) : "N/A";

if (diff && diff !== "N/A") {
core.info(`bundle size diff for '${row.name}': ${diff}`);
} else {
core.info(`no previous bundle size data found for '${row.name}'`);
}

const trend = masterRow ?
(diff > 0 ? "🔺" : (diff < 0 ? "⬇" : "=")) : "N/A";
Expand Down Expand Up @@ -128,6 +136,8 @@ const getReportValue = (key, val) => {
return val === true ? "🟢" : "💥"
case "max":
return filesize(val, { standard: "jedec", spacer: "" });
case "diff":
return filesize(val, { standard: "jedec", spacer: "" });
default:
return `${val}`;
}
Expand All @@ -154,12 +164,12 @@ export default async ({ core }) => {
core.info("processing bundle size report...");

const dataStr = process.env.BUNDLE_SIZE_REPORT;
core.debug("got bundle size data input: " + dataStr);
core.info("got bundle size data input: " + dataStr);
const data = JSON.parse(dataStr);

const masterData = await getBundleSizeReportMasterData(core);
const masterData = getBundleSizeReportMasterData(core);

const dataWithMasterCompare = await getWithPreviousBundleSizeReport(data, masterData, core);
const dataWithMasterCompare = getWithPreviousBundleSizeReport(data, masterData, core);
core.debug("bundle size data with compare: " + dataWithMasterCompare);
core.setOutput("BUNDLE_SIZE_REPORT_WITH_COMPARE", JSON.stringify(dataWithMasterCompare));

Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -52,3 +52,4 @@ cypress/test-bundle
.nx/
/cypress/downloads
/test.mjs
test/test-bundle-size.mjs
2 changes: 1 addition & 1 deletion bundle.config.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ export default {
},
};
},
maxSize: 24000,
maxSize: 25000,
},

//TODO: find a way to make this work with global object assignment (webpack externals root)
Expand Down
9 changes: 6 additions & 3 deletions cypress/integration/intercept.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,17 @@ const createResponse = (options = {}) => ({
export const interceptWithHandler = (handler, alias = "uploadReq", url = UPLOAD_URL, method = DEFAULT_METHOD) =>
intercept(url, method, handler, alias);

export const interceptWithDelay = (delay = 100, alias = "uploadReq", url = UPLOAD_URL, method = DEFAULT_METHOD, resOptions = {}) =>
interceptWithHandler((req) => {
export const interceptWithDelay = (delay = 100, alias = "uploadReq", url = UPLOAD_URL, method = DEFAULT_METHOD, resOptions = {}) => {
const getResOptions = (typeof resOptions === "function") ? resOptions : () => resOptions;

return interceptWithHandler((req) => {
req.reply({
...RESPONSE_DEFAULTS,
...resOptions,
...getResOptions(req),
delay,
});
}, alias, url, method);
};

const intercept = (url = UPLOAD_URL, method = DEFAULT_METHOD, resOptions, alias = "uploadReq") => {
const handler = (typeof resOptions === "function") ? resOptions : createResponse(resOptions);
Expand Down
1 change: 1 addition & 0 deletions cypress/integration/selectFiles.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ const selectFiles = (fixtureName, triggerSelector, alias, cb, {
const usedFileName = fileName || fixtureName;

cy.fixture(fixtureName, { encoding: null })
.as(usedFileName)
.then((contents) => {
const files = new Array(times)
.fill(null)
Expand Down
210 changes: 135 additions & 75 deletions cypress/integration/tus-uploady/TusUploady-parallel-spec.js
Original file line number Diff line number Diff line change
@@ -1,97 +1,157 @@
import uploadFile from "../uploadFile";
import intercept from "../intercept";
import { createTusIntercepts, parallelFinalUrl, uploadUrl } from "./tusIntercept";
import clearTusPersistStorage from "./clearTusPersistStorage";
import runParallelUpload from "./runParallerlUpload";

describe("TusUploady - Parallel", () => {
const fileName = "flower.jpg";

before(() => {
beforeEach(() => {
cy.visitStory(
"tusUploady",
"with-tus-concatenation",
{
useMock: false,
uploadUrl: "http://test.tus.com/upload",
chunkSize: 200000,
uploadUrl,
chunkSize: 200_000,
tusResumeStorage: true,
uploadParams: { foo: "bar" },
}
);

clearTusPersistStorage();
});

it("should upload chunks using tus protocol in parallel", () => {
let reqCount = 0;
const createUrls = ["123", "456", "final"];
it("should upload chunks using tus protocol in parallel, chunk size larger than part size", () => {
const parallel = 2;
const {
assertCreateRequest,
assertPatchRequest,
assertPatchRequestTimes,
assertParallelFinalRequest,
} = createTusIntercepts({ parallel });

runParallelUpload(fileName, parallel, (fileSize, createSize, startFinishEvents) => {
assertCreateRequest(createSize + 1);
assertCreateRequest(createSize);

assertPatchRequest(createSize + 1, 0);
assertPatchRequest(createSize, 1);

assertPatchRequestTimes(0, 1);
assertPatchRequestTimes(1, 1);

assertParallelFinalRequest(({ request }) => {
expect(request.headers["upload-metadata"])
.to.eq("foo YmFy");
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);
});
});
});

it("should upload chunks using tus protocol in parallel, chunk size smaller than part size", () => {
const chunkSize = 50_000;
const parallel = 3;

const {
assertCreateRequest,
assertPatchRequest,
assertPatchRequestTimes,
assertParallelFinalRequest
} = createTusIntercepts({ parallel });

cy.setUploadOptions({ chunkSize, parallel });

runParallelUpload(fileName, parallel, (fileSize, createSize, startFinishEvents) => {
assertCreateRequest(createSize + 1);
assertCreateRequest(createSize + 1);
assertCreateRequest(createSize - 1);

assertPatchRequest(chunkSize, 0);
assertPatchRequest(chunkSize, 1);
assertPatchRequest(chunkSize, 2);

assertPatchRequestTimes(0, 3);
assertPatchRequestTimes(1, 3);
assertPatchRequestTimes(2, 3);

assertParallelFinalRequest(({ request }) => {
expect(request.headers["upload-metadata"])
.to.eq("foo YmFy");
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);
});
});
});

cy.intercept("POST", "http://test.tus.com/upload", (req) => {
req.reply(200, { success: true }, {
"Location": `http://test.tus.com/upload/${createUrls[reqCount]}`,
"Tus-Resumable": "1.0.0",
it("should upload chunks using tus protocol in parallel, with resume on the whole file", () => {
const parallel = 2;

const {
getPartUrls,
addResumeForFinal,
assertPatchRequestTimes,
assertCreateRequestTimes,
assertLastCreateRequest,
assertResumeRequest,
} = createTusIntercepts({ parallel });

const runAgain = runParallelUpload(fileName, parallel, (fileSize, createSize, startFinishEvents) => {
assertLastCreateRequest(({ request }) => {
expect(request.headers["upload-concat"])
.to.eq(`final;${getPartUrls().join(" ")}`);
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);
});

reqCount += 1;
}).as("createReq");

intercept("http://test.tus.com/upload/123", "PATCH", {
headers: {
"Tus-Resumable": "1.0.0",
"Upload-Offset": "200000"
},
}, "patchReq1");

intercept("http://test.tus.com/upload/456", "PATCH", {
headers: {
"Tus-Resumable": "1.0.0",
"Upload-Offset": "175000"
},
}, "patchReq2");

cy.get("input")
.should("exist")
.as("fInput");

uploadFile(fileName, () => {
cy.waitMedium();

cy.storyLog()
.assertFileItemStartFinish(fileName, 1)
.then((startFinishEvents) => {
cy.wait("@createReq")
.then((xhr) => {
expect(xhr.request.headers["upload-length"]).to.eq("200000");
expect(xhr.request.headers["upload-concat"]).to.eq("partial");
});

cy.wait("@createReq")
.then((xhr) => {
expect(xhr.request.headers["upload-length"]).to.eq("172445");
expect(xhr.request.headers["upload-concat"]).to.eq("partial");
});

cy.wait("@patchReq1")
.then(({ request }) => {
const { headers } = request;
expect(headers["content-length"]).to.eq("200000");
expect(headers["content-type"]).to.eq("application/offset+octet-stream");
});

cy.wait("@patchReq2")
.then(({ request }) => {
const { headers } = request;
expect(headers["content-length"]).to.eq("172445");
expect(headers["content-type"]).to.eq("application/offset+octet-stream");
});

cy.wait("@createReq")
.then((xhr) => {
expect(xhr.request.headers["upload-metadata"])
.to.eq("foo YmFy");

expect(xhr.request.headers["upload-concat"])
.to.eq("final;http://test.tus.com/upload/123 http://test.tus.com/upload/456");

expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq("http://test.tus.com/upload/final");
});
addResumeForFinal(fileSize);

runAgain((fileSize, createSize, startFinishEvents) => {
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);

assertPatchRequestTimes(0, 1, "should only have patch req for the first upload, not for resume");
assertPatchRequestTimes(1, 1, "should only have patch req for the first upload, not for resume");
assertCreateRequestTimes(3, "should only have 3 requests (create, final, resume)");

assertResumeRequest(({ request }) => {
console.log(" RESUME REQUEST ", request);
expect(request.method).to.eq("HEAD");
});
});
});
});

it("should upload chunks using tus protocol in parallel, with resume on one of the parts", () => {
const parallel = 2;

const {
getPartUrls,
addResumeForFinal,
addResumeForParts,
assertPatchRequestTimes,
assertCreateRequestTimes,
assertLastCreateRequest,
assertResumeForParts,
} = createTusIntercepts({ parallel });

const runAgain = runParallelUpload(fileName, parallel, (fileSize, createSize, startFinishEvents) => {
assertLastCreateRequest(({ request }) => {
expect(request.headers["upload-concat"])
.to.eq(`final;${getPartUrls().join(" ")}`);
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);
});

//resume on the file fails so can test resume on parts
addResumeForFinal(-1);
addResumeForParts();

runAgain((fileSize, createSize, startFinishEvents) => {
expect(startFinishEvents.finish.args[1].uploadResponse.location).to.eq(parallelFinalUrl);

assertCreateRequestTimes(4, "should have 4 requests (create, final, resume, final)");

assertPatchRequestTimes(0, 1, "should only have patch req for the first upload, not for resume");
assertPatchRequestTimes(1, 1, "should only have patch req for the first upload, not for resume");

assertResumeForParts();
});
});
});
});
Loading

0 comments on commit 3eb997e

Please sign in to comment.