From 871bc2c3f6fe95d48bc5a6e39e26f5760e0e4904 Mon Sep 17 00:00:00 2001 From: Youcef Mammar Date: Thu, 23 Aug 2018 07:23:13 +0200 Subject: [PATCH 1/2] GCS Datastore: Add support for contentType property close #57 --- lib/stores/DataStore.js | 21 +++++++++++++++++++ lib/stores/GCSDataStore.js | 4 ++++ lib/stores/S3Store.js | 21 ------------------- test/Test-DataStore.js | 41 +++++++++++++++++++++++++++++++++----- 4 files changed, 61 insertions(+), 26 deletions(-) diff --git a/lib/stores/DataStore.js b/lib/stores/DataStore.js index 834c336c..7119f62a 100644 --- a/lib/stores/DataStore.js +++ b/lib/stores/DataStore.js @@ -107,6 +107,27 @@ class DataStore extends EventEmitter { return resolve({ size: 0, upload_length: 1 }); }); } + + /** + * Parses the Base64 encoded metadata received from the client. + * + * @param {String} metadata_string tus' standard upload metadata + * @return {Object} metadata as key-value pair + */ + _parseMetadataString(metadata_string) { + const kv_pair_list = metadata_string.split(','); + + return kv_pair_list.reduce((metadata, kv_pair) => { + const [key, base64_value] = kv_pair.split(' '); + + metadata[key] = { + encoded: base64_value, + decoded: Buffer.from(base64_value, 'base64').toString('ascii'), + }; + + return metadata; + }, {}); + } } module.exports = DataStore; diff --git a/lib/stores/GCSDataStore.js b/lib/stores/GCSDataStore.js index 7265d429..6008221b 100644 --- a/lib/stores/GCSDataStore.js +++ b/lib/stores/GCSDataStore.js @@ -92,7 +92,9 @@ class GCSDataStore extends DataStore { const file = new File(file_id, upload_length, upload_defer_length, upload_metadata); const gcs_file = this.bucket.file(file.id); + const parsedMetadata = this._parseMetadataString(upload_metadata); const options = { + contentType: parsedMetadata.type.decoded, metadata: { metadata: { upload_length: file.upload_length, @@ -131,9 +133,11 @@ class GCSDataStore extends DataStore { return new Promise((resolve, reject) => { const file = this.bucket.file(file_id); + 
const parsedMetadata = this._parseMetadataString(data.upload_metadata); const options = { offset, metadata: { + contentType: parsedMetadata.type.decoded, metadata: { upload_length: data.upload_length, tus_version: TUS_RESUMABLE, diff --git a/lib/stores/S3Store.js b/lib/stores/S3Store.js index e2c268c2..9dfc1668 100644 --- a/lib/stores/S3Store.js +++ b/lib/stores/S3Store.js @@ -233,27 +233,6 @@ class S3Store extends DataStore { }); } - /** - * Parses the Base64 encoded metadata received from the client. - * - * @param {String} metadata_string tus' standard upload metadata - * @return {Object} metadata as key-value pair - */ - _parseMetadataString(metadata_string) { - const kv_pair_list = metadata_string.split(','); - - return kv_pair_list.reduce((metadata, kv_pair) => { - const [key, base64_value] = kv_pair.split(' '); - - metadata[key] = { - encoded: base64_value, - decoded: Buffer.from(base64_value, 'base64').toString('ascii'), - }; - - return metadata; - }, {}); - } - /** * Uploads a part/chunk to S3 from a temporary part file. 
* diff --git a/test/Test-DataStore.js b/test/Test-DataStore.js index d139ffcf..a376faad 100644 --- a/test/Test-DataStore.js +++ b/test/Test-DataStore.js @@ -41,10 +41,16 @@ describe('DataStore', () => { done(); }); - it('must have a create method', (done) => { + it('must have a create method', () => { datastore.should.have.property('create'); - datastore.create(); - done(); + const req = { + headers: { + 'upload-length': 42, + 'upload-defer-length': 0, + 'upload-metadata': 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=' + } + }; + return datastore.create(req); }); it('must have a write method', (done) => { @@ -53,9 +59,34 @@ describe('DataStore', () => { done(); }); - it('must have a getOffset method', (done) => { + it('must have a getOffset method', () => { datastore.should.have.property('getOffset'); - datastore.getOffset(); + const id = 42; + return datastore.getOffset(id); + }); + + it('must have a _parseMetadataString method', (done) => { + datastore.should.have.property('_parseMetadataString'); + const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=' + const parsed = datastore._parseMetadataString(uploadMetadata); + parsed.should.deepEqual({ + "filename": { + "decoded": "my-file.pdf", + "encoded": "bXktZmlsZS5wZGY=" + }, + "filetype": { + "decoded": "application/pdf", + "encoded": "YXBwbGljYXRpb24vcGRm" + }, + "name": { + "decoded": "my-file.pdf", + "encoded": "bXktZmlsZS5wZGY=" + }, + "type": { + "decoded": "application/pdf", + "encoded": "YXBwbGljYXRpb24vcGRm" + } + }) done(); }); }); From 595514701755ca624519d70650cc7490c7ee4faf Mon Sep 17 00:00:00 2001 From: Youcef Mammar Date: Sun, 7 Oct 2018 19:18:31 +0200 Subject: [PATCH 2/2] various fixes - using filetype instead of type - added test - handled case when metadata header is undefined --- lib/stores/DataStore.js | 7 +++++-- lib/stores/GCSDataStore.js | 4 ++-- 
test/Test-DataStore.js | 4 +++- test/Test-GCSDataStore.js | 7 +++++-- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/stores/DataStore.js b/lib/stores/DataStore.js index 7119f62a..885ff7a7 100644 --- a/lib/stores/DataStore.js +++ b/lib/stores/DataStore.js @@ -81,7 +81,7 @@ class DataStore extends EventEmitter { */ write(req) { log('[DataStore] write'); - return new Promise((resolve, reject) => { + return new Promise((resolve) => { // Stub resolve for tests const offset = 0; @@ -92,7 +92,7 @@ class DataStore extends EventEmitter { /** * Called in HEAD requests. This method should return the bytes - * writen to the DataStore, for the client to know where to resume + * written to the DataStore, for the client to know where to resume * the upload. * * @param {string} id filename @@ -115,6 +115,9 @@ class DataStore extends EventEmitter { * @return {Object} metadata as key-value pair */ _parseMetadataString(metadata_string) { + if (!metadata_string || !metadata_string.length) { + return {}; + } const kv_pair_list = metadata_string.split(','); return kv_pair_list.reduce((metadata, kv_pair) => { diff --git a/lib/stores/GCSDataStore.js b/lib/stores/GCSDataStore.js index 6008221b..5ac3f943 100644 --- a/lib/stores/GCSDataStore.js +++ b/lib/stores/GCSDataStore.js @@ -94,7 +94,7 @@ class GCSDataStore extends DataStore { const gcs_file = this.bucket.file(file.id); const parsedMetadata = this._parseMetadataString(upload_metadata); const options = { - contentType: parsedMetadata.type.decoded, + contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded, metadata: { metadata: { upload_length: file.upload_length, @@ -137,7 +137,7 @@ class GCSDataStore extends DataStore { const options = { offset, metadata: { - contentType: parsedMetadata.type.decoded, + contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded, metadata: { upload_length: data.upload_length, tus_version: TUS_RESUMABLE, diff --git a/test/Test-DataStore.js 
b/test/Test-DataStore.js index a376faad..3031fa46 100644 --- a/test/Test-DataStore.js +++ b/test/Test-DataStore.js @@ -68,7 +68,7 @@ describe('DataStore', () => { it('must have a _parseMetadataString method', (done) => { datastore.should.have.property('_parseMetadataString'); const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=' - const parsed = datastore._parseMetadataString(uploadMetadata); + let parsed = datastore._parseMetadataString(uploadMetadata); parsed.should.deepEqual({ "filename": { "decoded": "my-file.pdf", @@ -87,6 +87,8 @@ describe('DataStore', () => { "encoded": "YXBwbGljYXRpb24vcGRm" } }) + parsed = datastore._parseMetadataString(null); + parsed.should.deepEqual({}) done(); }); }); diff --git a/test/Test-GCSDataStore.js b/test/Test-GCSDataStore.js index 9e85d05b..2b4ef34b 100644 --- a/test/Test-GCSDataStore.js +++ b/test/Test-GCSDataStore.js @@ -128,9 +128,11 @@ describe('GCSDataStore', () => { }); it('should create a file', (done) => { + const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=' const req = { headers: { 'upload-length': TEST_FILE_SIZE, + 'upload-metadata': uploadMetadata, }, }; server.datastore.create(req) @@ -144,6 +146,7 @@ describe('GCSDataStore', () => { it(`should fire the ${EVENTS.EVENT_FILE_CREATED} event`, (done) => { + const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=' server.datastore.on(EVENTS.EVENT_FILE_CREATED, (event) => { event.should.have.property('file'); assert.equal(event.file instanceof File, true); @@ -153,6 +156,7 @@ describe('GCSDataStore', () => { const req = { headers: { 'upload-length': TEST_FILE_SIZE, + 'upload-metadata': uploadMetadata, }, }; server.datastore.create(req) @@ -199,8 +203,7 @@ describe('GCSDataStore', () => { // TODO: upload this file to the bucket first return 
server.datastore.getOffset(FILE_ALREADY_IN_BUCKET) .should.be.fulfilledWith({ - size: TEST_FILE_SIZE, - upload_length: TEST_FILE_SIZE, + size: TEST_FILE_SIZE }); }); });