
GCS DataStore: Add support for contentType property #130

Closed · wants to merge 2 commits
28 changes: 26 additions & 2 deletions lib/stores/DataStore.js
@@ -81,7 +81,7 @@ class DataStore extends EventEmitter {
*/
write(req) {
log('[DataStore] write');
-        return new Promise((resolve, reject) => {
+        return new Promise((resolve) => {
// Stub resolve for tests
const offset = 0;

@@ -92,7 +92,7 @@ class DataStore extends EventEmitter {

/**
* Called in HEAD requests. This method should return the bytes
-     * writen to the DataStore, for the client to know where to resume
+     * written to the DataStore, for the client to know where to resume
* the upload.
*
* @param {string} id filename
@@ -107,6 +107,30 @@ class DataStore extends EventEmitter {
return resolve({ size: 0, upload_length: 1 });
});
}
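Since `getOffset` backs tus HEAD requests, here is a minimal sketch of how a caller might consume the stub above (the store options and upload id are illustrative, not part of this diff):

```js
const DataStore = require('./lib/stores/DataStore');

const store = new DataStore({ path: '/files' }); // hypothetical options

store.getOffset('upload-123').then(({ size, upload_length }) => {
    // A tus server echoes these values back as headers so the
    // client knows where to resume:
    console.log(`Upload-Offset: ${size}`);          // 0 from the stub
    console.log(`Upload-Length: ${upload_length}`); // 1 from the stub
});
```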

Author: I moved this method here where it can be leveraged by any datastore (not just S3).


/**
* Parses the Base64 encoded metadata received from the client.
*
* @param {String} metadata_string tus' standard upload metadata
* @return {Object} metadata as key-value pair
*/
_parseMetadataString(metadata_string) {
if (!metadata_string || !metadata_string.length) {
return {};
}
const kv_pair_list = metadata_string.split(',');

return kv_pair_list.reduce((metadata, kv_pair) => {
const [key, base64_value] = kv_pair.split(' ');

metadata[key] = {
encoded: base64_value,
decoded: Buffer.from(base64_value, 'base64').toString('ascii'),
};

return metadata;
}, {});
}
}

module.exports = DataStore;
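For reference, tus' `Upload-Metadata` header is a comma-separated list of `key base64value` pairs, which is exactly what `_parseMetadataString` walks; a quick sketch of its behavior (the header value and store options are illustrative):

```js
const DataStore = require('./lib/stores/DataStore');

const store = new DataStore({ path: '/files' }); // hypothetical options

// Two pairs: "filename my-file.pdf" and "filetype application/pdf",
// with each value Base64-encoded per the tus spec.
const header = 'filename bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm';

console.log(store._parseMetadataString(header));
// => {
//      filename: { encoded: 'bXktZmlsZS5wZGY=', decoded: 'my-file.pdf' },
//      filetype: { encoded: 'YXBwbGljYXRpb24vcGRm', decoded: 'application/pdf' }
//    }
```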
4 changes: 4 additions & 0 deletions lib/stores/GCSDataStore.js
@@ -92,7 +92,9 @@ class GCSDataStore extends DataStore {

const file = new File(file_id, upload_length, upload_defer_length, upload_metadata);
const gcs_file = this.bucket.file(file.id);
const parsedMetadata = this._parseMetadataString(upload_metadata);
const options = {
contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded,
metadata: {
metadata: {
upload_length: file.upload_length,
@@ -131,9 +133,11 @@
return new Promise((resolve, reject) => {
const file = this.bucket.file(file_id);

const parsedMetadata = this._parseMetadataString(data.upload_metadata);
const options = {
offset,
metadata: {
contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded,
metadata: {
upload_length: data.upload_length,
tus_version: TUS_RESUMABLE,
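The decoded `filetype` flows into the GCS client, whose `createWriteStream` options accept a `contentType`; a sketch of the resulting call, assuming `@google-cloud/storage` and illustrative names:

```js
const { Storage } = require('@google-cloud/storage');

const bucket = new Storage().bucket('my-tus-bucket'); // hypothetical bucket

// Shape produced by _parseMetadataString for 'filetype YXBwbGljYXRpb24vcGRm':
const parsedMetadata = {
    filetype: { encoded: 'YXBwbGljYXRpb24vcGRm', decoded: 'application/pdf' },
};

const stream = bucket.file('upload-123').createWriteStream({
    // Left undefined when the client sent no filetype, so GCS falls
    // back to its default content type rather than an empty string.
    contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded,
    metadata: {
        metadata: { upload_length: 1024 }, // custom metadata, as in the diff
    },
});

stream.end(Buffer.from('%PDF-')); // illustrative payload
```

Note that `parsedMetadata.filetype && parsedMetadata.filetype.decoded` short-circuits to `undefined` when the key is absent, which the GCS client treats as "not set".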
21 changes: 0 additions & 21 deletions lib/stores/S3Store.js
@@ -233,27 +233,6 @@ class S3Store extends DataStore {
});
}

/**
* Parses the Base64 encoded metadata received from the client.
*
* @param {String} metadata_string tus' standard upload metadata
* @return {Object} metadata as key-value pair
*/
_parseMetadataString(metadata_string) {
const kv_pair_list = metadata_string.split(',');

return kv_pair_list.reduce((metadata, kv_pair) => {
const [key, base64_value] = kv_pair.split(' ');

metadata[key] = {
encoded: base64_value,
decoded: Buffer.from(base64_value, 'base64').toString('ascii'),
};

return metadata;
}, {});
}

/**
* Uploads a part/chunk to S3 from a temporary part file.
*
43 changes: 38 additions & 5 deletions test/Test-DataStore.js
@@ -41,10 +41,16 @@ describe('DataStore', () => {
done();
});

-    it('must have a create method', (done) => {
+    it('must have a create method', () => {
         datastore.should.have.property('create');
-        datastore.create();
-        done();
+        const req = {
+            headers: {
+                'upload-length': 42,
+                'upload-defer-length': 0,
+                'upload-metadata': 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
+            }
+        };
+        return datastore.create(req);
});

it('must have a write method', (done) => {
@@ -53,9 +59,36 @@
done();
});

-    it('must have a getOffset method', (done) => {
+    it('must have a getOffset method', () => {
         datastore.should.have.property('getOffset');
-        datastore.getOffset();
+        const id = 42;
+        return datastore.getOffset(id);
});

it('must have a _parseMetadataString method', (done) => {
datastore.should.have.property('_parseMetadataString');
const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
let parsed = datastore._parseMetadataString(uploadMetadata);
parsed.should.deepEqual({
"filename": {
"decoded": "my-file.pdf",
"encoded": "bXktZmlsZS5wZGY="
},
"filetype": {
"decoded": "application/pdf",
"encoded": "YXBwbGljYXRpb24vcGRm"
},
"name": {
"decoded": "my-file.pdf",
"encoded": "bXktZmlsZS5wZGY="
},
"type": {
"decoded": "application/pdf",
"encoded": "YXBwbGljYXRpb24vcGRm"
}
})
parsed = datastore._parseMetadataString(null);
parsed.should.deepEqual({})
done();
});
});
7 changes: 5 additions & 2 deletions test/Test-GCSDataStore.js
@@ -128,9 +128,11 @@ describe('GCSDataStore', () => {
});

it('should create a file', (done) => {
const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
const req = {
headers: {
'upload-length': TEST_FILE_SIZE,
'upload-metadata': uploadMetadata,
},
};
server.datastore.create(req)
@@ -144,6 +146,7 @@


it(`should fire the ${EVENTS.EVENT_FILE_CREATED} event`, (done) => {
const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
server.datastore.on(EVENTS.EVENT_FILE_CREATED, (event) => {
event.should.have.property('file');
assert.equal(event.file instanceof File, true);
@@ -153,6 +156,7 @@
const req = {
headers: {
'upload-length': TEST_FILE_SIZE,
'upload-metadata': uploadMetadata,
},
};
server.datastore.create(req)
@@ -199,8 +203,7 @@ describe('GCSDataStore', () => {
// TODO: upload this file to the bucket first
return server.datastore.getOffset(FILE_ALREADY_IN_BUCKET)
.should.be.fulfilledWith({
-                size: TEST_FILE_SIZE,
-                upload_length: TEST_FILE_SIZE,
+                size: TEST_FILE_SIZE
});
});
});