Commit

various fixes
- using filetype instead of type
- added a test
- handled the case where the metadata header is undefined
tkrugg committed Oct 7, 2018
1 parent 871bc2c commit 5955147
Showing 4 changed files with 15 additions and 7 deletions.
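For context: the tus Upload-Metadata header is a comma-separated list of "key base64value" pairs, and the fixture strings used in the tests below decode to an ordinary MIME type and filename. A minimal sketch (not part of this commit) of how such a header value is built with Node's Buffer API:

    // Sketch only, not part of the diff: building the Upload-Metadata values
    // that appear in the test fixtures below.
    const filetype = Buffer.from('application/pdf').toString('base64'); // 'YXBwbGljYXRpb24vcGRm'
    const filename = Buffer.from('my-file.pdf').toString('base64');     // 'bXktZmlsZS5wZGY='
    const uploadMetadata = `filetype ${filetype},filename ${filename}`;
    // 'filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='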
lib/stores/DataStore.js (7 changes: 5 additions & 2 deletions)
@@ -81,7 +81,7 @@ class DataStore extends EventEmitter {
     */
    write(req) {
        log('[DataStore] write');
-       return new Promise((resolve, reject) => {
+       return new Promise((resolve) => {
            // Stub resolve for tests
            const offset = 0;

@@ -92,7 +92,7 @@ class DataStore extends EventEmitter {

    /**
     * Called in HEAD requests. This method should return the bytes
-    * writen to the DataStore, for the client to know where to resume
+    * written to the DataStore, for the client to know where to resume
     * the upload.
     *
     * @param {string} id filename

@@ -115,6 +115,9 @@ class DataStore extends EventEmitter {
     * @return {Object} metadata as key-value pair
     */
    _parseMetadataString(metadata_string) {
+       if (!metadata_string || !metadata_string.length) {
+           return {};
+       }
        const kv_pair_list = metadata_string.split(',');

        return kv_pair_list.reduce((metadata, kv_pair) => {
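The hunk above only shows the new guard; the reduce body is elided. A hedged sketch of the full parsing step, with the reduce body reconstructed from the expectations in Test-DataStore.js further down (each key maps to an object holding both the encoded and the decoded value):

    // Sketch under assumptions: the reduce body is not shown in the diff, so its
    // exact shape is inferred from the test expectations.
    function parseMetadataString(metadata_string) {
        if (!metadata_string || !metadata_string.length) {
            return {}; // new guard: no Upload-Metadata header, nothing to split
        }
        return metadata_string.split(',').reduce((metadata, kv_pair) => {
            const [key, encoded] = kv_pair.split(' ');
            metadata[key] = {
                encoded,
                decoded: Buffer.from(encoded, 'base64').toString('ascii'),
            };
            return metadata;
        }, {});
    }

    parseMetadataString(undefined); // {}
    parseMetadataString('filetype YXBwbGljYXRpb24vcGRm');
    // { filetype: { encoded: 'YXBwbGljYXRpb24vcGRm', decoded: 'application/pdf' } }

Before the guard, calling the method without an Upload-Metadata header crashed on metadata_string.split(',').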
lib/stores/GCSDataStore.js (4 changes: 2 additions & 2 deletions)
@@ -94,7 +94,7 @@ class GCSDataStore extends DataStore {
        const gcs_file = this.bucket.file(file.id);
        const parsedMetadata = this._parseMetadataString(upload_metadata);
        const options = {
-           contentType: parsedMetadata.type.decoded,
+           contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded,
            metadata: {
                metadata: {
                    upload_length: file.upload_length,

@@ -137,7 +137,7 @@ class GCSDataStore extends DataStore {
        const options = {
            offset,
            metadata: {
-               contentType: parsedMetadata.type.decoded,
+               contentType: parsedMetadata.filetype && parsedMetadata.filetype.decoded,
                metadata: {
                    upload_length: data.upload_length,
                    tus_version: TUS_RESUMABLE,
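The contentType change is the "using filetype instead of type" part of the commit message: tus clients such as tus-js-client typically send the MIME type under the filetype key, and the old expression also threw when no metadata was sent at all. A small illustration of the difference:

    // Illustration only: why the guarded expression avoids a crash.
    const parsedMetadata = {}; // e.g. the request carried no Upload-Metadata header

    // old: parsedMetadata.type.decoded
    //      -> TypeError: Cannot read property 'decoded' of undefined

    // new: short-circuits to undefined, so the GCS options are still valid
    const contentType = parsedMetadata.filetype && parsedMetadata.filetype.decoded;
    console.log(contentType); // undefined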
test/Test-DataStore.js (4 changes: 3 additions & 1 deletion)
@@ -68,7 +68,7 @@ describe('DataStore', () => {
    it('must have a _parseMetadataString method', (done) => {
        datastore.should.have.property('_parseMetadataString');
        const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
-       const parsed = datastore._parseMetadataString(uploadMetadata);
+       let parsed = datastore._parseMetadataString(uploadMetadata);
        parsed.should.deepEqual({
            "filename": {
                "decoded": "my-file.pdf",

@@ -87,6 +87,8 @@ describe('DataStore', () => {
                "encoded": "YXBwbGljYXRpb24vcGRm"
            }
        })
+       parsed = datastore._parseMetadataString(null);
+       parsed.should.deepEqual({})
        done();
    });
});
test/Test-GCSDataStore.js (7 changes: 5 additions & 2 deletions)
@@ -128,9 +128,11 @@ describe('GCSDataStore', () => {
        });

        it('should create a file', (done) => {
+           const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
            const req = {
                headers: {
                    'upload-length': TEST_FILE_SIZE,
+                   'upload-metadata': uploadMetadata,
                },
            };
            server.datastore.create(req)

@@ -144,6 +146,7 @@


        it(`should fire the ${EVENTS.EVENT_FILE_CREATED} event`, (done) => {
+           const uploadMetadata = 'type YXBwbGljYXRpb24vcGRm,name bXktZmlsZS5wZGY=,filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY='
            server.datastore.on(EVENTS.EVENT_FILE_CREATED, (event) => {
                event.should.have.property('file');
                assert.equal(event.file instanceof File, true);

@@ -153,6 +156,7 @@
            const req = {
                headers: {
                    'upload-length': TEST_FILE_SIZE,
+                   'upload-metadata': uploadMetadata,
                },
            };
            server.datastore.create(req)

@@ -199,8 +203,7 @@ describe('GCSDataStore', () => {
            // TODO: upload this file to the bucket first
            return server.datastore.getOffset(FILE_ALREADY_IN_BUCKET)
                .should.be.fulfilledWith({
-                   size: TEST_FILE_SIZE,
-                   upload_length: TEST_FILE_SIZE,
+                   size: TEST_FILE_SIZE
                });
        });
    });
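For completeness, a hypothetical sketch of how the headers exercised by these tests arrive from a client: a tus creation request is a POST carrying Upload-Length and Upload-Metadata. The endpoint path, port, and length value below are assumptions for illustration, not taken from this repository:

    // Hypothetical client-side request; '/files', port 1080 and the length are assumed values.
    const http = require('http');

    const req = http.request({
        method: 'POST',
        host: 'localhost',
        port: 1080,
        path: '/files',
        headers: {
            'Tus-Resumable': '1.0.0',
            'Upload-Length': '1024',
            'Upload-Metadata': 'filetype YXBwbGljYXRpb24vcGRm,filename bXktZmlsZS5wZGY=',
        },
    }, (res) => {
        // On success the server answers 201 Created with a Location header for the new upload.
        console.log(res.statusCode, res.headers.location);
    });
    req.end();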
