Implementing copy object
Jamie Hall committed Jan 28, 2015
1 parent dc08729 commit b8921d7
Showing 5 changed files with 104 additions and 25 deletions.
45 changes: 35 additions & 10 deletions lib/controllers.js
@@ -138,16 +138,41 @@ module.exports = function (rootDirectory, logger) {
});
},
putObject: function (req, res) {
fileStore.putObject(req.bucket, req, function (err, key) {
if (err) {
logger.error('Error uploading object "%s" to bucket "%s"',
req.params.key, req.bucket.name, err);
return res.status(400).json('Error uploading file');
}
logger.info('Stored object "%s" in bucket "%s" successfully', req.params.key, req.bucket.name);
res.header('ETag', key.md5);
return res.status(200).end();
});
var copy = req.headers['x-amz-copy-source'];
if (copy) {
var srcObjectParams = copy.split('/');
var srcBucket = srcObjectParams[1];
var srcObject = srcObjectParams.slice(2).join('/');
fileStore.getBucket(srcBucket, function (err, bucket) {
if (err) {
logger.error('No bucket found for "%s"', srcBucket);
var template = templateBuilder.buildBucketNotFound(srcBucket);
return buildXmlResponse(res, 404, template);
}
fileStore.copyObject(bucket, srcObject, req.bucket, req.params.key, function (err, key) {
if (err) {
logger.error('Error copying object "%s" from bucket "%s" into bucket "%s" with key "%s"',
srcObject, bucket.name, req.bucket.name, req.params.key, err);
return res.status(400).json('Error copying file');
}
logger.info('Copied object "%s" from bucket "%s" into bucket "%s" with key "%s"',
srcObject, bucket.name, req.bucket.name, req.params.key);
var template = templateBuilder.buildCopyObject(key);
return buildXmlResponse(res, 200, template);
});
});
} else {
fileStore.putObject(req.bucket, req, function (err, key) {
if (err) {
logger.error('Error uploading object "%s" to bucket "%s"',
req.params.key, req.bucket.name, err);
return res.status(400).json('Error uploading file');
}
logger.info('Stored object "%s" in bucket "%s" successfully', req.params.key, req.bucket.name);
res.header('ETag', key.md5);
return res.status(200).end();
});
}
},
deleteObject: function (req, res) {
var key = req.params.key;
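For reference, the new branch in putObject is driven entirely by the x-amz-copy-source header on an otherwise ordinary PUT. A minimal sketch of a request that exercises it, using only Node's http module; the host, port, bucket and key names are illustrative, not taken from this commit:

var http = require('http');

// The PUT path names the *destination* bucket and key; the *source* appears only in
// the x-amz-copy-source header as "/<src-bucket>/<src-key>", which is exactly the
// string controllers.js splits on '/' to recover srcBucket and srcObject.
var copyReq = http.request({
  method: 'PUT',
  host: 'localhost',          // assumed: a locally running s3rver instance
  port: 4568,                 // assumed: whatever port s3rver was started on
  path: '/destination-bucket/copied-key',
  headers: { 'x-amz-copy-source': '/source-bucket/original-key' }
}, function (res) {
  res.pipe(process.stdout);   // on success, a <CopyObjectResult> XML document
});
copyReq.end();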
38 changes: 29 additions & 9 deletions lib/file-store.js
@@ -1,11 +1,11 @@
'use strict';
var path = require('path'),
fs = require('fs'),
async = require('async'),
md5 = require('MD5'),
var path = require('path'),
fs = require('fs-extra'),
async = require('async'),
md5 = require('MD5'),
mkdirp = require('mkdirp'),
utils = require('./utils'),
_ = require('lodash');
utils = require('./utils'),
_ = require('lodash');

var FileStore = function (rootDirectory) {
var CONTENT_FILE = '.dummys3_content',
@@ -51,7 +51,7 @@ var FileStore = function (rootDirectory) {
var putBucket = function (bucketName, done) {
var bucketPath = getBucketPath(bucketName);
fs.mkdir(bucketPath, 502, function (err) {
if(err) {
if (err) {
return done(err);
}
return getBucket(bucketName, done);
@@ -194,7 +194,7 @@ var FileStore = function (rootDirectory) {
customMetaData: getCustomMetaData(headers)
};
fs.writeFile(metaFile, JSON.stringify(metaData), function (err) {
if(err) {
if (err) {
return done(err);
}
return done(null, metaData);
@@ -229,10 +229,29 @@ var FileStore = function (rootDirectory) {
});
};

var copyObject = function (srcBucket, srcKey, destBucket, destKey, done) {
var srcKeyPath = path.resolve(getBucketPath(srcBucket.name), srcKey),
destKeyPath = path.resolve(getBucketPath(destBucket.name), destKey),
srcMetadataFilePath = path.join(srcKeyPath, METADATA_FILE),
srcContentFilePath = path.join(srcKeyPath, CONTENT_FILE),
destMetadataFilePath = path.join(destKeyPath, METADATA_FILE),
destContentFilePath = path.join(destKeyPath, CONTENT_FILE);

mkdirp.sync(destKeyPath);
fs.copySync(srcMetadataFilePath, destMetadataFilePath);
fs.copySync(srcContentFilePath, destContentFilePath);
fs.readFile(destMetadataFilePath, function (err, data) {
if (err) {
return done(err);
}
done(null, buildS3ObjectFromMetaDataFile(destKey, data));
});
};

var deleteObject = function (bucket, key, done) {
var keyPath = path.resolve(getBucketPath(bucket.name), key);
async.map([path.join(keyPath, METADATA_FILE),
path.join(keyPath, CONTENT_FILE)], fs.unlink, function (err) {
path.join(keyPath, CONTENT_FILE)], fs.unlink, function (err) {
if (err) {
return done(err);
}
@@ -260,6 +279,7 @@ var FileStore = function (rootDirectory) {
getObjects: getObjects,
getObject: getObject,
putObject: putObject,
copyObject: copyObject,
getObjectExists: getObjectExists,
deleteObject: deleteObject
};
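At the store level, copyObject is straightforward file shuffling: it mkdirp's the destination key directory, copies the source object's metadata and content files with fs-extra's copySync, then re-reads the copied metadata to build the S3 object handed back to the controller. A rough sketch of driving it directly, assuming lib/file-store.js exports the FileStore constructor shown above, that the root directory is ./tmp-s3, and that both buckets already exist:

// Hypothetical direct usage of the new copyObject; names and paths are illustrative.
var FileStore = require('./lib/file-store');
var fileStore = new FileStore('./tmp-s3');

fileStore.getBucket('source-bucket', function (err, srcBucket) {
  if (err) { throw err; }
  fileStore.getBucket('dest-bucket', function (err, destBucket) {
    if (err) { throw err; }
    // Copy 'original-key' from srcBucket into destBucket under 'copied-key'.
    fileStore.copyObject(srcBucket, 'original-key', destBucket, 'copied-key', function (err, key) {
      if (err) { throw err; }
      console.log(key.md5, key.modifiedDate);  // the fields buildCopyObject serialises
    });
  });
});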
17 changes: 14 additions & 3 deletions lib/xml-template-builder.js
@@ -32,7 +32,7 @@ var xml = function () {
buildBuckets: function (buckets) {
return jstoxml.toXML({
_name: 'ListAllMyBucketsResult',
_attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
_attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
_content: {
Owner: {
ID: 123,
@@ -55,7 +55,7 @@ var xml = function () {
buildBucketQuery: function (options, items) {
var xml = {
_name: 'ListAllMyBucketsResult',
_attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
_attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
_content: buildQueryContentXML(items, options)
};
return jstoxml.toXML(xml, {
@@ -118,7 +118,7 @@ var xml = function () {
buildAcl: function () {
return jstoxml.toXML({
_name: 'AccessControlPolicy',
_attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
_attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
_content: {
Owner: {
ID: 123,
@@ -143,6 +143,17 @@ var xml = function () {
header: true,
indent: ' '
});
},
buildCopyObject: function (item) {
return jstoxml.toXML({
CopyObjectResult: {
LastModified: item.modifiedDate,
ETag: item.md5
}
}, {
header: true,
indent: ' '
});
}
};
};
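buildCopyObject mirrors the body S3 itself sends back for a copy: a CopyObjectResult element carrying the new object's LastModified date and ETag. Roughly, for an item shaped like the one file-store.js hands back, the output looks like this (the require path, export shape and exact whitespace are assumptions; jstoxml controls the final formatting):

var templateBuilder = require('./lib/xml-template-builder'); // assumed export, matching how controllers.js uses it
var body = templateBuilder.buildCopyObject({
  modifiedDate: '2015-01-28T12:00:00.000Z',       // illustrative values
  md5: '9a0364b9e99bb480dd25e1f0284c8555'
});
// Approximately:
//   <?xml version="1.0" encoding="UTF-8"?>
//   <CopyObjectResult>
//     <LastModified>2015-01-28T12:00:00.000Z</LastModified>
//     <ETag>9a0364b9e99bb480dd25e1f0284c8555</ETag>
//   </CopyObjectResult>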
6 changes: 3 additions & 3 deletions package.json
@@ -2,7 +2,7 @@
"name": "s3rver",
"version": "0.0.0",
"description": "Fake S3 server for node",
"main": "index.js",
"main": "lib/index.js",
"scripts": {
"test": "mocha --timeout 20000 --reporter spec --ui bdd"
},
@@ -15,8 +15,7 @@
"s3rver": "bin/s3rver.js"
},
"license": "MIT",
"main" : "lib/index.js",
"keywords" : [
"keywords": [
"fake",
"s3",
"server",
@@ -30,6 +29,7 @@
"commander": "^2.6.0",
"concat-stream": "^1.4.7",
"express": "^4.10.6",
"fs-extra": "^0.16.0",
"jstoxml": "^0.2.2",
"lodash": "^2.4.1",
"mkdirp": "^0.5.0",
23 changes: 23 additions & 0 deletions test/test.js
@@ -198,6 +198,29 @@ describe('S3rver Tests', function () {
});
});

it('should copy an image object into another bucket', function (done) {
var file = path.join(__dirname, 'resources/image.jpg');
fs.readFile(file, function (err, data) {
if (err) {
return done(err);
}
var params = {
Bucket: buckets[3],
Key: 'image/jamie',
CopySource: '/' + buckets[0] + '/image'
};
s3Client.copyObject(params, function (err, data) {
if (err) {
return done(err);
}
/[a-fA-F0-9]{32}/.test(data.ETag).should.equal(true);
done();
});
});
});
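A natural follow-up, not part of this commit, is to read the copy back and check that it round-trips. A sketch reusing the same s3Client, bucket fixtures and image resource, assuming the copy test above has already run:

it('should read back an object copied into another bucket', function (done) {
  s3Client.getObject({ Bucket: buckets[3], Key: 'image/jamie' }, function (err, object) {
    if (err) {
      return done(err);
    }
    // The copied body should be the same size as the original image on disk.
    var original = fs.readFileSync(path.join(__dirname, 'resources/image.jpg'));
    object.Body.length.should.equal(original.length);
    done();
  });
});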


it('should store a large buffer in a bucket', function (done) {
// 20M
