diff --git a/lib/controllers.js b/lib/controllers.js
index fa1c0a01..6ff34da5 100644
--- a/lib/controllers.js
+++ b/lib/controllers.js
@@ -138,16 +138,41 @@ module.exports = function (rootDirectory, logger) {
       });
     },
     putObject: function (req, res) {
-      fileStore.putObject(req.bucket, req, function (err, key) {
-        if (err) {
-          logger.error('Error uploading object "%s" to bucket "%s"',
-            req.params.key, req.bucket.name, err);
-          return res.status(400).json('Error uploading file');
-        }
-        logger.info('Stored object "%s" in bucket "%s" successfully', req.params.key, req.bucket.name);
-        res.header('ETag', key.md5);
-        return res.status(200).end();
-      });
+      var copy = req.headers['x-amz-copy-source'];
+      if (copy) {
+        var srcObjectParams = copy.split('/');
+        var srcBucket = srcObjectParams[1];
+        var srcObject = srcObjectParams.slice(2).join('/');
+        fileStore.getBucket(srcBucket, function (err, bucket) {
+          if (err) {
+            logger.error('No bucket found for "%s"', srcBucket);
+            var template = templateBuilder.buildBucketNotFound(srcBucket);
+            return buildXmlResponse(res, 404, template);
+          }
+          fileStore.copyObject(bucket, srcObject, req.bucket, req.params.key, function (err, key) {
+            if (err) {
+              logger.error('Error copying object "%s" from bucket "%s" into bucket "%s" with key "%s"',
+                srcObject, bucket.name, req.bucket.name, req.params.key);
+              return res.status(400).json('Error copying file');
+            }
+            logger.info('Copied object "%s" from bucket "%s" into bucket "%s" with key "%s"',
+              srcObject, bucket.name, req.bucket.name, req.params.key);
+            var template = templateBuilder.buildCopyObject(key);
+            return buildXmlResponse(res, 200, template);
+          });
+        });
+      } else {
+        fileStore.putObject(req.bucket, req, function (err, key) {
+          if (err) {
+            logger.error('Error uploading object "%s" to bucket "%s"',
+              req.params.key, req.bucket.name, err);
+            return res.status(400).json('Error uploading file');
+          }
+          logger.info('Stored object "%s" in bucket "%s" successfully', req.params.key, req.bucket.name);
+          res.header('ETag', key.md5);
+          return res.status(200).end();
+        });
+      }
     },
     deleteObject: function (req, res) {
       var key = req.params.key;
diff --git a/lib/file-store.js b/lib/file-store.js
index 47a80adb..bdd3728b 100644
--- a/lib/file-store.js
+++ b/lib/file-store.js
@@ -1,11 +1,11 @@
 'use strict';
-var path = require('path'),
-    fs = require('fs'),
-    async = require('async'),
-    md5 = require('MD5'),
+var path   = require('path'),
+    fs     = require('fs-extra'),
+    async  = require('async'),
+    md5    = require('MD5'),
     mkdirp = require('mkdirp'),
-    utils = require('./utils'),
-    _ = require('lodash');
+    utils  = require('./utils'),
+    _      = require('lodash');
 
 var FileStore = function (rootDirectory) {
   var CONTENT_FILE = '.dummys3_content',
@@ -51,7 +51,7 @@ var FileStore = function (rootDirectory) {
   var putBucket = function (bucketName, done) {
     var bucketPath = getBucketPath(bucketName);
     fs.mkdir(bucketPath, 502, function (err) {
-      if(err) {
+      if (err) {
         return done(err);
       }
       return getBucket(bucketName, done);
@@ -194,7 +194,7 @@ var FileStore = function (rootDirectory) {
       customMetaData: getCustomMetaData(headers)
     };
     fs.writeFile(metaFile, JSON.stringify(metaData), function (err) {
-      if(err) {
+      if (err) {
         return done(err);
       }
       return done(null, metaData);
@@ -229,10 +229,29 @@ var FileStore = function (rootDirectory) {
     });
   };
 
+  var copyObject = function (srcBucket, srcKey, destBucket, destKey, done) {
+    var srcKeyPath = path.resolve(getBucketPath(srcBucket.name), srcKey),
+        destKeyPath = path.resolve(getBucketPath(destBucket.name), destKey),
+        srcMetadataFilePath = path.join(srcKeyPath, METADATA_FILE),
+        srcContentFilePath = path.join(srcKeyPath, CONTENT_FILE),
+        destMetadataFilePath = path.join(destKeyPath, METADATA_FILE),
+        destContentFilePath = path.join(destKeyPath, CONTENT_FILE);
+
+    mkdirp.sync(destKeyPath);
+    fs.copySync(srcMetadataFilePath, destMetadataFilePath);
+    fs.copySync(srcContentFilePath, destContentFilePath);
+    fs.readFile(destMetadataFilePath, function (err, data) {
+      if (err) {
+        return done(err);
+      }
+      done(null, buildS3ObjectFromMetaDataFile(destKey, data));
+    });
+  };
+
   var deleteObject = function (bucket, key, done) {
     var keyPath = path.resolve(getBucketPath(bucket.name), key);
     async.map([path.join(keyPath, METADATA_FILE),
-      path.join(keyPath, CONTENT_FILE)], fs.unlink, function (err) {
+        path.join(keyPath, CONTENT_FILE)], fs.unlink, function (err) {
       if (err) {
         return done(err);
       }
@@ -260,6 +279,7 @@ var FileStore = function (rootDirectory) {
     getObjects: getObjects,
     getObject: getObject,
     putObject: putObject,
+    copyObject: copyObject,
     getObjectExists: getObjectExists,
     deleteObject: deleteObject
   };
diff --git a/lib/xml-template-builder.js b/lib/xml-template-builder.js
index b565ccfb..3b6143e5 100644
--- a/lib/xml-template-builder.js
+++ b/lib/xml-template-builder.js
@@ -32,7 +32,7 @@ var xml = function () {
     buildBuckets: function (buckets) {
       return jstoxml.toXML({
         _name: 'ListAllMyBucketsResult',
-        _attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
+        _attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
         _content: {
           Owner: {
             ID: 123,
@@ -55,7 +55,7 @@ var xml = function () {
     buildBucketQuery: function (options, items) {
       var xml = {
         _name: 'ListAllMyBucketsResult',
-        _attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
+        _attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
         _content: buildQueryContentXML(items, options)
       };
       return jstoxml.toXML(xml, {
@@ -118,7 +118,7 @@ var xml = function () {
     buildAcl: function () {
       return jstoxml.toXML({
         _name: 'AccessControlPolicy',
-        _attrs: { 'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01' },
+        _attrs: {'xmlns': 'http://doc.s3.amazonaws.com/2006-03-01'},
         _content: {
           Owner: {
             ID: 123,
@@ -143,6 +143,17 @@ var xml = function () {
         header: true,
         indent: '  '
       });
+    },
+    buildCopyObject: function (item) {
+      return jstoxml.toXML({
+        CopyObjectResult: {
+          LastModified: item.modifiedDate,
+          ETag: item.md5
+        }
+      }, {
+        header: true,
+        indent: '  '
+      });
     }
   };
 };
diff --git a/package.json b/package.json
index 8a2ab548..ce417320 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "name": "s3rver",
   "version": "0.0.0",
   "description": "Fake S3 server for node",
-  "main": "index.js",
+  "main": "lib/index.js",
   "scripts": {
     "test": "mocha --timeout 20000 --reporter spec --ui bdd"
   },
@@ -15,8 +15,7 @@
     "s3rver": "bin/s3rver.js"
   },
   "license": "MIT",
-  "main" : "lib/index.js",
-  "keywords" : [
+  "keywords": [
     "fake",
     "s3",
     "server",
@@ -30,6 +29,7 @@
     "commander": "^2.6.0",
     "concat-stream": "^1.4.7",
     "express": "^4.10.6",
+    "fs-extra": "^0.16.0",
     "jstoxml": "^0.2.2",
     "lodash": "^2.4.1",
     "mkdirp": "^0.5.0",
diff --git a/test/test.js b/test/test.js
index eaf75fc0..c95415df 100644
--- a/test/test.js
+++ b/test/test.js
@@ -198,6 +198,27 @@ describe('S3rver Tests', function () {
     });
   });
 
+  it('should copy an image object into another bucket', function (done) {
+    var file = path.join(__dirname, 'resources/image.jpg');
+    fs.readFile(file, function (err, data) {
+      if (err) {
+        return done(err);
+      }
+      var params = {
+        Bucket: buckets[3],
+        Key: 'image/jamie',
+        CopySource: '/' + buckets[0] + '/image'
+      };
+      s3Client.copyObject(params, function (err, data) {
+        if (err) {
+          return done(err);
+        }
+        /[a-fA-F0-9]{32}/.test(data.ETag).should.equal(true);
+        done();
+      });
+    });
+  });
+
   it('should store a large buffer in a bucket', function (done) {
     // 20M