Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change knox to aws-sdk in order to support v4 aws signatures #30

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 7 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,23 +22,24 @@ npm install s3-sync

### `require('s3-sync').createStream([db, ]options)` ###

Creates an upload stream. Passes its options to [knox](http://ghub.io/knox),
Creates an upload stream. Passes its options to [aws-sdk](http://ghub.io/aws-sdk),
so at a minimum you'll need:

* `key`: Your AWS access key.
* `secret`: Your AWS secret.
* `key` or `accessKeyId`: Your AWS access key.
* `secret` or `secretAccessKey`: Your AWS secret.
* `bucket`: The bucket to upload to.
* `region`: The region the bucket is in.

The following are also specific to s3-sync:

* `concurrency`: The maximum number of files to upload concurrently.
* `retries`: The maximum number of times to retry uploading a file before failing. By default the value is 7.
* `headers`: Additional headers to include on each file.
* `headers`: Additional parameters for each file, see [S3 docs](http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property).
* `hashKey`: By default, file hashes are stored based on the file's absolute
path. This doesn't work very nicely with temporary files, so you can pass
this function in to map the file object to a string key for the hash.
* `acl`: Use a custom [ACL header](http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html). Defaults to `public-read`.
* `force`: Force s3-sync to overwrite any existing files.
* `force`: Force s3-sync to overwrite any existing files. Not generally required, since we store a hash and compare it to detect updated files.

You can also store your local cache in S3, provided you pass the following
options, and use `getCache` and `putCache` (see below) before/after uploading:
Expand Down Expand Up @@ -107,7 +108,7 @@ var files = readdirp({
, directoryFilter: ['!.git', '!cache']
})

// Takes the same options arguments as `knox`,
// Takes the same options arguments as `aws-sdk`,
// plus some additional options listed above
var uploader = s3sync(db, {
key: process.env.AWS_ACCESS_KEY
Expand Down
54 changes: 34 additions & 20 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ var LevelWriteStream = require('level-write-stream')
, xtend = require('xtend')
, mime = require('mime')
, once = require('once')
, knox = require('knox')
, AWS = require('aws-sdk')
, url = require('url')
, fs = require('fs')

Expand All @@ -25,8 +25,10 @@ function s3syncer(db, options) {
options.retries = options.retries || 7
options.acl = options.acl || 'public-read'
options.force = !!options.force
options.accessKeyId = options.accessKeyId || options.key
options.secretAccessKey = options.secretAccessKey || options.secret

var client = knox.createClient(options)
var client = new AWS.S3(options)
, queue = createQueue(options.concurrency)
, region = options.region === 'us-standard' ? false : options.region
, secure = options.secure || !('secure' in options)
Expand Down Expand Up @@ -89,14 +91,13 @@ function s3syncer(db, options) {
})

function checkForUpload(next) {
client.headFile(relative, function(err, res) {
if (err) return next(err)
client.headObject({Bucket: options.bucket, Key: relative}, function(err, res) {
if (err && err.statusCode !== 404) return next(err)
if (err && err.statusCode === 404) return uploadFile(details, next)
if (
options.force ||
res.statusCode === 404 || (
res.headers['x-amz-meta-syncfilehash'] !== details.md5
options.force || (
res.Metadata['syncfilehash'] !== details.md5
)) return uploadFile(details, next)
if (res.statusCode >= 300) return next(new Error('Bad status code: ' + res.statusCode))
return next(null, details)
})
}
Expand All @@ -117,16 +118,22 @@ function s3syncer(db, options) {
off.on('fail', function() {
next(lasterr || new Error('unknown error'))
}).on('ready', function() {
var headers = xtend({
'x-amz-acl': options.acl
, 'x-amz-meta-syncfilehash': details.md5
, 'Content-Type': mime.lookup(absolute)
var params = xtend({
Bucket: options.bucket,
Key: relative,
ContentType: mime.lookup(absolute),
ACL: options.acl,
Metadata: {
syncfilehash: details.md5
},
Body: fs.createReadStream(absolute)
}, options.headers)

client.putFile(absolute, relative, headers, function(err, res) {
if (!err) {
if (res.statusCode < 300) return next(null, details)
err = new Error('Bad status code: ' + res.statusCode)
client.putObject(params, function(err, res) {
if (err) {
err = new Error('Bad status code: ' + err.statusCode)
} else {
return next(null, details)
}

lasterr = err
Expand All @@ -139,9 +146,12 @@ function s3syncer(db, options) {
function getCache(callback) {
callback = once(callback)

client.getFile(options.cacheDest, function(err, res) {
if (err) return callback(err)
if (res.statusCode === 404) return callback(null)
client.getObject({
Bucket: options.bucket,
Key: options.cacheDest
}, function(err, res) {
if (err && err.statusCode !== 404) return callback(err)
if (err && err.statusCode === 404) return callback(null)

es.pipeline(
res
Expand All @@ -161,7 +171,11 @@ function s3syncer(db, options) {
.pipe(fs.createWriteStream(options.cacheSrc))
.once('error', callback)
.once('close', function() {
client.putFile(options.cacheSrc, options.cacheDest, function(err) {
client.putObject({
Bucket: options.bucket,
Key: options.cacheDest,
Body: fs.createReadStream(options.cacheSrc)
}, function(err) {
if (err) return callback(err)
fs.unlink(options.cacheSrc, callback)
})
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
"description": "A streaming upload tool for Amazon S3",
"main": "index.js",
"dependencies": {
"aws-sdk": "^2.2.18",
"backoff": "~2.3.0",
"crypto": "0.0.3",
"event-stream": "3.0.16",
"knox": "~0.8.3",
"level-write-stream": "^1.0.0",
"mime": "~1.2.9",
"once": "~1.1.1",
Expand Down