Paddle downstream with Canoe, an S3 utility library for Node.js. Built on the AWS Node SDK.
npm install canoe --save
Create a new Canoe instance by passing an instance of AWS.S3 from the aws-sdk module.
var AWS = require('aws-sdk'),
    Canoe = require('canoe');
var s3 = new AWS.S3();
var canoe = new Canoe(s3);
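Canoe uses whatever configuration the AWS.S3 client carries, so credentials and region are set on the client, not on Canoe. A minimal sketch, assuming you want to configure the client explicitly rather than rely on environment defaults or an IAM role (the region value here is just an example):

var AWS = require('aws-sdk'),
    Canoe = require('canoe');

// Example configuration; the SDK can also pick these values up from the
// environment, a shared credentials file, or an IAM role.
var s3 = new AWS.S3({
  region: 'us-east-1',
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

var canoe = new Canoe(s3);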
Create a readable stream of all objects whose keys match a given prefix.
Combines multiple objects into a single readable stream, preserving the order of the objects.
// This will stream all objects starting with "path/to/things/name_"
// For example, it would match "path/to/things/name_1", "path/to/things/name_2", etc
canoe.createPrefixedReadStream({
  Bucket: 'bucket-name',
  Prefix: 'path/to/things/name_'
}, function (err, readable) {
  if (err) throw err;
  readable.pipe(process.stdout);
});
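As a slightly fuller sketch, the combined stream can be piped anywhere a readable stream goes; here it is gzipped into a local file (the bucket, prefix, and output path are stand-ins):

var fs = require('fs'),
    zlib = require('zlib');

canoe.createPrefixedReadStream({
  Bucket: 'bucket-name',
  Prefix: 'logs/2013-08-'
}, function (err, readable) {
  if (err) throw err;

  // Concatenate every matching object, gzip the result, and save it locally.
  readable
    .pipe(zlib.createGzip())
    .pipe(fs.createWriteStream('./logs-2013-08.gz'));
});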
A Node 0.10-friendly writable stream interface ("streams2") for uploading objects to S3.
Creates a writable stream for a given S3 bucket/key. The stream will be returned immediately and also passed to an optional callback.
The stream will be writable when it's returned, but not actually ready to send data to S3 yet (data is buffered internally in the meantime). If you use the immediately returned stream, be sure to respect falsy return values from write(), as Node's readable.pipe() does. The stream will be fully ready when the callback is run.
The stream will emit a writable event when it's ready to send data to S3. A close event will be emitted when the stream is fully done consuming (uploading) the data; this will happen after the finish event.
Basic usage:
var writableStream = canoe.createWriteStream({
  Bucket: 'bucket-name',
  Key: 'file/name'
});

writableStream.write(stuff); // stuff can be a String or Buffer
writableStream.end();
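If you write to the stream yourself rather than piping into it, respecting the falsy return values mentioned above means pausing until the stream emits drain. A minimal sketch, with a hypothetical list of chunks standing in for a real data source:

var stream = canoe.createWriteStream({
  Bucket: 'bucket-name',
  Key: 'file/name'
});

// Hypothetical data source: a list of chunks to upload in order.
var chunks = ['chunk one\n', 'chunk two\n', 'chunk three\n'];

function writeNext() {
  var chunk;
  while ((chunk = chunks.shift()) !== undefined) {
    // write() returns false when the internal buffer is full;
    // wait for 'drain' before writing more, just like pipe() does.
    if (!stream.write(chunk)) {
      stream.once('drain', writeNext);
      return;
    }
  }
  stream.end();
}

writeNext();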
// Create a stream and use it immediately
var fs = require('fs');

var writeable = canoe.createWriteStream({
  Bucket: 'random-access-memories',
  Key: 'instant.crush'
});

// For fun, let's keep track of how much memory we need.
// We'll print the memory peak once the upload finishes.
// Peak will stay fairly consistent regardless of file size.
var maxMemory = 0;
setInterval(function () {
  maxMemory = Math.max(maxMemory, process.memoryUsage().heapUsed);
}, 100);

writeable.on('close', function (data) {
  console.log('Peak memory heap usage was ' + maxMemory + ' bytes');
  process.exit();
});

// Imagine you have some massive file you want to upload that doesn't fit into memory
fs.createReadStream('./random-access-memories.log').pipe(writeable);
// Create a stream and wait to use it in a callback
var s3Params = {Bucket: 'random-access-memories', Key: 'instant.crush'};

canoe.createWriteStream(s3Params, function (err, writable) {
  if (err) return console.error(err);

  writable.write("And we will never be alone again\n");
  writable.write("'Cause it doesn't happen every day\n");
  writable.write("Kinda counted on you being a friend\n");
  writable.write("Kinda given up on giving away\n");
  writable.write("Now I thought about what I wanna say\n");
  writable.write("But I never really know where to go\n");
  writable.write("So I chained myself to a friend\n");
  writable.write("'Cause I know it unlocks like a door...");
  writable.end("\n");
});
See the contributing documentation.
Evan Solomon (personal website), supported by A Medium Corporation.
Copyright 2013 A Medium Corporation.
Licensed under the Apache License, Version 2.0. See the top-level file LICENSE.txt and http://www.apache.org/licenses/LICENSE-2.0.