diff --git a/samples/Node.js/app.js b/samples/Node.js/app.js new file mode 100644 index 0000000..feb2fdd --- /dev/null +++ b/samples/Node.js/app.js @@ -0,0 +1,17 @@ +process.env.TMPDIR = 'tmp'; // to avoid the EXDEV rename error, see http://stackoverflow.com/q/21071303/76173 + +var express = require('express'); +var multipart = require('connect-multiparty'); +var multipartMiddleware = multipart(); +var app = express(); +var flowroutes = require('./routes/flow-routes.js'); + +// Host most stuff in the public folder +app.use(express.static(__dirname + '/public')); +app.use('/dist', express.static(__dirname + '/../../dist')); +app.use('/bower_components', express.static(__dirname + '/../../bower_components')); + +// Default Route +app.use('/',flowroutes); + +app.listen(3000); \ No newline at end of file diff --git a/samples/Node.js/models/flow-node.js b/samples/Node.js/models/flow-node.js new file mode 100644 index 0000000..9a5082e --- /dev/null +++ b/samples/Node.js/models/flow-node.js @@ -0,0 +1,216 @@ +var fs = require('fs'), + path = require('path'), + util = require('util'), + Stream = require('stream').Stream; + +module.exports = flow = function(temporaryFolder) { + var $ = this; + $.temporaryFolder = temporaryFolder; + $.maxFileSize = null; + $.fileParameterName = 'file'; + + try { + fs.mkdirSync($.temporaryFolder); + } catch (e) {} + + function cleanIdentifier(identifier) { + return identifier.replace(/[^0-9A-Za-z_-]/g, ''); + } + + function getChunkFilename(chunkNumber, identifier) { + // Clean up the identifier + identifier = cleanIdentifier(identifier); + // What would the file name be? + return path.resolve($.temporaryFolder, './flow-' + identifier + '.' 
+ chunkNumber); + } + + function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize) { + // Clean up the identifier + identifier = cleanIdentifier(identifier); + + // Check if the request is sane + if (chunkNumber == 0 || chunkSize == 0 || totalSize == 0 || identifier.length == 0 || filename.length == 0) { + return 'non_flow_request'; + } + var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); + if (chunkNumber > numberOfChunks) { + return 'invalid_flow_request1'; + } + + // Is the file too big? + if ($.maxFileSize && totalSize > $.maxFileSize) { + return 'invalid_flow_request2'; + } + + if (typeof(fileSize) != 'undefined') { + if (chunkNumber < numberOfChunks && fileSize != chunkSize) { + // The chunk in the POST request isn't the correct size + return 'invalid_flow_request3'; + } + if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize))) { + // The chunks in the POST is the last one, and the fil is not the correct size + return 'invalid_flow_request4'; + } + if (numberOfChunks == 1 && fileSize != totalSize) { + // The file is only a single chunk, and the data size does not fit + return 'invalid_flow_request5'; + } + } + + return 'valid'; + } + + //'found', filename, original_filename, identifier + //'not_found', null, null, null + $.get = function(req, callback) { + var chunkNumber = req.params('flowChunkNumber', 0); + var chunkSize = req.params('flowChunkSize', 0); + var totalSize = req.params('flowTotalSize', 0); + var identifier = req.params('flowIdentifier', ""); + var filename = req.params('flowFilename', ""); + + if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') { + var chunkFilename = getChunkFilename(chunkNumber, identifier); + fs.exists(chunkFilename, function(exists) { + if (exists) { + callback('found', chunkFilename, filename, identifier); + } else { + callback('not_found', null, null, null); + } + 
}); + } else { + callback('not_found', null, null, null); + } + }; + + //'partly_done', filename, original_filename, identifier + //'done', filename, original_filename, identifier + //'invalid_flow_request', null, null, null + //'non_flow_request', null, null, null + $.post = function(req, callback) { + + var fields = req.body; + var files = req.files; + + var chunkNumber = fields['flowChunkNumber']; + var chunkSize = fields['flowChunkSize']; + var totalSize = fields['flowTotalSize']; + var identifier = cleanIdentifier(fields['flowIdentifier']); + var filename = fields['flowFilename']; + + if (!files[$.fileParameterName] || !files[$.fileParameterName].size) { + callback('invalid_flow_request', null, null, null); + return; + } + + var original_filename = files[$.fileParameterName]['originalFilename']; + var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size); + if (validation == 'valid') { + var chunkFilename = getChunkFilename(chunkNumber, identifier); + var tmpChunkFilename = files[$.fileParameterName].path; + // Save the chunk (TODO: OVERWRITE) + fs.rename(tmpChunkFilename, chunkFilename, function(err) { + // Log any error related to fs.rename (different partitions) + if (err){ + console.log(err); + return; + } + + // Do we have all the chunks? 
+ var currentTestChunk = 1; + var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); + + var testChunkExists = function() { + fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) { + if (exists) { + currentTestChunk++; + if (currentTestChunk > numberOfChunks) { + callback('done', filename, original_filename, identifier); + } else { + // Recursion + testChunkExists(); + } + } else { + callback('partly_done', filename, original_filename, identifier); + } + }); + }; + testChunkExists(); + }); + } else { + callback(validation, filename, original_filename, identifier); + } + }; + + // Pipe chunks directly in to an existsing WritableStream + // r.write(identifier, response); + // r.write(identifier, response, {end:false}); + // + // var stream = fs.createWriteStream(filename); + // r.write(identifier, stream); + // stream.on('data', function(data){...}); + // stream.on('finish', function(){...}); + $.write = function(identifier, writableStream, options) { + options = options || {}; + options.end = (typeof options['end'] == 'undefined' ? true : options['end']); + + // Iterate over each chunk + var pipeChunk = function(number) { + + var chunkFilename = getChunkFilename(number, identifier); + fs.exists(chunkFilename, function(exists) { + + if (exists) { + // If the chunk with the current number exists, + // then create a ReadStream from the file + // and pipe it to the specified writableStream. 
+ var sourceStream = fs.createReadStream(chunkFilename); + sourceStream.pipe(writableStream, { + end: false + }); + sourceStream.on('end', function() { + // When the chunk is fully streamed, + // jump to the next one + pipeChunk(number + 1); + }); + } else { + // When all the chunks have been piped, end the stream + if (options.end) writableStream.end(); + if (options.onDone) options.onDone(); + } + }); + }; + pipeChunk(1); + }; + + $.clean = function(identifier, options) { + options = options || {}; + + // Iterate over each chunk + var pipeChunkRm = function(number) { + + var chunkFilename = getChunkFilename(number, identifier); + + //console.log('removing pipeChunkRm ', number, 'chunkFilename', chunkFilename); + fs.exists(chunkFilename, function(exists) { + if (exists) { + + console.log('exist removing ', chunkFilename); + fs.unlink(chunkFilename, function(err) { + if (err && options.onError) options.onError(err); + }); + + pipeChunkRm(number + 1); + + } else { + + if (options.onDone) options.onDone(); + + } + }); + }; + pipeChunkRm(1); + }; + + return $; +}; \ No newline at end of file diff --git a/samples/Node.js/package.json b/samples/Node.js/package.json new file mode 100644 index 0000000..7ac0974 --- /dev/null +++ b/samples/Node.js/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "express": "^4.3.1", + "connect-multiparty": "^1.0.4" + } +} \ No newline at end of file diff --git a/samples/Node.js/public/index.html b/samples/Node.js/public/index.html new file mode 100644 index 0000000..60bada9 --- /dev/null +++ b/samples/Node.js/public/index.html @@ -0,0 +1,104 @@ + + + + basic + + + + + + + +
+

flow basic example

+
+ +
+
+

Inputs:

+ + + +
+
+

Buttons:

+ + Upload File + + Upload Folder + +
+
+
+ +

Transfers:

+ +

+ Upload + Pause + Cancel + Size: {{$flow.getSize()}} + Is Uploading: {{$flow.isUploading()}} +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
#NameSizeRelative PathUnique Identifier#ChunksProgressPausedUploadingCompletedSettings
{{$index+1}}{{file.name}}{{file.size}}{{file.relativePath}}{{file.uniqueIdentifier}}{{file.chunks.length}}{{file.progress()}}{{file.paused}}{{file.isUploading()}}{{file.isComplete()}} + +
+ +
+ +
+ Drag And Drop your file here +
+
// ===========================================================================
// samples/Node.js/public/js/ngflow/app.js
// Angular bootstrap for the demo: configures the ng-flow provider.
// ===========================================================================
/*global angular */
'use strict';

/**
 * The main app module
 * @name app
 * @type {angular.Module}
 */
var app = angular.module('app', ['flow'])
  .config(['flowFactoryProvider', function (flowFactoryProvider) {
    flowFactoryProvider.defaults = {
      target: '/upload',
      permanentErrors: [404, 500, 501], // client stops retrying on these codes
      testChunks: false,                // skip the GET chunk-existence tests
      maxChunkRetries: 1,
      chunkRetryInterval: 5000,
      simultaneousUploads: 4
    };
    // Log every flow event while developing.
    flowFactoryProvider.on('catchAll', function (event) {
      console.log('catchAll', arguments);
    });
    // Can be used with different implementations of Flow.js
    // flowFactoryProvider.factory = fustyFlowFactory;
  }]);

// ===========================================================================
// samples/Node.js/routes/flow-routes.js
// Express router wiring the Flow.js endpoints: chunk upload (POST), chunk
// test (GET), CORS preflight (OPTIONS) and reassembled-file download.
// ===========================================================================
var express = require('express');
var router = express.Router();
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart();
var os = require('os');
var fs = require('fs');
// The chunk folder must sit on the same partition as os.tmpdir() so that
// fs.rename() inside the model does not fail with EXDEV.
console.log(os.tmpdir());
var flow = require('../models/flow-node.js')(os.tmpdir() + '/uploads');

// Directory to keep assembled files.
var uploadFlowDir = os.tmpdir() + '/uploaded';
// ROBUSTNESS FIX: the original never created this folder, so the very first
// fs.createWriteStream() below failed with ENOENT.  Best effort: it may exist.
try {
  fs.mkdirSync(uploadFlowDir);
} catch (e) {}

// FLOW: set to true to emit Access-Control-Allow-Origin headers.
// (Typo fixed: was ACCESS_CONTROLL_ALLOW_ORIGIN; module-local, safe to rename.)
var ACCESS_CONTROL_ALLOW_ORIGIN = false;

// Map the model's status tokens onto HTTP codes.
// BUG FIX: the original passed the raw status *string* ('done',
// 'invalid_flow_request', ...) straight to res.status(), which requires a
// numeric code.  200 tells Flow.js the chunk was accepted; 500 is in the
// client's permanentErrors list, so it stops retrying a hopeless request.
function statusToHttpCode(status) {
  return (status === 'done' || status === 'partly_done') ? 200 : 500;
}

// Handle uploads through Flow.js
router.post('/upload', multipartMiddleware, function (req, res) {
  flow.post(req, function (status, filename, original_filename, identifier) {
    console.log('POST', status, original_filename, identifier);

    if (status === 'done') {
      // All chunks arrived: reassemble them into one file.
      var stream = fs.createWriteStream(uploadFlowDir + '/' + filename);
      // BUG FIX: the original used `onDone: console.log('File reassembled')`,
      // which ran console.log *immediately* and registered `undefined` as the
      // callback.  Wrap it so it only fires when reassembly finishes.
      flow.write(identifier, stream, {
        onDone: function () {
          console.log('File reassembled');
          // TODO: flow.clean(identifier) here — cleaning any earlier would
          // delete chunks before they have been written out.
        }
      });
    }

    if (ACCESS_CONTROL_ALLOW_ORIGIN) {
      res.header("Access-Control-Allow-Origin", "*");
    }
    res.status(statusToHttpCode(status)).send();
  });
});

// CORS preflight for the upload endpoint.
router.options('/upload', function (req, res) {
  console.log('OPTIONS');
  if (ACCESS_CONTROL_ALLOW_ORIGIN) {
    res.header("Access-Control-Allow-Origin", "*");
  }
  res.status(200).send();
});

// Handle status checks on chunks through Flow.js:
// 200 = chunk already uploaded, 204 = not here yet (client should upload it).
router.get('/upload', function (req, res) {
  flow.get(req, function (status, filename, original_filename, identifier) {
    console.log('GET', status);
    if (ACCESS_CONTROL_ALLOW_ORIGIN) {
      res.header("Access-Control-Allow-Origin", "*");
    }
    res.status(status === 'found' ? 200 : 204).send();
  });
});

// Stream a previously uploaded file back to the client, chunk by chunk.
router.get('/download/:identifier', function (req, res) {
  flow.write(req.params.identifier, res);
});


module.exports = router;