Commit f0ceb37

Merge pull request #5 from auth0/add-hash-files

Add ability to generate digest of uploaded files (and upload them too)

hzalaz authored Jun 13, 2018
2 parents d92a357 + e9fecfb commit f0ceb37

Showing 29 changed files with 2,413 additions and 30 deletions.
21 changes: 21 additions & 0 deletions README.md
@@ -39,6 +39,27 @@ You can also have the following entries
- name: name of the folder inside `remoteBasePath`; overrides the default taken from the package name.
- snapshotName: name of the snapshot version that overrides the default `development`

### File Cryptographic Digests

If you need digests of the files you upload, add the following inside the `ccu` entry of your `package.json`:

```json
"digest": {
"hashes": ["sha384"]
}
```

This will generate the `sha384` digest of every uploaded file and store it in a separate file named `<filename>.<hash-method>`. If you need to hash only some files, e.g. JavaScript only:

```json
"digest": {
"hashes": ["sha384"],
"extensions": [".js"]
}
```

> The valid digest types are `sha256`, `sha384` and `sha512`. If none is specified, the tool generates no digest files.
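
For instance, a digest written next to a hypothetical `dist/bundle.js` can be re-checked with Node's `crypto` module. A minimal sketch, assuming the base64 encoding the tool uses:

```js
// Recompute the sha384 digest and compare it with the generated
// <filename>.<hash-method> file (paths are hypothetical).
const crypto = require('crypto');
const fs = require('fs');

const expected = fs.readFileSync('dist/bundle.js.sha384', 'utf8');
const actual = crypto.createHash('sha384')
  .update(fs.readFileSync('dist/bundle.js'))
  .digest('base64');
console.log(actual === expected ? 'digest matches' : 'digest mismatch');
```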
## Usage

```
...
```
24 changes: 5 additions & 19 deletions aws/index.js
@@ -6,6 +6,7 @@ var s3 = require('s3');
var walk = require('walk');
var path = require('path');
var client = s3.createClient({});
+var files = require('../files');

var uploader = function (version, options) {
return from(options.localPaths).map(function (directoryPath) {
@@ -21,25 +22,10 @@ var uploader = function (version, options) {
    };
    var logger = options.logger;
    if (options.dry) {
-      return Rx.Observable.create(function (observer) {
-        logger.debug(`Starting upload with following S3 config ${logger.pretty(params)}`);
-        var workingDir = process.cwd();
-        var walker = walk.walk(params.localDir, { followLinks: false });
-        walker.on('file', function (root, stats, next) {
-          var localPath = path.relative(workingDir, path.resolve(root, stats.name));
-          var file = localPath.replace(directoryPath, version.remotePath);
-          observer.next(file);
-          next();
-        });
-        walker.on('error', function(root, stats, next) {
-          observer.onError(stats.error);
-          next();
-        });
-        walker.on('end', function() {
-          observer.onCompleted();
-        });
-        return function() {};
-      });
+      logger.debug(`Starting upload with following S3 config ${logger.pretty(params)}`);
+      return files
+        .walk(params.localDir)
+        .map((localFile) => localFile.replace(directoryPath, version.remotePath));
    }
    return Rx.Observable.create(function (observer) {
      logger.debug(`Starting upload with following S3 config ${logger.pretty(params)}`);
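The refactored dry run above now emits, for each local file, the remote key it would be uploaded to. A minimal sketch of that mapping with hypothetical paths:

```js
// Hypothetical values: the local directory prefix is swapped for the
// resolved remote path, mirroring the `map` in the dry-run branch.
const directoryPath = 'build';
const remotePath = 'libs/my-lib/1.2.3';
const localFile = 'build/js/app.js';
console.log(localFile.replace(directoryPath, remotePath)); // libs/my-lib/1.2.3/js/app.js
```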
5 changes: 4 additions & 1 deletion configuration/index.js
@@ -5,6 +5,7 @@ module.exports = function (pkgInfo) {
    throw 'Missing configuration options from package.json';
  }
  var config = pkgInfo['ccu'];
+  var digest = config.digest || {};
  return {
    name: config.name || pkgInfo.name,
    version: pkgInfo.version,
@@ -13,6 +14,8 @@
    bucket: config.bucket,
    cdn: config.cdn,
    mainBundleFile: config.mainBundleFile,
-    snapshotName: config.snapshotName
+    snapshotName: config.snapshotName,
+    hashes: Array.isArray(digest.hashes) ? digest.hashes : [],
+    hashOnly: Array.isArray(digest.extensions) ? digest.extensions : []
  };
};
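A hedged sketch of the normalized output for a hypothetical `package.json`, assuming the collapsed lines add no required fields:

```js
const configuration = require('./configuration');

// Hypothetical package.json contents.
const config = configuration({
  name: 'my-lib',
  version: '1.2.3',
  ccu: {
    bucket: 'assets-bucket',
    cdn: 'https://cdn.example.com',
    mainBundleFile: 'my-lib.min.js',
    digest: { hashes: ['sha384'], extensions: ['.js'] }
  }
});

console.log(config.hashes);   // ['sha384']
console.log(config.hashOnly); // ['.js']
```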
27 changes: 27 additions & 0 deletions files/index.js
@@ -0,0 +1,27 @@
'use strict';

const walk = require('walk');
const path = require('path');
const fs = require('fs'); // needed by write() below
const Rx = require('rx');

module.exports = {
write: (filePath, contents) => fs.writeFileSync(filePath, contents, { encoding:'utf8' }),
walk: (directory) => {
return Rx.Observable.create((observer) => {
const walker = walk.walk(directory, { followLinks: false });
walker.on('file', (root, stats, next) => {
const workingDir = process.cwd();
const localPath = path.relative(workingDir, path.resolve(root, stats.name));
observer.next(localPath);
next();
});
walker.on('error', (root, stats, next) => {
observer.onError(stats.error);
next();
});
walker.on('end', () => {
observer.onCompleted();
});
});
}
};
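A minimal usage sketch of the `walk` helper above, with a hypothetical directory and the RxJS 4 `subscribe(onNext, onError, onCompleted)` signature:

```js
const files = require('./files');

files.walk('build').subscribe(
  (localPath) => console.log('found', localPath),
  (error) => console.error('walk failed', error),
  () => console.log('walk complete')
);
```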
41 changes: 41 additions & 0 deletions hash/digest.js
@@ -0,0 +1,41 @@
'use strict';

const Rx = require('rx');
const fs = require('fs');
const crypto = require('crypto');

const availableDigests = ['sha256', 'sha384', 'sha512'];

const fromFile = (file, hash) => {
return Rx.Observable.create((observer) => {
const fd = fs.createReadStream(file);
fd.on('end', function() {
hash.end();
const value = hash.read();
observer.next(value);
observer.onCompleted();
});
fd.on('error', (error) => observer.onError(error));
fd.pipe(hash);
return () => {};
});
};

const digest = (method, file) => {
const hash = crypto.createHash(method);
hash.setEncoding('base64');
return fromFile(file, hash).map((value) => {
return { file, digest: value, method };
});
};

module.exports = (file, options) => {
const digests = availableDigests.filter((d) => options.hashes.length == 0 || options.hashes.indexOf(d) != -1);
return Rx.Observable
.from(digests)
.flatMap((method) => digest(method, file));
};

module.exports.available = availableDigests;
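A minimal usage sketch of this module, with a hypothetical file; an empty `hashes` list would produce every available digest:

```js
const digest = require('./hash/digest');

digest('dist/bundle.js', { hashes: ['sha384'] }).subscribe((result) => {
  // result is e.g. { file: 'dist/bundle.js', digest: '<base64>', method: 'sha384' }
  console.log(`${result.file}.${result.method}: ${result.digest}`);
});
```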
34 changes: 34 additions & 0 deletions hash/index.js
@@ -0,0 +1,34 @@
'use strict';

const Rx = require('rx');
const from = Rx.Observable.from;
const files = require('../files');
const digest = require('./digest');
const path = require('path');

module.exports = function(options) {
if (options.hashes.length == 0) {
return Rx.Observable.empty();
}
const logger = options.logger;
  // skip digest files generated by a previous run (e.g. `.sha256`, `.sha384`)
  const ignoredExtensions = digest.available.map((d) => `.${d}`);
  const hashOnly = options.hashOnly;
logger.debug(`Checking for files to hash in ${options.localPaths}`);
return from(options.localPaths).map(function (directoryPath) {
logger.debug(`Starting to hash files in ${directoryPath}`);
return files.walk(directoryPath)
      .filter((filePath) => {
        const parts = path.parse(filePath);
        const wanted = hashOnly.length == 0 || hashOnly.indexOf(parts.ext) != -1;
        return ignoredExtensions.indexOf(parts.ext) == -1 && wanted;
      })
.flatMap((localPath) => {
return digest(localPath, options)
})
})
.concatAll()
.doOnNext((value) => {
if (!options.dry) {
files.write(`${value.file}.${value.method}`, value.digest);
}
});
}
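A hedged sketch of driving this pipeline directly, with hypothetical options; `console` stands in for the tool's own Logger:

```js
const hash = require('./hash');

hash({
  localPaths: ['build'],
  hashes: ['sha384'],
  hashOnly: ['.js'],
  dry: true,       // compute digests but skip writing the <file>.<method> files
  logger: console  // only debug() is used by this module
}).subscribe((value) => {
  console.log(`${value.file}.${value.method} -> ${value.digest}`);
});
```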
20 changes: 13 additions & 7 deletions index.js
@@ -8,20 +8,26 @@ var just = Rx.Observable.just;
var from = Rx.Observable.from;
var Logger = require('./logger');
var extend = require('util')._extend;
+var hash = require('./hash');

module.exports = function (options) {
  var logger = new Logger(options);
  logger.info(`About to upload ${options.name}@${options.version} from '${logger.pretty(options.localPaths)}' to '${options.remoteBasePath}'`);
  logger.debug(`Starting upload process with parameters ${logger.pretty(options)}`);
  var state = Object.assign({ logger: logger }, options);
  var cdn = new CDN(state);
-  cdn.exists(resolver.full(state).remotePath)
-    .tapOnNext(function (exists) {
-      if(exists) {
-        logger.warn(`File ${state.mainBundleFile} exists for version ${state.version}`);
-      }
-    })
-    .flatMap(function(exist) {
+  Rx.Observable.forkJoin([
+    cdn.exists(resolver.full(state).remotePath).tapOnNext(function (exists) {
+      if(exists) {
+        logger.warn(`File ${state.mainBundleFile} exists for version ${state.version}`);
+      }
+    }),
+    hash(state).tapOnNext(function (result) {
+      logger.debug(`File ${result.file} has ${result.method} digest ${result.digest}`);
+    })
+  ])
+  .flatMap(function(result) {
+    const exist = result[0];
    return from(resolver.for(state, exist))
      .flatMap(function (version) {
        return aws.uploader(version, state);
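`forkJoin` in RxJS 4 emits a single array holding each source's last value once all of them complete, which is why `result[0]` above is the `exists` boolean. A minimal standalone sketch:

```js
const Rx = require('rx');

Rx.Observable.forkJoin([
  Rx.Observable.just(true),      // stands in for cdn.exists(...)
  Rx.Observable.from([1, 2, 3])  // stands in for the hash(state) stream
]).subscribe((result) => {
  console.log(result); // [true, 3]: the last value from each source
});
```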