Skip to content

Commit

Permalink
Update sharp and mime version
Browse files Browse the repository at this point in the history
  • Loading branch information
ChenglongMa committed Nov 4, 2021
1 parent 486d569 commit ffbaa22
Show file tree
Hide file tree
Showing 5 changed files with 1,679 additions and 373 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
.idea
# Logs
logs
*.log
Expand Down
8 changes: 7 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,17 @@ $ npm i @chenglongma/skipper-gcstorage

## Changelog

### Ver 2.2.0

1. Update dependencies to the latest version.
   1. [sharp](https://sharp.pixelplumbing.com/): 0.29.2, now it supports the M1 chipset (thanks [lahiruelectrily (github.com)](https://github.com/lahiruelectrily))
   2. [mime](https://www.npmjs.com/package/mime): 3.0.0

### Ver 2.1.0

Thanks [jspark-gigworks (Anselmo Park)](https://github.com/jspark-gigworks) so much for his comments!

1. Emit `writefile` event when finishing the job.
2. Support additional `CreateWriteStreamOptions` listed in https://googleapis.dev/nodejs/storage/latest/global.html#CreateWriteStreamOptions.

### Ver 2.0.0
Expand Down
230 changes: 114 additions & 116 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
const Writable = require("stream").Writable;
const _ = require("lodash");
// Note: the diff artifact duplicated this `const {Storage}` declaration,
// which is a SyntaxError (const redeclaration); keep a single declaration.
const {Storage} = require('@google-cloud/storage');
const mime = require("mime");
const sharp = require('sharp');

Expand All @@ -14,119 +14,117 @@ const sharp = require('sharp');
* @property {Object?} bucketMetadata used to create non-existing bucket
* @property {Number?} maxBytes
* @property {Object?} metadata The metadata of gcs file
 * @property {Boolean?} public Whether to make the file public
* @property {Object?} resize // refer to https://sharp.pixelplumbing.com/api-resize#resize
* @property {Number?} width
* @property {Number?} height
*
 * @returns {{ls: ls, read: ((function(*=): (*))|*), rm: rm, receive: (function(Object): *)}}
* @property {Function} ls
* @property {Function} read
* @property {Function} rm
* @property {Function} receive
*/
module.exports = function SkipperGCS(globalOpts) {
globalOpts = globalOpts || {};
_.defaults(globalOpts, {
bucket: "",
resize: {}
});
globalOpts = globalOpts || {};
_.defaults(globalOpts, {
bucket: "",
resize: {}
});

const adapter = {
ls: function (dirname, done) {
const bucket = _getBucket(globalOpts);
bucket.getFiles({ prefix: dirname, }, function (err, files) {
if (err) {
done(err);
} else {
files = _.map(files, "name");
done(undefined, files);
}
});
},
read: function (fd) {
if (arguments[1]) {
return arguments[1](new Error('For performance reasons, skipper-gcstorage does not support passing in a callback to `.read()`'));
}
const bucket = _getBucket(globalOpts);
return readStream = bucket.file(fd).createReadStream();
},
rm: function (filename, done) {
const bucket = _getBucket(globalOpts);
bucket.file(filename).delete(done);
},
/**
* A simple receiver for Skipper that writes Upstreams to Google Cloud Storage
*
* @param {Object} options
* @return {Stream.Writable}
*/
receive: function GCSReceiver(options) {
options = options || {};
_.defaults(options, globalOpts);
// if maxBytes is configed in "MB" ended string
// convert it into bytes
if (options.maxBytes) {
const _maxBytesRegResult = (options.maxBytes + '').match(/(\d+)m/i);
if (!_.isNull(_maxBytesRegResult)) {
options.maxBytes = _maxBytesRegResult[1] * 1024 * 1024;
}
};

// Build an instance of a writable stream in object mode.
const receiver__ = Writable({ objectMode: true, });
receiver__.once('error', (unusedErr) => {
// console.log('ERROR ON receiver ::', unusedErr);
});//œ
return {
ls: function (dirname, done) {
const bucket = _getBucket(globalOpts);
bucket.getFiles({prefix: dirname,}, function (err, files) {
if (err) {
done(err);
} else {
files = _.map(files, "name");
done(undefined, files);
}
});
},
read: function (fd) {
if (arguments[1]) {
return arguments[1](new Error('For performance reasons, skipper-gcstorage does not support passing in a callback to `.read()`'));
}
const bucket = _getBucket(globalOpts);
return bucket.file(fd).createReadStream();
},
rm: function (filename, done) {
const bucket = _getBucket(globalOpts);
bucket.file(filename).delete(done);
},
/**
* A simple receiver for Skipper that writes Upstreams to Google Cloud Storage
*
* @param {Object} options
* @return {Stream.Writable}
*/
receive: function GCSReceiver(options) {
options = options || {};
_.defaults(options, globalOpts);
// if maxBytes is configured in "MB" ended string
// convert it into bytes
if (options.maxBytes) {
const _maxBytesRegResult = (options.maxBytes + '').match(/(\d+)m/i);
if (!_.isNull(_maxBytesRegResult)) {
options.maxBytes = _maxBytesRegResult[1] * 1024 * 1024;
}
}

// This `_write` method is invoked each time a new file is pumped in
// from the upstream. `incomingFileStream` is a readable binary stream.
receiver__._write = (incomingFileStream, encoding, proceed) => {
_getOrCreatBucket(options, bucket => {
// `skipperFd` is the file descriptor-- the unique identifier.
// Often represents the location where file should be written.
//
// But note that we formerly used `fd`, but now Node attaches an `fd` property
// to Readable streams that come from the filesystem. So this kinda messed
// us up. And we had to do this instead:
const incomingFd = incomingFileStream.skipperFd || (_.isString(incomingFileStream.fd) ? incomingFileStream.fd : undefined);
if (!_.isString(incomingFd)) {
return proceed(new Error('In skipper-gcstorage adapter, write() method called with a stream that has an invalid `skipperFd`: ' + incomingFd));
}
// Build an instance of a writable stream in object mode.
const receiver__ = Writable({objectMode: true,});
receiver__.once('error', (unusedErr) => {
// console.log('ERROR ON receiver ::', unusedErr);
});//œ

incomingFileStream.once('error', (unusedErr) => {
// console.log('ERROR ON incoming readable file stream in Skipper Google Cloud Storage adapter (%s) ::', incomingFileStream.filename, unusedErr);
});//œ
// This `_write` method is invoked each time a new file is pumped in
// from the upstream. `incomingFileStream` is a readable binary stream.
receiver__._write = (incomingFileStream, encoding, proceed) => {
_getOrCreatBucket(options, bucket => {
// `skipperFd` is the file descriptor-- the unique identifier.
// Often represents the location where file should be written.
//
// But note that we formerly used `fd`, but now Node attaches an `fd` property
// to Readable streams that come from the filesystem. So this kinda messed
// us up. And we had to do this instead:
const incomingFd = incomingFileStream.skipperFd || (_.isString(incomingFileStream.fd) ? incomingFileStream.fd : undefined);
if (!_.isString(incomingFd)) {
return proceed(new Error('In skipper-gcstorage adapter, write() method called with a stream that has an invalid `skipperFd`: ' + incomingFd));
}

options.metadata = options.metadata || {};
options.metadata.contentType = mime.getType(incomingFd);
incomingFileStream.once('error', (unusedErr) => {
// console.log('ERROR ON incoming readable file stream in Skipper Google Cloud Storage adapter (%s) ::', incomingFileStream.filename, unusedErr);
});//œ

// The default `upload` implements a unique filename by combining:
// • a generated UUID (like "4d5f444-38b4-4dc3-b9c3-74cb7fbbc932")
// • the uploaded file's original extension (like ".jpg")
const file = bucket.file(incomingFd);
const isImage = options.metadata.contentType && options.metadata.contentType.startsWith('image');
const resize = { ...options.resize, fit: 'inside' };
const transformer = sharp().rotate().resize(resize);
const stream = isImage && (resize.width || resize.height)
? incomingFileStream.pipe(transformer)
: incomingFileStream;
options.metadata = options.metadata || {};
options.metadata.contentType = mime.getType(incomingFd);

stream.pipe(file.createWriteStream(options))
.on('error', (err) => receiver__.emit("error", err))
.on('finish', function () {
incomingFileStream.extra = file.metadata;
// Indicate that a file was persisted.
receiver__.emit('writefile', incomingFileStream);
proceed();
});
});
};
return receiver__;
},
};
// The default `upload` implements a unique filename by combining:
// • a generated UUID (like "4d5f444-38b4-4dc3-b9c3-74cb7fbbc932")
// • the uploaded file's original extension (like ".jpg")
const file = bucket.file(incomingFd);
const isImage = options.metadata.contentType && options.metadata.contentType.startsWith('image');
const resize = {...options.resize, fit: 'inside'};
const transformer = sharp().rotate().resize(resize);
const stream = isImage && (resize.width || resize.height)
? incomingFileStream.pipe(transformer)
: incomingFileStream;

return adapter;
stream.pipe(file.createWriteStream(options))
.on('error', (err) => receiver__.emit("error", err))
.on('finish', function () {
incomingFileStream.extra = file.metadata;
// Indicate that a file was persisted.
receiver__.emit('writefile', incomingFileStream);
proceed();
});
});
};
return receiver__;
},
};
};

//////////////////////////////////////////////////////////////////////////////
Expand All @@ -136,12 +134,12 @@ module.exports = function SkipperGCS(globalOpts) {
* @param {object} options Options to access buckets
*/
function _getBucket(options) {
  const projectId = options.projectId || process.env.GOOGLE_CLOUD_PROJECT;
  // NOTE(review): the original guarded the keyFilename fallback with
  // `this.projectId`, which is always undefined in a plain function call,
  // so the GOOGLE_APPLICATION_CREDENTIALS fallback never fired. Guard on
  // the resolved projectId instead.
  const authOpts = {
    projectId: projectId,
    keyFilename: options.keyFilename || (projectId ? process.env.GOOGLE_APPLICATION_CREDENTIALS : undefined),
  };
  // Drop nil entries so the Storage client falls back to its own defaults.
  const storage = new Storage(_stripKeysWithNilValues(authOpts));
  return storage.bucket(options.bucket);
}//ƒ

/**
Expand All @@ -150,23 +148,23 @@ function _getBucket(options) {
* @param {function} cb Callback function executed after creation
*/
function _getOrCreatBucket(options, cb) {
  const bucket = _getBucket(options);
  bucket.exists().then(exists => {
    // `exists()` resolves with a one-element array: [Boolean].
    if (!exists[0]) {
      // Bucket is missing: create it using any caller-supplied metadata
      // (nil values stripped so the API sees only real settings).
      const metadata = _stripKeysWithNilValues(options.bucketMetadata);
      bucket.create(metadata).then(data => {
        // `create()` resolves with [Bucket, apiResponse].
        const newBucket = data[0];
        cb(newBucket);
      });
    } else {
      cb(bucket);
    }
  });
}//ƒ

/**
* destructive -- mutates, returns reference only for convenience
*/
function _stripKeysWithNilValues(dictionary) {
  // Returns a new object with every null/undefined-valued key removed.
  // (Despite the "destructive" note above, _.omitBy does not mutate its input.)
  return _.omitBy(dictionary, _.isNil);
}//ƒ
Loading

0 comments on commit ffbaa22

Please sign in to comment.