Dive into the following curated list of real-world examples showcasing 'through2' in JavaScript. through2 is a thin wrapper around Node.js Transform streams: you give it a transform function (chunk, enc, callback) and an optional flush function and get back a stream you can pipe data through, which is why it turns up in test fixtures, gulp plugins, documentation generators, merge-resolution pipelines, and upload progress meters alike. The snippets below are excerpts from open source projects, so surrounding requires and helper functions are occasionally elided.
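Before the project excerpts, here is a minimal, self-contained sketch of that basic pattern; the footer text and the use of stdin/stdout are purely illustrative and not taken from any project below:

var through2 = require('through2')

// transform(chunk, enc, cb) runs once per chunk flowing through the stream
var upperCase = through2(function (chunk, enc, cb) {
  // push the transformed chunk downstream and signal we are done with it
  cb(null, Buffer.from(String(chunk).toUpperCase()))
}, function (cb) {
  // flush runs once, after the last chunk and before 'end' is emitted
  this.push('\n-- done --\n')
  cb()
})

process.stdin.pipe(upperCase).pipe(process.stdout)
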
/* jshint asi:true */
'use strict'
var fs = require('fs')
var path = require('path')
var test = require('assertit')
var through2 = require('through2')
var alwaysDone = require('../index')
var exists = path.join(__dirname, '../.gitignore')
var notExists = path.join(__dirname, '../not_exists')
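// Constructor for an identity Transform; its flush handler emits 'end' by hand
// before calling back, so downstream listeners receive a custom end value.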
var EndStream = through2.ctor(function (chunk, enc, cb) {
this.push(chunk)
cb()
}, function (cb) {
this.emit('end', 2)
cb()
})
function success () {
var read = fs.createReadStream(exists)
return read.pipe(new EndStream())
}
function failure () {
var read = fs.createReadStream(notExists)
return read.pipe(new EndStream())
}
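The test file that defines these fixtures hands them to always-done, which watches the returned stream for completion. A hedged sketch of that usage, assuming the alwaysDone(fn, callback) form and assertit's test(name, fn) helper (the test titles and assertions are illustrative, not copied from the original tests):

test('completes when the piped stream ends', function (done) {
  alwaysDone(success, done)
})

test('reports an error when the source file does not exist', function (done) {
  alwaysDone(failure, function (err) {
    test.strictEqual(err instanceof Error, true)
    done()
  })
})
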
// (From a gulp sprite-generation plugin; `through` = require('through2'),
// `gutil` = require('gulp-util') and an object-merging `extend` helper are
// assumed to be required above this excerpt.)
var defaults = {
'spritesmithOptions': {},
// Used to format output CSS
// You should be using a separate beautifier plugin
'outputIndent': '\t'
};
var settings = extend({}, defaults, options);
// Keep track of all the chunks that come in so that we can re-emit in the flush
var chunkList = [];
// We use an object for imageMap so we don't get any duplicates
var imageMap = {};
// Check to make sure all of the images exist (controlled by `options.shouldVerifyImagesExist`) before trying to sprite them
var imagePromiseArray = [];
var stream = through.obj(function(chunk, enc, cb) {
// http://nodejs.org/docs/latest/api/stream.html#stream_transform_transform_chunk_encoding_callback
//console.log('transform');
// Each `chunk` is a vinyl file: https://www.npmjs.com/package/vinyl
// chunk.cwd
// chunk.base
// chunk.path
// chunk.contents
if (chunk.isStream()) {
this.emit('error', new gutil.PluginError(PLUGIN_NAME, 'Cannot operate on stream'));
return cb();
}
else if (chunk.isBuffer()) {
var contents = String(chunk.contents);
// Buffer the chunk so it can be re-emitted in the flush; the sprite-specific
// handling of `contents` (building up `imageMap` and `imagePromiseArray`) is
// not part of this excerpt.
chunkList.push(chunk);
cb();
}
else {
cb(null, chunk);
}
});

var defaultRewrite = function (url) {
if (isLocalPath(url))
return joinBaseAndPath(options.base, url);
return url;
};
var rewriteURL;
if (typeof options.rewriter !== 'function') {
rewriteURL = defaultRewrite;
}
else {
rewriteURL = function (url) {
return options.rewriter(url, defaultRewrite);
};
}
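// Example of a caller-supplied rewriter (illustrative only, not from the
// plugin's documentation): keep absolute URLs, defer to the default otherwise.
//
//   options.rewriter = function (url, defaultRewrite) {
//     return /^https?:\/\//.test(url) ? url : defaultRewrite(url);
//   };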
// Creating a stream through which each file will pass
return through.obj(function(file, enc, cb) {
var srcFile = file.path
if (file.isNull()) {
// nothing to transform; pass the empty file straight through
return cb(null, file);
}
if (file.isBuffer()) {
if (/\.css$/.test(srcFile)) {
// It's a CSS file.
var oldCSS = String(file.contents),
newCSS = rewriteCSSURLs(oldCSS, rewriteURL);
file.contents = Buffer.from(newCSS);
gutil.log("Changed CSS file: \"" + srcFile + "\"");
}
else {
// It's an HTML file; the HTML handling is not part of this excerpt.
}
}
cb(null, file);
});

// ## `flushFunction`
// The flush half of the line-by-line Transform built below with `through2.ctor`
// (its matching `transformFunction` is defined earlier in the source and not
// shown here). It appends a footer that links back to the source file.
function flushFunction (cb) {
// filename of the processed file itself.
const file = this.options.file
// The `.md` file will be in the same directory as the source file. We only
// need the basename.
const basename = path.basename(file)
const date = String(new Date())
this.push('------------------------\n')
this.push(`Generated _${date}_ from [Ⓢ ${basename}](${basename} "View in source")\n`)
this.push('\n')
cb()
}
// ## `Transform`
// Instead of returning a `stream.Transform` instance, through2.ctor() returns a
// constructor for our custom Transform that operates on a line-by-line basis.
const Transform = through2.ctor(transformFunction, flushFunction)
// ## `doc`
// `doc` accepts a filename, creates a corresponding read stream, processes the
// file and writes the resulting `.md` file to disk.
const doc = file =>
fs.createReadStream(file)
// [`split`](https://www.npmjs.com/package/split) is a transform that
// generates chunks separated by new-line.
.pipe(split())
// Here we invoke our custom `Transform` instance used for processing
// the separated line-chunks.
.pipe(new Transform({
// If there is no file extension, we assume that the source file is
// a JavaScript file. In some cases, we could also determine the
// extension name from the first line of the file, e.g.
// `#!/usr/bin/env node`.
file,
extension: path.extname(file) || '.js'
}))
// Write the generated Markdown next to the source file; the `extension` option
// name and the `<file>.md` output path are assumptions in this reconstruction.
.pipe(fs.createWriteStream(`${file}.md`))
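Calling `doc` is then a single line per source file (the filename here is only an example):

doc('example.js') // reads example.js line-by-line and writes example.js.md next to it
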
function babelify(js, dir = '') {
const babelConfig = getBabelCommonConfig();
const stream = js.pipe(babel(babelConfig));
return stream
// eslint-disable-next-line func-names
.pipe(through2.obj(function (file, encoding, next) {
const matches = file.path.match(/(routes|dashboard|guide|entry|entrywithoutsider)\.nunjucks\.(js|jsx)/);
if (matches) {
const content = file.contents.toString(encoding);
file.contents = Buffer.from(content
.replace(`'{{ ${matches[1]} }}'`, `{{ ${matches[1]} }}`)
// eslint-disable-next-line quotes
.replace(`'{{ home }}'`, '{{ home }}')
// eslint-disable-next-line quotes
.replace(`'{{ master }}'`, '{{ master }}'));
}
this.push(file);
next();
}))
.pipe(gulp.dest(path.join(libDir, dir)));
}
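A call site would look roughly like this (the glob is made up; `gulp`, `libDir` and the helpers come from the surrounding gulpfile):

babelify(gulp.src(['components/**/*.js', 'components/**/*.jsx']), '');
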
function resolutionStream (headA, headB) {
if (args.stdin) return pumpify.obj(process.stdin, ndjson.parse())
var pipeline = []
pipeline.push(db.diff(headA, headB))
if (args.left || args.right || args.random) {
var choice = 0 // left
if (args.right) choice = 1
if (args.random) choice = +(Math.random() < 0.5)
pipeline.push(through.obj(function (versions, enc, next) {
var winner = versions[choice]
debug('versions', versions)
debug('winner', winner)
next(null, winner)
}))
} else { // manual
pipeline.push(batcher(args.limit))
pipeline.push(manualMergeStream({vizFn: vizFn}))
}
return pumpify.obj(pipeline)
}
function renameIDLabels(map) {
var buf = []; // buffer so each row is renamed before continuing
return through.obj(function(row, enc, next) {
if (map[row.id]) {
row.id = map[row.id];
}
if (row.dedupe && map[row.dedupe]) {
row.dedupe = map[row.dedupe];
}
forOwn(row.deps, function(dep, key) {
if (map[dep]) {
row.deps[key] = map[dep];
}
});
buf.push(row);
next();
}, function(next) {
// flush: re-emit the renamed rows once every row has been buffered
buf.forEach(function(row) {
this.push(row);
}, this);
next();
});
}
function getContents(file, xml) {
if (file.isBuffer()) {
return Buffer.from(xml);
}
/* else if (file.isStream()) */
const contents = through();
contents.write(xml);
contents.end();
return contents;
}
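In the kind of gulp plugin this helper comes from, it is used to swap a vinyl file's contents for generated XML while preserving buffer vs. stream mode. A rough, illustrative wrapper (not the plugin's actual code), using through2's object mode:

const replaceWithXml = xml => through2.obj(function (file, enc, cb) {
  file.contents = getContents(file, xml);
  cb(null, file);
});
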
async uploadFile (file, i) {
this.setState({ files: updateAt(this.state.files, i, { pending: true }) })
const { dir, core } = this.props
let { name, webkitRelativePath } = file
if (this.state.uploadDir) name = webkitRelativePath
const path = (dir === '/' ? '' : dir) + '/' + name
const speedo = speedometer()
let speed = 0
let written = 0
const update = () => {
// todo: rerender only if state actually changed.
this.setState({ files: updateAt(this.state.files, i, { written, speed }) })
}
let debounce = setInterval(update, 200)
const passthrough = through((chunk, enc, next) => {
written += chunk.length
speed = speedo(chunk.length)
next()
})
const reader = fileReader(file)
pump(reader, passthrough)
const key = this.props.archive
const res = await core.rpc.request('fs/writeFile', { key, path, stream: reader })
clearInterval(debounce)
this.setState({ files: updateAt(this.state.files, i, { pending: false, done: true, written, speed }), pending: false, done: true })
// todo: is this clean enough?
core.getStore('fs').fetchStats({ archive: key, path: this.props.dir })
}
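The progress-reporting trick above (a counting passthrough teed off the file reader) also works on its own, outside any UI component. A minimal standalone sketch, assuming only the through2, speedometer and pump packages; the file names are made up for illustration:

const fs = require('fs')
const pump = require('pump')
const through = require('through2')
const speedometer = require('speedometer')

// Pipe any readable through this to get a running byte count and speed estimate.
function progressStream (onProgress) {
  const speed = speedometer()
  let written = 0
  return through(function (chunk, enc, next) {
    written += chunk.length
    onProgress(written, speed(chunk.length))
    next(null, chunk) // pass the data along unchanged
  })
}

pump(
  fs.createReadStream('some-large-file.bin'),
  progressStream((bytes, bps) => console.log(`${bytes} bytes, ${Math.round(bps)} B/s`)),
  fs.createWriteStream('copy-of-large-file.bin'),
  (err) => { if (err) console.error(err) }
)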