Dive into secure and efficient coding practices with our curated list of the top 10 examples showcasing 'tmp-promise' in functional components in JavaScript. Our advanced machine-learning engine meticulously scans each line of code, cross-referencing millions of open-source libraries to ensure your implementation is not just functional, but also robust and secure. Elevate your React applications to new heights by mastering the art of handling side effects, API calls, and asynchronous operations with confidence and precision.
// CLI arguments: the npm script to execute and the tests directory to run.
const npmScript = process.argv[2];
const jestTestsPath = process.argv[3];

// Print a red error and abort when a mandatory CLI argument is missing.
const requireArg = (value, message) => {
  if (!value) {
    console.error(chalk.red(message));
    process.exit(1);
  }
};

requireArg(npmScript, 'Missing mandatory npm script to run');
requireArg(jestTestsPath, 'Missing mandatory path to the tests dir to run');

// Remove the temporary directory on exit, even when it is non-empty.
tmp.setGracefulCleanup(tmpOptions);
// Spawn a child process, layering caller-supplied options over the shared
// base spawn options (caller options win on conflicts).
function spawnWithShell(cmd, args, options) {
  const mergedOptions = Object.assign({}, baseSpawnOptions, options);
  return spawn(cmd, args, mergedOptions);
}
// Reads package.json and resolves with the tarball name npm pack would
// produce: "<name>-<version>.tgz".
// NOTE(review): this block appears to be two unrelated snippets fused by the
// page extraction — the testRun piping below references `testRun`, `resolve`
// and `reject` that are not in scope here and likely belongs to a separate
// integration-test runner helper. Verify against the original file.
function getPackedName() {
return new Promise((resolve, reject) => {
fs.readFile('package.json', (err, data) => {
if (err) {
reject(err);
} else {
// Build the expected archive name from the package's name and version.
const info = JSON.parse(data.toString());
resolve(`${info.name}-${info.version}.tgz`);
}
});
});
// NOTE(review): from here on the code pipes a child process's output and maps
// its exit code to promise resolution — a fragment of a different function.
testRun.stdout.pipe(process.stdout);
testRun.stderr.pipe(process.stderr);
testRun.on('close', (exitCode) => {
if (exitCode === 0) {
resolve();
} else {
reject(new Error('Failed to run integration tests'));
}
});
});
}
// Build a production-like install inside an auto-removed temporary directory,
// then run the integration tests against it. Any failure in the chain is
// reported in red and terminates the process with a non-zero exit code.
tmp
  .withDir(async ({ path: tmpDirPath }) => {
    const unpackedDirPath = path.join(tmpDirPath, 'package');
    const archiveFilePath = await createPackage(tmpDirPath);
    await unpackTarPackage(archiveFilePath, tmpDirPath);
    await installPackageDeps(unpackedDirPath);
    return runIntegrationTests(unpackedDirPath);
  }, tmpOptions)
  .catch((err) => {
    // Prefer the stack trace when one is available.
    const details = err.stack ? err.stack : err;
    console.error(chalk.red(details));
    process.exit(1);
  });
// Mocha setup hook: resets the UnityCacheServer singleton and points the
// cache_fs module at a fresh temporary path via the NODE_CONFIG env override.
// NOTE(review): this snippet is truncated/fused by the page extraction — the
// config object below is never closed, and `extractPage` further down belongs
// to a different class entirely. Verify against the original files.
before(async () => {
UnityCacheServer.constructor._cache_instance = null;
// Synchronous unique temp path; nothing is created on disk at this point.
const tmpPath = tmp.tmpNameSync();
process.env.NODE_CONFIG = JSON.stringify({
Server: {
port: 0
},
Cache: {
defaultModule: "cache_fs",
options: {
workers: 1, // test to ensure clustering is disabled automatically
cache_fs: {
cachePath: tmpPath
}
}
},
Diagnostics: {
clientRecorder: true,
// NOTE(review): splice point — what follows is a method from an unrelated
// page-extraction class, not part of the Diagnostics config object.
async extractPage(pageNum) {
// Try the library-based extractor first; fall through to the external
// "convert" command if it throws.
try {
return this.extractPageWithLib(pageNum);
} catch (e) {
console.error(`Could not extract file ${e.message}`);
}
// the convert command takes zero-indexed page numbers
const page = pageNum - 1;
// Temporary output file for the convert command's rendered page.
const file = await tmp.file({ postfix: ".png" });
const { filePath, cleanup } = await createTempSymlink(this.file);
const command = [
"convert",
"-density",
"400",
`${filePath}[${page}]`,
file.path
];
try {
await exec(command);
// Remove the temporary symlink only after a successful conversion.
cleanup();
} catch (e) {
console.error("Failed extracting image", e);
import fs from 'fs';
import axios from 'axios';
import tmp from 'tmp-promise';
import unzip from 'unzip';
import config from './config';
// Remove tmp-promise temporary files/directories automatically on process exit.
tmp.setGracefulCleanup();
// NOTE(review): fragment — the method and its promise chain are cut off
// before the enclosing braces close.
export default {
// Downloads one contract revision as a ZIP archive into a throwaway temp
// directory and (per the visible code) resolves with that directory's path.
async downloadContract(projectId, revision, contractName) {
console.log(`${config.coreUrl}/projects/${projectId}/revisions/${revision}/contracts/${contractName}/raw-content`);
// arraybuffer keeps the binary ZIP payload intact.
const response = await axios.get(
`${config.coreUrl}/projects/${projectId}/revisions/${revision}/contracts/${contractName}/raw-content`,
{ responseType: 'arraybuffer', headers: { 'Content-Type': 'application/zip' } }
);
return await tmp.dir({ unsafeCleanup: true }) // create a temp directory with random name; this will be cleaned up automatically on process exit
.then(o => { // save the download ZIP archive
return new Promise((resolve, reject) => {
// NOTE(review): passing 'UTF-8' for binary ZIP data looks wrong — fs.writeFile
// ignores the encoding when given a Buffer, but confirm response.data is a
// Buffer here and not a string.
fs.writeFile(`${o.path}/contract-content.zip`, response.data, 'UTF-8', function(err) {
if (err) reject(err);
else resolve(o.path);
/**
 * Lazily opens the levelup database backing this instance.
 * Uses the caller-supplied store when one is configured; otherwise opens an
 * on-disk store, falling back to a temporary directory when no dbPath is set.
 * NOTE(review): snippet is truncated — the method body continues past the
 * last visible line, so post-open behavior cannot be documented here.
 */
private async _initialize() {
const levelupOptions: any = { valueEncoding: "binary" };
const store = this.options.db;
let db;
if (store) {
// Caller provided a store; wrap it directly.
db = await levelup(store as any, levelupOptions);
} else {
let directory = this.options.dbPath;
if (!directory) {
// No configured path: use a temp dir and remember its cleanup callback.
const dirInfo = await dir(tmpOptions);
directory = dirInfo.path;
this._cleanupDirectory = dirInfo.cleanup;
// don't continue if we closed while we were waiting for the dir
if (this.closed) return this._cleanup();
}
this.directory = directory;
// NOTE(review): this inner `store` shadows the outer `store` above —
// legal, but easy to misread; consider renaming in the original file.
const store = encode(leveldown(directory), levelupOptions);
db = await levelup(store, {});
}
// don't continue if we closed while we were waiting for the db
if (this.closed) return this._cleanup();
// NOTE(review): the open() promise is captured but not awaited here —
// presumably awaited in the truncated remainder; verify.
const open = db.open();
this.trie = sub(db, "trie", levelupOptions);
task: async (ctx, task) => {
// Create temporary directory
if (!publishDir) {
const { path: tmpDir } = await tmp.dir()
publishDir = tmpDir
}
await prepareFilesForPublishing(publishDir, files, ignore)
ctx.pathToPublish = publishDir
return `Files copied to temporary directory: ${ctx.pathToPublish}`
},
},
// NOTE(review): fragment — begins mid-method (references this._contextual,
// basicParaphrase and basicSynthetic that are not visible here) and is cut
// off at the end.
let source;
if (this._contextual) {
// Paraphrase data is typechecked against the schemas as it streams in.
const contextualParaphrase = this._downloadParaphrase(true)
.pipe(new TypecheckStream(this._schemas));
const basicSource = StreamUtils.chain([basicParaphrase, basicSynthetic], { objectMode: true });
// Spool the basic (non-contextual, not augmented) dataset to disk
// We need to do this because:
// 1) We don't want to run too many generation/processing steps as a pipeline, because that
// would use too much memory
// 2) We need to do multiple passes over the basic dataset for different reasons, and
// we can't cache it in memory
const { path: basicDataset, fd: basicDatasetFD } =
await tmp.file({ mode: 0o600, dir: '/var/tmp' });
await StreamUtils.waitFinish(basicSource
.pipe(new Genie.DatasetStringifier())
.pipe(fs.createWriteStream(basicDataset, { fd: basicDatasetFD })));
// basicDatasetFD is closed here
// First pass: re-read the spooled dataset to extract contexts.
let contexts = await
fs.createReadStream(basicDataset, { encoding: 'utf8' })
.pipe(byline())
.pipe(new Genie.DatasetParser({ contextual: false }))
.pipe(new Genie.ContextExtractor(this._schemas))
.read();
// Second pass over the same spooled file (continues past the visible lines).
const contextualized =
fs.createReadStream(basicDataset, { encoding: 'utf8' })
.pipe(byline())
// NOTE(review): fragment — the enclosing async function's signature is not
// visible; the caller is expected to invoke cleanup() to remove the temp file.
// Get path to simple tsconfig file which should be used for build
const tsconfigPath = join(__dirname, '../../src/tsconfig-build.json');
// Read the tsconfig file
const tsConfigString = await fsReadFileAsync(tsconfigPath, { encoding: 'utf8'}) as string;
const tsConfig = JSON.parse(tsConfigString);
// Set absolute include paths: rebase every "include" entry onto the current
// working directory so the generated config works wherever tsc is invoked.
const newIncludeFiles = [];
for (const includeFile of tsConfig.include) {
newIncludeFiles.push(join(process.cwd(), includeFile));
}
tsConfig.include = newIncludeFiles;
// Write new custom tsconfig file to a temporary location.
const { fd, path, cleanup } = await file();
await fsWriteAsync(fd, Buffer.from(JSON.stringify(tsConfig, null, 2), 'utf8'));
return {
path,
cleanup,
};
}
// Generates a temporary .jsx script for ESTK containing all paths to be
// converted, resolving through a promise chain.
// NOTE(review): fragment — the chain is cut off after the writeFile callback.
module.exports = function generateScriptFile( input, output ) {
// We need to create a temp file that ESTK can run, this file will have
// all paths that are going to be converted
return tmp.file({ postfix: '.jsx' })
// "tmp.file" returns an object with more properties, but we are only
// interested in the path property
.then( ({ path: file }) => {
log.verbose( 'Created temp file at', file )
const script = createScriptContent( input, output )
// Write script contents to temp file
return new Promise( ( resolve, reject ) => {
writeFile( file, script, err => {
if ( err ) {
return reject( err )
}
// Send file path to next function in the promise chain