Reputation: 990
I need to start the bash terminal, sequentially execute several commands, collect their results, and quit. What is the correct way to do this in Node.js?
I tried to achieve this with child_process.spawn, but it doesn't work as I expected, even with a single command.
Here is the simplified code:
const process = spawn(`bash`, [])
// wait for the process to spawn
await new Promise<void>(resolve => process.once(`spawn`, resolve))
// log any output (expected to be the current node version)
process.stdout.on(`data`, data => console.log(data))
// wait for "node --version" to execute
await new Promise<void>(resolve => process.stdin.write(`node --version\n`, `utf8`, () => resolve()))
// wait for the process to end
await new Promise<void>(resolve => process.once(`close`, resolve))
The problem here is that I do not receive any output on stdout, while await process.once('spawn') works fine. I have logged stderr and every other event, like process.on and stdout.on('error'), but they are all empty. So I'm wondering what the problem is here.
In addition, Google has tons of examples of how to run a single command, but I need to run several in the same terminal, wait between each call, and collect individual results from stdout. I'm not sure how to do this when it doesn't work as expected even with a single command.
Upvotes: 6
Views: 9596
Reputation: 21130
When I follow the example provided by the spawn() docs, everything seems to work fine.
const { spawn } = require('child_process');
const ls = spawn('ls', ['-lh', '/usr']);

ls.stdout.on('data', (data) => {
  console.log(`stdout: ${data}`);
});

ls.stderr.on('data', (data) => {
  console.error(`stderr: ${data}`);
});

ls.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});
We can modify this example to use bash instead of ls, then feed it some commands (via stdin.write()) and end() the stream, so bash knows we are done writing.
const { spawn } = require('child_process');
const bash = spawn('bash');
bash.stdout.on('data', (data) => {
  console.log(`stdout: ${data}`);
});
bash.stderr.on('data', (data) => {
  console.error(`stderr: ${data}`);
});
bash.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});
bash.stdin.write('node --version\n');
bash.stdin.write('npm --version\n');
bash.stdin.end();
When you run this script it produces (version numbers may differ):
stdout: v16.13.1

stdout: 8.1.2

child process exited with code 0
The empty lines appear because the command output already includes a trailing newline, and console.log() adds an additional newline character on top.
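If you'd rather not see the blank lines, a minimal tweak (not part of the original snippet) is to convert each chunk to a string and strip its trailing newline before logging:
bash.stdout.on('data', (data) => {
  // data is a Buffer; convert it and drop the trailing newline before logging
  console.log(`stdout: ${data.toString().trimEnd()}`);
});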
I'm confused as to why you would go about doing this all through a bash process, when bash will end up creating different subprocesses. It feels like you are making things more complicated than they need to be. It would be a lot easier to use exec().
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function main() {
  const { stdout: nodeVersion } = await exec('node --version');
  console.log("node version:", nodeVersion);
  const { stdout: npmVersion } = await exec('npm --version');
  console.log("npm version:", npmVersion);
}
main();
Which produces:
node version: v16.13.1
npm version: 8.1.2
Upvotes: 1
Reputation: 1506
First of all, executing a script with parameters unknown ahead of time sounds like you're using user input, which opens you up to a host of vulnerabilities. Also, always use absolute paths to avoid path poisoning. But let's assume you're properly sanitizing everything. You can use the -c argument with bash to specify what you want to run and separate your commands with semicolons (;).
Per the man file:
If the -c option is present, then commands are read from the first non-option argument command_string. If there are arguments after the command_string, the first argument is assigned to $0 and any remaining arguments are assigned to the positional parameters. The assignment to $0 sets the name of the shell, which is used in warning and error messages.
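As a small illustrative sketch (not part of the example below), the arguments after the command string can be passed from spawn() and show up as $0 and $1:
const { spawn } = require('child_process');

// 'first' is assigned to $0 and 'second' to $1 inside the command string
const cp = spawn('bash', ['-c', 'echo "$0 and $1"', 'first', 'second']);
cp.stdout.on('data', (d) => console.log(d.toString())); // prints "first and second"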
Here's an example that works fine for me:
#!/usr/bin/node
const {spawn} = require('child_process');
const magicString = '#@*$&#@*%&#@(#%@%#@';
const myScript = 'which_node=$(which node);' +
  'node_ver=$($which_node --version);' +
  'echo "where = $which_node";' +
  'echo "version = $node_ver";' +
  `echo '${magicString}';` +
  'read foobar;' +
  'echo $foobar;';
let cp = spawn('/usr/bin/bash', ['-c',myScript]);
cp.stdout.setEncoding('utf8');
let data = '';
cp.stdout.on('data', (d) => {
  if (d.endsWith(magicString + '\n')) {
    d = d.substring(0, d.length - magicString.length - 1);
    data += d;
    // do whatever with data
    cp.stdin.write('asdfasdsadfasdf\n');
  }
  else
    data += d;
});
cp.on('close',()=>console.log(data));
Output as expected:
$ ./test.js
where = /usr/local/bin/node
version = v14.18.1
asdfasdsadfasdf
Upvotes: 0
Reputation: 39
Use the shelljs package and implement a helper function to run each command. Helper function:
const shell = require('shelljs');
async function runShellCmd(cmd) {
  return new Promise((resolve, reject) => {
    shell.exec(cmd, async (code, stdout, stderr) => {
      if (!code) {
        return resolve(stdout);
      }
      return reject(stderr);
    });
  });
}
// run your commands here
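As a usage sketch (assuming the helper above), the commands can then be awaited one after another and their outputs collected individually:
(async () => {
  // each command starts only after the previous one has resolved
  const nodeVersion = await runShellCmd('node --version');
  const npmVersion = await runShellCmd('npm --version');
  console.log(nodeVersion.trim(), npmVersion.trim());
})();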
In the case of running multiple commands in one instance, I would go with a .sh script that calls node file.js to do additional processing (e.g. calculations, executing other commands, async/await queries) and then exports the values needed for the environment to continue, either via a file or environment variables.
Upvotes: 2
Reputation: 4337
I'm not sure what the issue is, because I followed the docs, added a function returning a promise, and it seems to work.
"I need to start the bash terminal, sequentially execute several commands, collect their results and quit"
node, when run, is a process; bash, when run, is a process; ls, when run, is a process; rm, when run, is a process. The terminal spawns processes from its command-line interface, and the code below spawns processes from Node (with the same text used in the terminal):
let { spawn } = require('child_process') // spawn module
async function exec(command) {
  return await new Promise(resolve => {
    var toReturn = "" // text to return
    let options = { env: process.env, cwd: undefined, shell: true }
    let myChild = spawn(command, options)
    myChild.stdout.on('data', txt => toReturn += txt)
    myChild.stderr.on('data', txt => toReturn += txt)
    myChild.on('close', () => resolve(toReturn))
  })
};
(async () => {
  // someConditionBasedOn, if_true_cmd and if_false_cmd are placeholders for your own logic
  let ls_result = await exec('ls'), next_result = null
  if (someConditionBasedOn(ls_result)) { next_result = await exec(if_true_cmd) }
  else { next_result = await exec(if_false_cmd) }
})()
Upvotes: 0
Reputation: 75488
There are two things that may cause the problem. The first is that the last line executes resolve() right away, allowing code execution to move on to the next instruction immediately. The second is that console.log(data) might not be enough to print the output: as I observed it, data is a Buffer, not a string.
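As a quick illustration (not part of the original code), logging a Buffer directly shows its bytes rather than the text it contains:
// prints something like <Buffer 76 31 36 ...>
console.log(Buffer.from('v16.13.1\n'));
// prints the readable text
console.log(Buffer.from('v16.13.1\n').toString());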
Please try this code and see if you get any useful message:
const { spawn } = require('child_process');
async function main() {
  console.log("(console.log test)");
  const process = spawn(`bash`, [])
  // wait for the process to spawn
  await new Promise(resolve => process.once(`spawn`, resolve))
  // log any output (expected to be the current node version)
  process.stdout.on(`data`, data => console.log(data.toString()))
  // log any stderr
  process.stderr.on(`data`, data => console.log(data.toString()))
  // wait for "node --version" to execute
  await new Promise(resolve => process.stdin.write(`exec node --version\n`, `utf8`, () => resolve()))
  // wait for stdout and stderr stream to end, and process to close
  await Promise.all([
    new Promise(resolve => process.stdout.on('end', resolve)),
    new Promise(resolve => process.stderr.on('end', resolve)),
    new Promise(resolve => process.once(`close`, resolve))
  ])
}
main()
Upvotes: 3