Reputation: 1656
I am looking for a way to call shell scripts from Python and write their stdout and stderr to a file using the logging module. Here is my code:
import logging
import os
import shlex
import subprocess
import sys
import tempfile
def run_shell_command(command_line):
command_line_args = shlex.split(command_line)
logging.info('Subprocess: \"' + command_line + '\"')
process_succeeded = True
try:
process_output_filename = tempfile.mktemp(suffix = 'subprocess_tmp_file_')
process_output = open(process_output_filename, 'w')
command_line_process = subprocess.Popen(command_line_args,\
stdout = process_output,\
stderr = process_output)
command_line_process.wait()
process_output.close()
process_output = open(process_output_filename, 'r')
log_subprocess_output(process_output)
process_output.close()
os.remove(process_output_filename)
except:
exception = sys.exc_info()[1]
logging.info('Exception occured: ' + str(exception))
process_succeeded = False
if process_succeeded:
logging.info('Subprocess finished')
else:
logging.info('Subprocess failed')
return process_succeeded
And I am sure that there is the way to do it without creating temporary file to store process output. Any ideas?
Upvotes: 91
Views: 144320
Reputation: 1577
Handling the live output stream of a command can be achieved by iterating over stdout as the subprocess.Popen process runs.

This implementation:

- passes text=True to automatically decode bytestrings into strings
- raises CalledProcessError upon failure if check=True, like subprocess.run does
- returns a CompletedProcess upon success, like subprocess.run does

import logging
from subprocess import PIPE, STDOUT, CalledProcessError, CompletedProcess, Popen
def stream_command(
args,
*,
stdout_handler=logging.info,
check=True,
text=True,
stdout=PIPE,
stderr=STDOUT,
**kwargs,
):
"""Mimic subprocess.run, while processing the command output in real time."""
with Popen(args, text=text, stdout=stdout, stderr=stderr, **kwargs) as process:
for line in process.stdout:
stdout_handler(line[:-1])
retcode = process.poll()
if check and retcode:
raise CalledProcessError(retcode, process.args)
return CompletedProcess(process.args, retcode)
Logging to a file then becomes as simple as setting up the logging module:
logging.basicConfig(
level=logging.INFO,
filename="./capture.log",
filemode="w",
encoding="utf-8",
)
logging.info("test from python")
stream_command(["echo", "test from subprocess"])
With the resulting file:
$ cat ./capture.log
INFO:root:test from python
INFO:root:test from subprocess
The behaviour can be tweaked to preference (print
instead of logging.info
, or both, etc.):
stream_command(["echo", "test"])
# INFO:root:test
stream_command("cat ./nonexist", shell=True, check=False)
# ERROR:root:cat: ./nonexist: No such file or directory
stream_command(["echo", "test"], stdout_handler=print)
# test
stdout_lines = []
def handler(line):
print(line)
logging.info(line)
stdout_lines.append(line)
stream_command(["echo", "test"], stdout_handler=handler)
print(stdout_lines)
# test
# INFO:root:test
# ['test']
Upvotes: 3
Reputation: 2684
This worked for me:
from subprocess import Popen, PIPE, STDOUT
command = f"shell command with arguments"
process = Popen(command, shell=True, stdout=PIPE, stderr=STDOUT)
with process.stdout:
for line in iter(process.stdout.readline, b''):
print(line.decode("utf-8").strip())
With exception handling:
from subprocess import Popen, PIPE, STDOUT, CalledProcessError
command = f"shell command with arguments"
process = Popen(command, shell=True, stdout=PIPE, stderr=STDOUT)
with process.stdout:
try:
for line in iter(process.stdout.readline, b''):
print(line.decode("utf-8").strip())
except CalledProcessError as e:
print(f"{str(e)}")
Upvotes: 8
Reputation: 742
I was trying to achieve the same with check_call and check_output. I found this solution to be working.
import logging
import threading
import os
import subprocess
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
class LogPipe(threading.Thread):
def __init__(self, level):
"""Setup the object with a logger and a loglevel
and start the thread
"""
threading.Thread.__init__(self)
self.daemon = False
self.level = level
self.fdRead, self.fdWrite = os.pipe()
self.pipeReader = os.fdopen(self.fdRead)
self.start()
def fileno(self):
"""Return the write file descriptor of the pipe"""
return self.fdWrite
def run(self):
"""Run the thread, logging everything."""
for line in iter(self.pipeReader.readline, ''):
logging.log(self.level, line.strip('\n'))
self.pipeReader.close()
def close(self):
"""Close the write end of the pipe."""
os.close(self.fdWrite)
def write(self):
"""If your code has something like sys.stdout.write"""
logging.log(self.level, message)
def flush(self):
"""If you code has something like this sys.stdout.flush"""
pass
After implementing it, I performed the below steps:
try:
# It works on multiple handlers as well
logging.basicConfig(handlers=[logging.FileHandler(log_file), logging.StreamHandler()])
sys.stdout = LogPipe(logging.INFO)
sys.stderr = LogPipe(logging.ERROR)
...
subprocess.check_call(subprocess_cmd, stdout=sys.stdout, stderr=sys.stderr)
export_output = subprocess.check_output(subprocess_cmd, stderr=sys.stderr)
...
finally:
sys.stdout.close()
sys.stderr.close()
# It is neccessary to close the file handlers properly.
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
logging.shutdown()
os.remove(log_file)
Upvotes: 8
Reputation: 414149
You could try to pass the pipe directly without buffering the whole subprocess output in memory:
from subprocess import Popen, PIPE, STDOUT
process = Popen(command_line_args, stdout=PIPE, stderr=STDOUT)
with process.stdout:
log_subprocess_output(process.stdout)
exitcode = process.wait() # 0 means success
where log_subprocess_output()
could look like:
def log_subprocess_output(pipe):
for line in iter(pipe.readline, b''): # b'\n'-separated lines
logging.info('got line from subprocess: %r', line)
Upvotes: 85
Reputation: 101919
I am sure that there is the way to do it without creating temporary file to store process output
You simply have to check for the documentation of Popen
, in particular about stdout
and stderr
:
stdin, stdout and stderr specify the executed program's standard input, standard output and standard error file handles, respectively. Valid values are PIPE, an existing file descriptor (a positive integer), an existing file object, and None. PIPE indicates that a new pipe to the child should be created. With the default settings of None, no redirection will occur; the child's file handles will be inherited from the parent. Additionally, stderr can be STDOUT, which indicates that the stderr data from the child process should be captured into the same file handle as for stdout.
So you can see that you can either use a file object, or the PIPE
value. This allows you to use the communicate()
method to retrieve the output:
from StringIO import StringIO
process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, error = process.communicate()
log_subprocess_output(StringIO(output))
I'd rewrite your code as:
import shlex
import logging
import subprocess
from StringIO import StringIO
def run_shell_command(command_line):
command_line_args = shlex.split(command_line)
logging.info('Subprocess: "' + command_line + '"')
try:
command_line_process = subprocess.Popen(
command_line_args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
process_output, _ = command_line_process.communicate()
# process_output is now a string, not a file,
# you may want to do:
# process_output = StringIO(process_output)
log_subprocess_output(process_output)
except (OSError, CalledProcessError) as exception:
logging.info('Exception occured: ' + str(exception))
logging.info('Subprocess failed')
return False
else:
# no exception was raised
logging.info('Subprocess finished')
return True
Upvotes: 43