To launch programs from my Python script, I'm using the following method:
import subprocess

def execute(command):
    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = process.communicate()[0]
    exitCode = process.returncode

    if (exitCode == 0):
        return output
    else:
        # ProcessException is a custom exception defined elsewhere in my code
        raise ProcessException(command, exitCode, output)
So when I launch a process such as execute("mvn clean install"), my program waits until the process is finished, and only then do I get the complete output. This is annoying if I'm running a process that takes a while to finish.

Can I have my program write the process output line by line, by polling the process output before it finishes in a loop or something?

I found this article which might be related.
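The answers below flesh this out; the core change is to read the pipe incrementally instead of calling communicate(). A minimal sketch, using the same command as above:

import subprocess

# Minimal sketch: stream output line by line while the process is still
# running, then collect the exit code at the end.
process = subprocess.Popen("mvn clean install", shell=True,
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                           text=True)
for line in process.stdout:
    print(line, end='')   # handle each line as soon as it arrives
exitCode = process.wait()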
Building on @jfs's excellent answer, here's a complete working example for you to play with. It requires Python 3.7 or newer.
sub.py
import time

for i in range(10):
    print(i, flush=True)
    time.sleep(1)
main.py
from subprocess import PIPE, Popen
import sys

with Popen([sys.executable, 'sub.py'], bufsize=1, stdout=PIPE, text=True) as sub:
    for line in sub.stdout:
        print(line, end='')
Note the flush argument used in the child script.
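If you can't modify the child script to flush, one option (a sketch, not part of the answer above) is to run the child unbuffered: CPython's -u flag, or the PYTHONUNBUFFERED environment variable, disables stdout/stderr buffering so each line reaches the pipe immediately.

from subprocess import PIPE, Popen
import sys

# Sketch: same loop as main.py above, but the child is started with -u so its
# print() calls are not held back by stdio buffering.
with Popen([sys.executable, '-u', 'sub.py'], stdout=PIPE, text=True) as sub:
    for line in sub.stdout:
        print(line, end='')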
In case anyone wants to read from both stdout and stderr at the same time using threads, this is what I came up with:
import threading
import subprocess
import Queue
from time import sleep  # needed for the polling sleep below

class AsyncLineReader(threading.Thread):
    def __init__(self, fd, outputQueue):
        threading.Thread.__init__(self)

        assert isinstance(outputQueue, Queue.Queue)
        assert callable(fd.readline)

        self.fd = fd
        self.outputQueue = outputQueue

    def run(self):
        map(self.outputQueue.put, iter(self.fd.readline, ''))

    def eof(self):
        return not self.is_alive() and self.outputQueue.empty()

    @classmethod
    def getForFd(cls, fd, start=True):
        queue = Queue.Queue()
        reader = cls(fd, queue)

        if start:
            reader.start()

        return reader, queue


process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdoutReader, stdoutQueue) = AsyncLineReader.getForFd(process.stdout)
(stderrReader, stderrQueue) = AsyncLineReader.getForFd(process.stderr)

# Keep checking queues until there is no more output.
while not stdoutReader.eof() or not stderrReader.eof():
    # Process all available lines from the stdout Queue.
    while not stdoutQueue.empty():
        line = stdoutQueue.get()
        print 'Received stdout: ' + repr(line)
        # Do stuff with stdout line.

    # Process all available lines from the stderr Queue.
    while not stderrQueue.empty():
        line = stderrQueue.get()
        print 'Received stderr: ' + repr(line)
        # Do stuff with stderr line.

    # Sleep for a short time to avoid excessive CPU use while waiting for data.
    sleep(0.05)

print "Waiting for async readers to finish..."
stdoutReader.join()
stderrReader.join()

# Close subprocess' file descriptors.
process.stdout.close()
process.stderr.close()

print "Waiting for process to exit..."
returnCode = process.wait()

if returnCode != 0:
    raise subprocess.CalledProcessError(returnCode, command)
I just wanted to share this, since I ended up on this question trying to do something similar, but none of the answers solved my problem. Hopefully it helps someone!
Note that in my use case, an external process kills the process that was started with Popen().
import time
import sys
import subprocess
import threading
import queue

cmd = 'esptool.py --chip esp8266 write_flash -z 0x1000 /home/pi/zero2/fw/base/boot_40m.bin'
cmd2 = 'esptool.py --chip esp32 -b 115200 write_flash -z 0x1000 /home/pi/zero2/fw/test.bin'
cmd3 = 'esptool.py --chip esp32 -b 115200 erase_flash'

class ExecutorFlushSTDOUT(object):
    def __init__(self, timeout=15):
        self.process = None
        self.process_output = queue.Queue(0)
        self.capture_output = threading.Thread(target=self.output_reader)
        self.timeout = timeout
        self.result = False
        self.validator = None

    def output_reader(self):
        start = time.time()
        while self.process.poll() is None and (time.time() - start) < self.timeout:
            try:
                if not self.process_output.full():
                    line = self.process.stdout.readline()
                    if line:
                        line = line.decode().rstrip("\n")
                        start = time.time()
                        self.process_output.put(line)
                        if self.validator:
                            if self.validator in line:
                                print("Valid")
                                self.result = True
            except:
                pass
        self.process.kill()
        return

    def start_process(self, cmd_list, callback=None, validator=None, timeout=None):
        if timeout:
            self.timeout = timeout
        self.validator = validator
        self.process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        self.capture_output.start()
        line = None
        self.result = False
        while self.process.poll() is None:
            try:
                if not self.process_output.empty():
                    line = self.process_output.get()
                    if line:
                        if callback:
                            callback(line)
                        #print(line)
                        line = None
            except:
                pass
        error = self.process.returncode
        if error:
            print("Error Found", str(error))
            raise RuntimeError(error)
        return self.result


execute = ExecutorFlushSTDOUT()

def liveOUTPUT(line):
    print("liveOUTPUT", line)
    try:
        if "Writing" in line:
            line = ''.join([n for n in line.split(' ')[3] if n.isdigit()])
            print("percent={}".format(line))
    except Exception as e:
        pass


result = execute.start_process(cmd2, callback=liveOUTPUT, validator="Hash of data verified.")
print("Finish", result)
None of the answers here met all of my needs:

- No threads for stdout (and no Queues, etc.)
- Non-blocking, as I need to check for other things going on
- Use PIPE, as I needed to do multiple things, e.g. stream output, write it to a log file, and return a string copy of the output.

A little background: I use a ThreadPoolExecutor to manage a pool of threads, each launching a subprocess and running them concurrently. (In Python 2.7, but this should work in newer 3.x as well.) I don't want to use threads just for output gathering, because I want as many as possible available for other things (a pool of 20 processes would already use 40 threads just to run: 1 for the process thread and 1 for stdout... and more if you want stderr, I suppose).

I'm stripping back a lot of exception handling here, so this is based on code that works in production. Hopefully I didn't break it in the copy and paste. Also, feedback is very welcome!
import os
import sys
import fcntl
import subprocess
import time

proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

# Make stdout non-blocking when using read/readline
proc_stdout = proc.stdout
fl = fcntl.fcntl(proc_stdout, fcntl.F_GETFL)
fcntl.fcntl(proc_stdout, fcntl.F_SETFL, fl | os.O_NONBLOCK)
def handle_stdout(proc_stream, my_buffer, echo_streams=True, log_file=None):
    """A little inline function to handle the stdout business."""
    # fcntl makes readline non-blocking so it raises an IOError when empty
    try:
        for s in iter(proc_stream.readline, ''):   # replace '' with b'' for Python 3
            my_buffer.append(s)

            if echo_streams:
                sys.stdout.write(s)

            if log_file:
                log_file.write(s)
    except IOError:
        pass
# The main loop while subprocess is running
stdout_parts = []

while proc.poll() is None:
    handle_stdout(proc_stdout, stdout_parts)
    # ...Check for other things here...
    # For example, check a multiprocessing.Value('b') to proc.kill()
    time.sleep(0.01)

# Not sure if this is needed, but run it again just to be sure we got it all?
handle_stdout(proc_stdout, stdout_parts)

stdout_str = "".join(stdout_parts)  # Just to demo
I'm sure there is overhead being added here, but it is not a concern in my case. Functionally it does what I need. The only thing I haven't solved is why this works perfectly for log messages, but I see some print messages show up later, all at once.
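The print messages that show up late and all at once are most likely a child-side buffering effect: when stdout is a pipe rather than a terminal, most programs switch from line buffering to block buffering, so output only reaches the parent when the buffer fills. A hedged workaround sketch (assuming a Linux host with coreutils' stdbuf available, and that cmd is a list as in the Popen call above):

import os
import subprocess

# Sketch only: coerce line-buffered output in the child so prints arrive as
# they happen. PYTHONUNBUFFERED helps Python children; stdbuf -oL helps
# programs that use C stdio.
env = dict(os.environ, PYTHONUNBUFFERED="1")
proc = subprocess.Popen(["stdbuf", "-oL"] + cmd,
                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                        env=env)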