Reputation: 7019
I want to monitor progress across multiple workers running in separate processes. Each subprocess gets its own progress bar, but this doesn't work properly with a ProcessPoolExecutor executor:
import random
import time
from concurrent import futures

import tqdm

PROCESSES = 4  # example values; the exact numbers are not important here
THREADS = 2


def main():
    with futures.ProcessPoolExecutor(max_workers=PROCESSES) as executor:
        fut_to_num = {}
        for i in range(PROCESSES):
            fut = executor.submit(execute_many_threads, i)
            fut_to_num[fut] = i
        for future in futures.as_completed(fut_to_num):
            r = future.result()
            # print('{} returned {}'.format(fut_to_num[future], r))
    print('\nDone!\n')


def execute_many_threads(n_pool=0):
    # each worker process runs several threads, each with its own bar
    with futures.ThreadPoolExecutor(max_workers=THREADS) as executor:
        for i in range(THREADS):
            executor.submit(execute_thread, n_pool, i)
    return n_pool + 1


def execute_thread(n_pool=0, n_thread=0):
    s = random.randint(1, 5)
    thread_num = n_pool*(PROCESSES-1) + n_thread
    progress = tqdm.tqdm(
        desc='#{:02d}'.format(thread_num),
        position=thread_num,
        total=10*s,
        leave=False,
    )
    # print('Executing {}: {}...'.format(thread_num, s))
    for i in range(s):
        time.sleep(1)
        progress.update(n=10)
    progress.close()
    return s


if __name__ == '__main__':
    main()
When the ProcessPoolExecutor is replaced with a ThreadPoolExecutor (the second line of main()), everything works properly, so I assume this might be related to some serialization issue. Could you please help me figure out the proper invocation of tqdm when using multiprocessing?
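For clarity, the working variant only swaps the executor class on that line; the rest of main() is unchanged:

    with futures.ThreadPoolExecutor(max_workers=PROCESSES) as executor:  # works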
python --version is Python 3.7.2, tqdm==4.31.1, Ubuntu 18.04.
Upvotes: 7
Views: 6722
Reputation: 301
According to the nested progress bars section of the tqdm README (https://github.com/tqdm/tqdm?tab=readme-ov-file#nested-progress-bars) and the example at https://github.com/tqdm/tqdm/blob/master/examples/parallel_bars.py, create the process pool with an initializer:
with futures.ProcessPoolExecutor(
    max_workers=PROCESSES,
    initializer=tqdm.tqdm.set_lock,
    initargs=(tqdm.tqdm.get_lock(),),
) as executor:
This shares the same tqdm.tqdm._lock (a TqdmDefaultWriteLock instance) among the processes, so the bars can write to the terminal without interfering with each other.
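For reference, here is a minimal self-contained sketch of the pattern with one bar per submitted task; the work() helper, the PROCESSES value, and the sleep-based workload are illustrative and not part of the answer:

import random
import time
from concurrent import futures

import tqdm

PROCESSES = 4  # illustrative value


def work(task_num):
    s = random.randint(1, 5)
    # position pins each bar to its own terminal row
    progress = tqdm.tqdm(desc='#{:02d}'.format(task_num),
                         position=task_num, total=10 * s, leave=False)
    for _ in range(s):
        time.sleep(1)
        progress.update(n=10)
    progress.close()
    return s


def main():
    with futures.ProcessPoolExecutor(
        max_workers=PROCESSES,
        # every worker process reuses the parent's write lock, so bars
        # from different processes don't clobber each other's lines
        initializer=tqdm.tqdm.set_lock,
        initargs=(tqdm.tqdm.get_lock(),),
    ) as executor:
        tasks = [executor.submit(work, i) for i in range(PROCESSES)]
        for future in futures.as_completed(tasks):
            future.result()
    print('\nDone!\n')


if __name__ == '__main__':
    main()

On a fork-based platform such as the Ubuntu setup in the question, the lock passed via initargs is simply inherited by the worker processes; the linked parallel_bars.py example also shows additional setup (freeze_support() and an explicit multiprocessing RLock) for Windows support.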
Upvotes: 0