首页 > 解决方案 > 多处理多个执行器等待每个作业

问题描述

我有 2 个任务 - load_url 和 some_job。需要同时提交并使用 python 执行器...

看起来执行器在某个 load_url 任务完成之前,不会开始运行 some_job。

当我运行此作业时 - some_job 仅在 load_url 过程完成后启动。

import sys
from concurrent import futures
import urllib.request
import multiprocessing
import time

# Demo inputs for load_url; the last entry is a made-up domain (the stub
# worker never actually fetches anything, so resolvability does not matter).
URLS = ['http://www.foxnews.com/',
        'http://www.cnn.com/',
        'http://europe.wsj.com/',
        'http://www.bbc.co.uk/',
        'http://some-made-up-domain.com/']


def load_url(url, e, timeout=0):
    """Stand-in worker that pretends to fetch *url*.

    The URL is never actually requested; a fixed 10-second sleep
    simulates the download. *e* and *timeout* are accepted for
    interface compatibility but unused.
    """
    time.sleep(10)
    return None

def some_job(url, e, timeout=0):
    """Second demo task: announce itself, then simulate 10 s of work.

    The print call marks when the pool actually starts this task,
    which is what the surrounding experiment is observing.
    """
    print('some_job')
    time.sleep(10)
    return None


def check_futures(livefutures):
    """Block until every future in *livefutures* has finished.

    Accepts any iterable of futures (the caller passes a dict whose
    keys are futures). Prints the still-unfinished futures after each
    wait interval.

    Fixes two defects in the original busy-wait:
    - ``f.running()`` is False for PENDING futures that are still queued,
      so the loop could exit while work had not even started;
    - the tight ``while`` loop spun at 100% CPU and flooded stdout.
    ``futures.wait`` blocks efficiently until completion instead.
    """
    pending = set(livefutures)
    while pending:
        # timeout=1 so progress is reported about once per second.
        done, pending = futures.wait(pending, timeout=1)
        print('runningfutures=', list(pending))


def main():
    """Submit five load_url tasks plus one some_job to a process pool.

    The original used ``max_workers=5``: the five load_url submissions
    occupied every worker, so some_job sat in the queue and only started
    once a load_url finished — the exact symptom described above. Sizing
    the pool at ``len(URLS) + 1`` leaves a free worker, letting some_job
    start immediately alongside the downloads.
    """
    print('start')
    manager = multiprocessing.Manager()
    e = manager.Event()  # shared event; workers accept it but do not use it yet
    t = '1'
    with futures.ProcessPoolExecutor(max_workers=len(URLS) + 1) as executor:
        # Map each future back to the URL it was submitted with.
        livefutures = {
            executor.submit(load_url, url, e): url
            for url in URLS}

        test = executor.submit(some_job, t, e)
        print('check_futures')
        check_futures(livefutures)


if __name__ == '__main__':
    # Guard is required with multiprocessing on spawn-based platforms
    # (Windows/macOS): child processes re-import this module.
    main()

标签: python

解决方案

进程池只有 5 个工作进程,而 5 个 load_url 任务恰好占满了它们,some_job 只能排队等待空闲进程。把 max_workers 增大到 6(例如 len(URLS) + 1),或者先提交 some_job,它就会立即开始运行。


推荐阅读