多线程多进程多协程(转载)
- Bilibili 蚂蚁学Python UP主说得很好
- 工作中看视频不方便,截取重点部分改为文字版,方便抄作业
- 原地址:https://www.bilibili.com/video/BV1bK411A7tV
- 定义一个函数
def my_func(a, b):
    """Example worker: forward both arguments to do_something."""
    do_something(a, b)
- 创建线程
import threading

# Create (but do not yet start) a thread that will run my_func(a, b).
t = threading.Thread(target=my_func, args=(a, b))
- 启动线程
t.start()  # start the thread: runs my_func(a, b) concurrently
- 等待结束
t.join()  # block the calling thread until t has finished
- 【多线程多进程多协程】queue.Queue是线程安全的
import queue

# queue.Queue is thread-safe: it can be shared between producer and
# consumer threads without extra locking.
q = queue.Queue()

# Add and retrieve items (put blocks when the queue is full,
# get blocks when it is empty).
q.put(item)  # NOTE(review): source had q.put(time); `item` appears intended -- confirm
item = q.get()

# Inspect state. These are snapshots only: in a multithreaded program
# the answer may be stale by the time you act on it.
q.qsize()
q.empty()
q.full()
- try-finally
import threading

lock = threading.Lock()

# Acquire/release with try-finally so the lock is released even if
# the critical section raises.
lock.acquire()
try:
    pass  # do something
finally:
    lock.release()
- with
import threading

lock = threading.Lock()

# Preferred form: the context manager acquires on entry and always
# releases on exit, even on exceptions.
with lock:
    pass  # do something
- map函数,结果与入参顺序对应
from concurrent.futures import ThreadPoolExecutor

arg_list = []
with ThreadPoolExecutor() as pool:
    # map returns results in the same order as the inputs.
    results = pool.map(my_func, arg_list)
    for result in results:
        print(result)  # bug fix: original printed `results` (the iterator) each time
- submit函数,配合as_completed可按任务完成顺序获取结果
from concurrent.futures import ThreadPoolExecutor, as_completed

arg_list = []
with ThreadPoolExecutor() as pool:
    futures = [pool.submit(my_func, arg) for arg in arg_list]

    # In input order: iterate the futures list directly.
    for future in futures:
        print(future.result())

    # In completion order: as_completed yields each future as it finishes.
    for future in as_completed(futures):
        print(future.result())
import time
from concurrent.futures import ThreadPoolExecutor

from flask import Flask

app = Flask(__name__)

# Shared thread pool: with threads the pool may safely live at module level.
pool = ThreadPoolExecutor()


def do_1():
    """Simulated I/O-bound task (1 s)."""
    time.sleep(1)
    return 'do_1'


def do_2():
    """Simulated I/O-bound task (1 s)."""
    time.sleep(1)
    return 'do_2'


def do_3():
    """Simulated I/O-bound task (1 s)."""
    time.sleep(1)
    return 'do_3'


@app.route("/")
def index():
    """Run the three tasks concurrently; total latency ~1 s instead of ~3 s."""
    # Submit all three first so they overlap...
    result_1 = pool.submit(do_1)
    result_2 = pool.submit(do_2)
    result_3 = pool.submit(do_3)
    # ...then block on each result.
    return {
        '1': result_1.result(),
        '2': result_2.result(),
        '3': result_3.result(),
    }


if __name__ == "__main__":
    app.run()
多进程
图片截图自 蚂蚁学Python Bilibili 03:00
文章图片
Flask使用多进程
import time
from concurrent.futures import ProcessPoolExecutor

from flask import Flask

app = Flask(__name__)


def do_1():
    """Simulated task (1 s); must be a top-level function to be picklable."""
    time.sleep(1)
    return 'do_1'


def do_2():
    """Simulated task (1 s); must be a top-level function to be picklable."""
    time.sleep(1)
    return 'do_2'


def do_3():
    """Simulated task (1 s); must be a top-level function to be picklable."""
    time.sleep(1)
    return 'do_3'


@app.route("/")
def index():
    """Run the three tasks in worker processes concurrently."""
    result_1 = pool.submit(do_1)
    result_2 = pool.submit(do_2)
    result_3 = pool.submit(do_3)
    return {
        '1': result_1.result(),
        '2': result_2.result(),
        '3': result_3.result(),
    }


if __name__ == "__main__":
    # Unlike the thread-pool version, the process pool is created under the
    # __main__ guard: child processes re-import this module, and creating the
    # pool at import time would recurse (required on spawn-based platforms).
    pool = ProcessPoolExecutor()
    app.run()
协程:asyncio、await
import asyncio

import aiohttp

loop = asyncio.get_event_loop()


async def get_url(url):
    """Fetch one URL and print its length.

    NOTE(review): a ClientSession is created per URL here; for many URLs
    a single shared session would be more efficient -- kept as in source.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            result = await resp.text()
            print(f"url:{url},{len(result)}")


urls = [f"https://www.cnblogs.com/#p{page}" for page in range(1, 50 + 1)]
# Schedule all 50 fetches on the loop, then run until every task completes.
tasks = [loop.create_task(get_url(url)) for url in urls]
loop.run_until_complete(asyncio.wait(tasks))
控制asyncio并发数
- try-finally
import asyncio

# At most 10 coroutines may hold the semaphore at once.
sem = asyncio.Semaphore(10)


async def worker():
    """acquire/release with try-finally (await is only valid inside a coroutine)."""
    await sem.acquire()
    try:
        pass  # do something
    finally:
        sem.release()
- with
import asyncio

# At most 10 coroutines may hold the semaphore at once.
sem = asyncio.Semaphore(10)


async def worker():
    """Preferred form: async with acquires on entry, releases on exit."""
    async with sem:
        pass  # do something
- 举例
import asyncio

import aiohttp

loop = asyncio.get_event_loop()

# Limit concurrency: at most 10 fetches in flight at any moment.
semaphore = asyncio.Semaphore(10)


async def get_url(url):
    """Fetch one URL (rate-limited by the shared semaphore) and print its length."""
    async with semaphore:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                result = await resp.text()
                print(f"url:{url},{len(result)}")


urls = [f"https://www.cnblogs.com/#p{page}" for page in range(1, 50 + 1)]
tasks = [loop.create_task(get_url(url)) for url in urls]
loop.run_until_complete(asyncio.wait(tasks))