System Operations
Today I looked into Python 3's new asyncio feature and tried out aiohttp coroutines. Single-core QPS came in at around 500~600, which is decent performance.
import aiohttp
import asyncio
import hashlib
import time
from asyncio import Queue


class Fetch:
    def __init__(self):
        self.work_queue = Queue()
        self.max_loop = 10000
        self.host = "http://14.29.5.29/XXXX"
        self.payload = {"planId": 10000007, "activityId": 1002, "label": 1,
                        "key": "98214ecfe6b9ae8855e3ac6509ad940f",
                        "keyType": "imei", "batchId": 1, "token": "395La7f9x9x"}

    async def get_url(self, host, payload):
        # POST one payload and print it when the response body contains "1".
        async with aiohttp.ClientSession() as session:
            async with session.post(host, data=payload) as resp:
                text = await resp.text()
                if "1" in text:
                    print(text, payload["key"])

    async def consumer(self):
        # Keep pulling payloads off the queue until the task is cancelled.
        while True:
            param = await self.work_queue.get()
            if param:
                await self.get_url(self.host, param)
                self.work_queue.task_done()
            else:
                break

    async def producer(self):
        # Enqueue max_loop payloads, each keyed by the MD5 of an incrementing IMEI string.
        i = 0
        string = '866260035710238'
        while 1:
            if i:
                md5_str = hashlib.md5(string.encode('utf-8'))
                self.payload["key"] = md5_str.hexdigest()
                string = str(int(string) + 1)
            await self.work_queue.put(self.payload.copy())  # copy() is required: each queued item must be an independent dict
            i += 1
            if i > self.max_loop:
                break

    async def run(self):
        await self.producer()
        print('start consumer...')
        # Start 10 consumer tasks on the module-level event loop.
        tasks = [
            loop.create_task(self.consumer())
            for i in range(10)
        ]
        await self.work_queue.join()
        print('end join')
        for task in tasks:
            task.cancel()


t1 = time.time()
loop = asyncio.get_event_loop()
test = Fetch()
loop.run_until_complete(test.run())
loop.close()
print(time.time() - t1)
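For context on the 500~600 QPS figure, here is a minimal measurement sketch: it fires a fixed number of requests through a bounded pool of in-flight coroutines and divides by the elapsed wall-clock time. The URL, request count, and concurrency below are placeholder assumptions, not values from the test above, and it uses the newer asyncio.run() entry point (Python 3.7+) instead of the explicit event-loop calls in the script.

import asyncio
import time

import aiohttp

# Placeholder values for illustration; substitute your own endpoint and volume.
URL = "http://127.0.0.1:8080/ping"
TOTAL_REQUESTS = 5000
CONCURRENCY = 10


async def worker(session, sem):
    # The semaphore caps in-flight requests, mirroring the 10 consumers above.
    async with sem:
        async with session.get(URL) as resp:
            await resp.text()


async def benchmark():
    sem = asyncio.Semaphore(CONCURRENCY)
    async with aiohttp.ClientSession() as session:
        start = time.time()
        await asyncio.gather(*(worker(session, sem) for _ in range(TOTAL_REQUESTS)))
        elapsed = time.time() - start
    print("QPS: %.1f" % (TOTAL_REQUESTS / elapsed))


if __name__ == "__main__":
    asyncio.run(benchmark())

Since the whole benchmark runs in one process on one event loop, the printed figure approximates single-core QPS against whatever endpoint is configured.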