import queue
import time

import requests
from threading import Thread

from rs_zxgk.shixin.shixin import Get_Info


def spider(data9):
    # All CRUD/request work goes in this function; data9 is one task,
    # roughly the equivalent of one URL. Proxy handling, the request
    # itself, and result processing all belong here.
    # TODO: always pass timeout= to requests calls, otherwise a worker
    # thread can block forever on a dead connection.
    data9["obj"].main()
    # Example of what a raw request here would look like:
    # url = 'https://www.baidu.com/'
    # headers = {
    #     'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36',
    # }
    # res = requests.get(url=url, headers=headers, timeout=10)  # timeout is mandatory
    # print('request no.', data9['req'], 'started', data9['s'], 'finished', int(time.time()), 'status:', res.status_code)


q = queue.Queue(20)  # bounded queue: the producer blocks once 20 tasks are pending


class Work(Thread):
    def run(self):
        while True:
            spider(q.get())
            q.task_done()


if __name__ == "__main__":
    for i in range(10):  # concurrency control: start 10 worker threads
        Work(daemon=True).start()  # daemon=True lets the process exit cleanly

    info = Get_Info()
    s = int(time.time())
    for conn in range(1, 100000):
        q.put({'req': conn, 's': s, "obj": info})
    q.join()  # wait until every queued task has been processed
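
Why the timeout TODO above matters: a requests call without timeout= can block a worker thread forever on a dead connection, and since each Work thread loops serially, every hung request permanently shrinks the pool. A minimal, self-contained sketch of the failure mode handled correctly (the URL here is a placeholder, not from the original project):

import requests

try:
    # (connect timeout, read timeout) in seconds; omitting timeout= can hang a worker forever
    res = requests.get('https://example.com/', timeout=(3, 10))
    print(res.status_code)
except requests.exceptions.Timeout:
    print('timed out; the worker survives and can pick up the next task')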
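
For comparison, the same produce-and-consume pattern can be written with the standard library's concurrent.futures.ThreadPoolExecutor, which manages worker startup and shutdown itself. This is a sketch of an alternative, not the original project's code; it assumes Get_Info().main() is safe to call from multiple threads, which the hand-rolled pool above already relies on:

import time
from concurrent.futures import ThreadPoolExecutor

from rs_zxgk.shixin.shixin import Get_Info


def spider(task):
    task["obj"].main()  # same task contract as above


if __name__ == "__main__":
    info = Get_Info()
    s = int(time.time())
    # max_workers=10 mirrors the 10 hand-started threads above. Note one
    # trade-off: the executor's internal queue is unbounded, unlike the
    # backpressure provided by queue.Queue(20) in the original version.
    with ThreadPoolExecutor(max_workers=10) as pool:
        for conn in range(1, 100000):
            pool.submit(spider, {'req': conn, 's': s, 'obj': info})
    # leaving the with-block waits for all submitted tasks to finish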