Is there any way to speed up the same POST request to an API using multithreading? I have seen examples with many different requests, but not with a single repeated one. I am trying to average 20 requests per second, stopping the script as soon as a request returns a 200 response code.
My code:
# coding: utf-8
import sys
import requests
import time
import hashlib
import hmac
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
def boucleWhile(statusCommande):
    """POST the signed request repeatedly until the API answers 200.

    Args:
        statusCommande: HTTP status code of a previous attempt. If it is
            already 200, the loop body is skipped and None is returned.

    Returns:
        The raw response body (``bytes``) of the first 200 response, or
        None if ``statusCommande`` was already 200 on entry.

    NOTE(review): depends on the module-level names ``prefix`` and
    ``gen_sign`` being defined before this runs — confirm at call time.
    """
    # These pieces never change between attempts: build them once,
    # outside the loop, instead of on every iteration.
    host = "https://www.google.com"
    url = '/api'
    query_param = ''
    body = '{"text":"t-123456"}'
    while statusCommande != 200:
        print("intérieur while")
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
        # Signing stays inside the loop: signature headers typically embed a
        # timestamp/nonce, so they must be regenerated for every attempt.
        sign_headers = gen_sign('POST', prefix + url, query_param, body)
        headers.update(sign_headers)
        r = requests.post(host + prefix + url, headers=headers, data=body)
        print(r.status_code)
        print(r.content)
        print(r.json())
        if r.status_code == 200:
            # Only exit path while looping: the while condition itself is
            # never updated, so the explicit return carries the result out.
            return r.content
if __name__ == "__main__":
    url_list = [
        "https://www.google.com/api"
    ]
    host = "https://www.google.com"
    prefix = ""
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
    url = '/api'
    query_param = ''
    body = '{"text":"t-123456"}'
    # change XXX value
    sign_headers = gen_sign('POST', prefix + url, query_param, body)
    headers.update(sign_headers)
    # One synchronous probe request so we know the starting status code.
    r = requests.post(host + prefix + url, headers=headers, data=body)
    print(r.status_code)
    print(r.content)
    print(r.json())
    print(body)
    statusCommande = r.status_code
    # start = time()
    processes = []
    with ThreadPoolExecutor(max_workers=200) as executor:
        for url in url_list:
            # BUG FIX: the original wrote
            #     executor.submit(boucleWhile(statusCommande), url)
            # which CALLS boucleWhile in the main thread (blocking,
            # sequential — no speedup) and submits its return value,
            # which is not callable. submit() takes the function and
            # its arguments separately; the pool then runs the call
            # concurrently. boucleWhile takes no url, so none is passed.
            processes.append(executor.submit(boucleWhile, statusCommande))
    for task in as_completed(processes):
        print(task.result())
    # print(f'Time taken: {time() - start}')
Any ideas?