Very slow http post requests in python


I have created a Python script which collects JSON data from the Facebook Graph API and checks each user's job_title information.

Using this script, I notify users via a chatbot to update their job_title, but sending the requests to all of the users takes too long.

import json
import requests

users_url = '...'  # Facebook Graph API endpoint to fetch user details
MESSAGE_TO_SEND = '....PLEASE UPDATE JOB TITLE....'

ACCESS_TOKEN = '...'  # page access token

def reply(user_id, msg, ACCESS_TOKEN):
    # Send a single message to one user via the Send API
    data = {
        "recipient": { "id": user_id },
        "message": { "text": msg }
    }

    resp = requests.post("https://graph.facebook.com/v9.0/me/messages?access_token=" + ACCESS_TOKEN, json=data)
    print('Message sent to:', user_id)
    # print(resp.content, resp, 'response from facebook')

def header(ACCESS_TOKEN):
    return {'Authorization': 'Bearer ' + ACCESS_TOKEN}

def user_data(ACCESS_TOKEN):
    # Fetch all users, then message everyone whose profile has no title
    headers = header(ACCESS_TOKEN)
    data = requests.get(users_url, headers=headers)
    result_json = json.loads(data.text)
    resources = result_json['Resources']

    for resource in resources:
        if 'title' not in resource:
            user_id = str(resource['id'])
            reply(user_id, MESSAGE_TO_SEND, ACCESS_TOKEN)


user_data(ACCESS_TOKEN)

Please help me. What can I do to speed this up?

Best answer (AudioBubble):

Adapting the example here...

from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import as_completed
import time

def square(n):
    # Stand-in for a slow, I/O-bound call such as an HTTP request
    time.sleep(3.0)
    print(n * n)

def main():
    values = range(10)
    # Run square over all values on up to 5 threads at once
    with ThreadPoolExecutor(max_workers=5) as executor:
        results = executor.map(square, values)
    # for result in results:
        # print(result)

if __name__ == '__main__':
    st = time.time()
    main()
    et = time.time()
    print('{:.3f} seconds'.format(et-st))

Replace values with your list of user_ids and square with your reply function, and set max_workers to a number of your liking.
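
Since reply takes three arguments rather than one like square, executor.submit is an easy way to fan it out. A minimal sketch, assuming user_ids is a list of ids you have already collected (e.g. inside user_data) and that reply, MESSAGE_TO_SEND and ACCESS_TOKEN are defined as in your question:

from concurrent.futures import ThreadPoolExecutor

def notify_all(user_ids, ACCESS_TOKEN):
    # Each call to reply blocks on the network, so running several
    # at once on separate threads cuts the total wall-clock time.
    with ThreadPoolExecutor(max_workers=5) as executor:
        for user_id in user_ids:
            executor.submit(reply, user_id, MESSAGE_TO_SEND, ACCESS_TOKEN)
    # Leaving the with-block waits for every submitted call to finish.

executor.map would also work if you wrap reply with functools.partial to fix the msg and ACCESS_TOKEN arguments.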