Python

Python asynchronous API calls

닉의네임 2024. 3. 13. 14:48

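The script below reads search keywords from keyword_test.csv, URL-encodes each line, and calls three search endpoints (total, filter, item) per keyword concurrently with asyncio and aiohttp, printing every response body. Certificate verification is disabled on both the aiohttp connector and the Elasticsearch client, the Elasticsearch index cache is cleared before the run, and the total run time is printed at the end.
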
# -*- coding: utf-8 -*-
import time
import json
import datetime as dt
from datetime import datetime

import asyncio
import aiohttp

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk

import requests
import ssl
import urllib3
from time import sleep
from urllib import parse
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor

# Print the OpenSSL version in use and silence warnings about unverified HTTPS requests.
print(ssl.OPENSSL_VERSION)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Fetch one URL with the shared session and return the response body as text.
async def fetch_url(session, url):
    async with session.get(url) as response:
        return await response.text()


async def process_doo(keyword):
    # Build the three search endpoints (total, filter, item) for one keyword.
    url1 = HOST + "/home/1.0/total/search?sort=RANK&inputKeyword=" + keyword + "&searchKeyword=" + keyword + "&page=1&perPage=20"
    # response1 = requests.get(url1, verify=False)
    # json_data1 = json.loads(response1.text)

    url2 = HOST + "/home/1.0/total/search/filter?sort=RANK&inputKeyword=" + keyword + "&searchKeyword=" + keyword + "&page=1&perPage=20"
    # response2 = requests.get(url2, verify=False)
    # json_data2 = json.loads(response2.text)

    url3 = HOST + "/home/1.0/total/search/item?sort=RANK&inputKeyword=" + keyword + "&searchKeyword=" + keyword + "&page=1&perPage=20"
    # response3 = requests.get(url3, verify=False)
    # json_data3 = json.loads(response3.text)

    urls = [url1, url2, url3]

    # ssl=False skips certificate verification, matching verify=False in the requests version above.
    connector = aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        # Fire the three requests concurrently and wait until every response arrives.
        tasks = [fetch_url(session, url) for url in urls]
        results = await asyncio.gather(*tasks)
        for result in results:
            print(result)

    # print(str(json_data1["data"]["dataList"][0]["itemNm"]))


async def worker():
    i = 0
    with open(CSV_FILE) as data_file:
        array_task = []
        for line in data_file:
            # URL-encode the keyword read from the CSV file.
            keyword = parse.quote(line.strip())
            array_task.append(asyncio.create_task(process_doo(keyword)))
            # i % 1 is always 0, so the tasks for each keyword are awaited
            # immediately; raise the modulus to batch several keywords per wait.
            if i % 1 == 0:
                print(i)
                await asyncio.wait(array_task)
                array_task = []
            i += 1

if __name__ == '__main__':

    HOST = "http://localhost:8090"
    #HOST = "https://totalsearch-api-dev.homeplus.kr"

    CSV_FILE = "./keyword_test.csv"

    # Clear the Elasticsearch index cache before the run (certificate verification disabled).
    client = Elasticsearch("https://id:pw@domain:port/", ca_certs=False,
                           verify_certs=False)
    client.indices.clear_cache()

    now = datetime.now()
    start_time = now.strftime("%Y-%m-%d %H:%M:%S")

    with ThreadPoolExecutor(max_workers=15) as executor:
        # Pass the callable and its argument separately; executor.submit(asyncio.run(worker()))
        # would run the coroutine in the main thread and submit its result (None) as the callable.
        futures = [executor.submit(asyncio.run, worker()) for _ in range(1)]

        # Wait for all tasks to complete and surface any exception raised in a worker thread.
        for future in futures:
            future.result()

    now = datetime.now()
    end_time = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Run time :: " + start_time + " ~ " + end_time)
    print("Done.")