반응형
Recent Posts
Recent Comments
관리 메뉴

개발잡부

파일 읽어서 초당 200개 트래픽 본문

카테고리 없음

파일 읽어서 초당 200개 트래픽

닉의네임 2024. 7. 19. 13:43
반응형
import asyncio
import aiohttp
import time
from urllib import parse
import urllib3
import matplotlib.pyplot as plt
import numpy as np
from time import sleep
from datetime import datetime, timedelta
from elasticsearch import Elasticsearch
import ssl
import os
# Check API cache usage and response latency
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

async def fetch(session, url):
    """GET *url* on the shared *session* and return the response body text.

    Returns None when the request fails with a connection error or times
    out; both cases are reported to stdout rather than raised.
    """
    request_timeout = aiohttp.ClientTimeout(total=600)
    try:
        async with session.get(url, ssl=False, timeout=request_timeout) as response:
            body = await response.text()
    except aiohttp.ClientConnectorError as err:
        print(f"Connection Error: {err}")
        return None
    except asyncio.TimeoutError:
        print("Request timed out")
        return None
    return body

async def fetch_all(urls):
    """Fetch every URL in *urls* concurrently over one shared session.

    Returns a list aligned with *urls*: each element is the response body
    (or None, as produced by fetch). Because of return_exceptions=True,
    any unexpected exception is returned in place instead of raised.
    """
    async with aiohttp.ClientSession() as session:
        # Idiom: build the coroutine fan-out as a generator expression
        # instead of a manual append loop.
        return await asyncio.gather(
            *(fetch(session, url) for url in urls),
            return_exceptions=True,
        )

async def main():
    """Replay the keywords in CSV_FILE against the search API in CHUNK-sized
    concurrent bursts, sample the Elasticsearch per-node cache sizes after
    each burst, then plot burst latency and cache growth.

    Reads module globals: CSV_FILE, HOST, CHUNK, lw, and calls
    query_cache_monitoring(). Produces matplotlib axes on the current
    figure; plt.show() is expected to be called by the caller.
    """
    urls = []

    time_a = []                               # wall-clock ms per burst
    arr_nodes = [[] for _ in range(6)]        # query_cache size per node (scaled)
    arr_reqs = [[] for _ in range(6)]         # request_cache size per node (scaled)
    count_i = 0

    async def _run_burst():
        """Fire the accumulated urls, record latency and cache samples."""
        nonlocal count_i
        start_time = time.time()
        await fetch_all(urls)
        end_time = time.time()
        # BUG FIX: use end_time instead of a third time.time() call so the
        # recorded latency matches the timestamp printed below.
        time_a.append((end_time - start_time) * 1000)

        stats = query_cache_monitoring()      # 6 query-cache + 6 request-cache values
        for i in range(6):
            arr_nodes[i].append(stats[i])
            arr_reqs[i].append(stats[6 + i])

        dt_object = datetime.fromtimestamp(end_time)
        print("Shot!!! ::: " + str(count_i) + " ::: " + str(dt_object.isoformat()))
        count_i += len(urls)

    with open(CSV_FILE) as data_file:
        for line in data_file:
            keyword = parse.quote(line.strip())
            urls.append(HOST + "/home/1.0/total/search?sort=RANK&inputKeyword=" + keyword + "&searchKeyword=" + keyword + "&page=1&perPage=20")
            if len(urls) % CHUNK == 0:
                await _run_burst()
                urls = []

    # BUG FIX: the original silently dropped the tail of the file when the
    # keyword count was not a multiple of CHUNK — flush the remainder too.
    if urls:
        await _run_burst()
        urls = []

    t = range(0, len(time_a))
    plt.rcParams['font.family'] = 'AppleGothic'

    # Top row: raw burst-latency series; grid below: one panel per ES node.
    axs = plt.figure(figsize=(12, 6), layout='constrained').subplot_mosaic([
        ['time', 'time', 'time'],
        ['node1', 'node2', 'node3'],
        ['node4', 'node5', 'node6'],
    ])

    axs['time'].plot(t, time_a, lw=lw)
    axs['time'].set_xlabel(str(len(time_a)) + '회')
    axs['time'].set_ylabel('Time(ms)')

    for i in range(6):
        key = 'node' + str(i + 1)
        axs[key].plot(t, arr_nodes[i], 'g', lw=lw)   # green: query cache
        axs[key].plot(t, arr_reqs[i], 'r', lw=lw)    # red: request cache
        axs[key].set_ylabel('Cache' if i == 0 else '')

    for title, ax in axs.items():
        if title == 'time':
            continue

        ax.set_title(title)
        ax.sharex(axs['node1'])
        ax.sharey(axs['node1'])
#     print(f"Number of responses: {len(results)}")
def query_cache_monitoring():
    """Sample cache sizes for the six known ES data nodes.

    Returns a 12-tuple: the six nodes' query_cache sizes followed by the
    same six nodes' request_cache sizes, each divided by the module-level
    scale factor *div*. Node order is fixed by the hard-coded id list.
    """
    node_ids = (
        "vGT_Ao0pQoa5fXxCiD9vPQ",
        "2b7CiYd8RFCtgA5P3LurIQ",
        "T_0Pwn-1STOpEQCThXNmKw",
        "TFAxWZkSTKSvgUTZbSFjyw",
        "nCuC5PIUTEOqOu5kMzgo0w",
        "pWrpoOBsSqO5Nar4sZQnCQ",
    )
    stats = client.nodes.stats()
    per_node = [stats["nodes"][node_id]["indices"] for node_id in node_ids]

    query_sizes = tuple(n["query_cache"]["memory_size_in_bytes"] / div for n in per_node)
    request_sizes = tuple(n["request_cache"]["memory_size_in_bytes"] / div for n in per_node)
    return query_sizes + request_sizes

if __name__ == "__main__":

    # NOTE(review): credentials are embedded in the URL and certificate
    # verification is disabled — move these to env vars/config and enable
    # TLS verification before this script leaves a QA environment.
    client = Elasticsearch("https://elastic:elastic1!@totalsearch-es-qa.homeplus.kr:443/", ca_certs=False,
                           verify_certs=False)
    # Start every run from a cold cache so the growth curves begin near zero.
    client.indices.clear_cache()
    div = 1000000  # divisor applied to byte counts in query_cache_monitoring()
    lw = 0.7  # shared line width for all plots
    y = []
    plt.rcParams['font.family'] = 'AppleGothic'  # font capable of rendering the Korean axis label

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    CHUNK = 300  # number of URLs fired concurrently per burst in main()
    HOST = "http://localhost:8090"  # localhost
    directory_path = 'event'  # folder containing the keyword files to replay
    file_names = os.listdir(directory_path)

    # One asyncio event-loop run per keyword file; main() reads CSV_FILE.
    for file_name in file_names:
        print(file_name)
        CSV_FILE = directory_path+"/"+file_name
        asyncio.run(main())

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Run time :: " + start_time_view + " ~ " + end_time_view)

    plt.show()




반응형
Comments