ElasticStack/Elasticsearch

[es] Shard Configuration Change

닉의네임 2024. 8. 13. 11:01
  • AS-IS: 6 servers, 1 node each, 1 shard per data node (primary 3, replica 1), identical to the production layout
  • TO-BE: 6 servers, 1 node each, 2 shards per data node (primary 3, replica 3); see the settings sketch below
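A minimal sketch of creating an index with the to-be settings (elasticsearch-py 8.x style; the connection details here are illustrative and the real mappings are omitted):

from elasticsearch import Elasticsearch

# Illustrative connection; the actual test runs against the QA cluster below.
es = Elasticsearch("https://localhost:9200", verify_certs=False)

# TO-BE: 3 primaries x 3 replicas = 12 shard copies, i.e. 2 per node on 6 data nodes.
# (AS-IS uses number_of_replicas=1: 6 copies, 1 per node.)
es.indices.create(
    index="shard3-hyper-item-20240802",
    settings={"number_of_shards": 3, "number_of_replicas": 3},
)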

  • Data setup
    • As-is: copy of the production index (PRD data as of July 10)
    • To-be: copy of the production index (PRD data as of July 10)
  • Calls go through the QA API
  • 100 requests/sec
  • Keyword extraction window: 2024-03-01 09:00:00 ~ 2024-03-01 09:30:00, 10,000 requests per 5-minute interval, 70,000 total (requests extracted without deduplication; see the interval sketch after this list)
  • sleep 0.3 s between batches
  • ES cache cleared once before the first run
  • Run the 70,000 keywords once, then swap the alias and test again, x 2
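For reference, the interval arithmetic behind the 70,000 keywords, as a small stdlib-only sketch (the request-log source itself is not shown in the post):

from datetime import datetime, timedelta

start = datetime(2024, 3, 1, 9, 0, 0)
end = datetime(2024, 3, 1, 9, 30, 0)

# 09:00, 09:05, ..., 09:30 -> 7 extraction points, 10,000 keywords each
points = []
t = start
while t <= end:
    points.append(t)
    t += timedelta(minutes=5)

assert len(points) == 7 and len(points) * 10_000 == 70_000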

    Asis Run time :: 2024-08-12 19:04:58 ~ 2024-08-12 22:00:48
    Tobe Run time :: 2024-08-12 22:01:49 ~ 2024-08-13 00:41:46
    Round 1 complete
    Asis Run time2 :: 2024-08-13 00:41:47 ~ 2024-08-13 03:18:32
    Tobe Run time2 :: 2024-08-13 03:19:32 ~ 2024-08-13 05:51:31
    Round 2 complete

The test script used for the runs above:
import asyncio
import aiohttp
import time
from urllib import parse
import urllib3
import matplotlib.pyplot as plt
from time import sleep
from datetime import datetime
from elasticsearch import Elasticsearch
import os
# Check API cache usage and response times
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

async def fetch(session, url):
    # GET one URL; returns the response body, or None on connection error / timeout
    try:
        async with session.get(url, ssl=False, timeout=aiohttp.ClientTimeout(total=600)) as response:
            return await response.text()
    except aiohttp.ClientConnectorError as e:
        print(f"Connection Error: {e}")
    except asyncio.TimeoutError:
        print("Request timed out")

async def fetch_all(urls):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for url in urls:
            tasks.append(fetch(session, url))
        return await asyncio.gather(*tasks, return_exceptions=True)

async def main():
#     url = "https://totalsearch-api-qa.homeplus.kr"  # 호출하려는 API URL
#     url = "http://localhost:8090"  # localhost
    urls = []

    time_a = []  # per-batch latency in ms
    # Per-node cache snapshots per batch: arr_nodeN = query cache, arr_reqN = request cache (MB)
    arr_node1 = []
    arr_node2 = []
    arr_node3 = []
    arr_node4 = []
    arr_node5 = []
    arr_node6 = []
    arr_req1 = []
    arr_req2 = []
    arr_req3 = []
    arr_req4 = []
    arr_req5 = []
    arr_req6 = []

    with open(CSV_FILE) as data_file:
        count_i = 0
        for line in data_file:
            keyword = parse.quote(line.strip())
            urls.append(HOST + "/home/1.0/total/search?sort=RANK&inputKeyword=" + keyword + "&searchKeyword=" + keyword + "&page=1&perPage=1")
            if (len(urls) % CHUNK == 0):  # fire a batch of CHUNK concurrent requests; a final partial batch is not sent
                start_time = time.time()
                results = await fetch_all(urls)
                end_time = time.time()
                time_a.append((end_time - start_time) * 1000)  # batch latency in ms

                node1, node2, node3, node4, node5, node6,req1, req2, req3, req4, req5, req6 = query_cache_monitoring()
                arr_node1.append(node1)
                arr_node2.append(node2)
                arr_node3.append(node3)
                arr_node4.append(node4)
                arr_node5.append(node5)
                arr_node6.append(node6)
                arr_req1.append(req1)
                arr_req2.append(req2)
                arr_req3.append(req3)
                arr_req4.append(req4)
                arr_req5.append(req5)
                arr_req6.append(req6)
                dt_object = datetime.fromtimestamp(end_time)
                print("Shot!!! ::: " + str(count_i) + " ::: "+str(dt_object.isoformat()))
                count_i +=CHUNK
                sleep(0.3)
                urls = []

    t = range(0, len(time_a))
    plt.rcParams['font.family'] = 'AppleGothic'

    y = time_a

    # Plot the raw time series
    axs = plt.figure(figsize=(12,6) , layout='constrained').subplot_mosaic([
        ['time', 'time', 'time'],
        ['node1', 'node2', 'node3'],
        ['node4', 'node5', 'node6'],
    ])


    axs['time'].plot(t, y, lw=lw)
    axs['time'].set_xlabel(str(len(time_a)) + ' batches')
    axs['time'].set_ylabel('Time(ms)')

    # Per-node cache curves: query cache in green, request cache in red
    node_series = [
        (arr_node1, arr_req1), (arr_node2, arr_req2), (arr_node3, arr_req3),
        (arr_node4, arr_req4), (arr_node5, arr_req5), (arr_node6, arr_req6),
    ]
    for idx, (qc, rc) in enumerate(node_series, start=1):
        axs[f'node{idx}'].plot(t, qc, 'g', lw=lw)
        axs[f'node{idx}'].plot(t, rc, 'r', lw=lw)
    axs['node1'].set_ylabel('Cache')

    for title, ax in axs.items():
        if title == 'time':
            continue

        ax.set_title(title)
        ax.sharex(axs['node1'])
        ax.sharey(axs['node1'])

#     print(f"Time taken: {end_time - start_time} seconds")
#     print(f"Number of responses: {len(results)}")
def query_cache_monitoring():
    # Snapshot each data node's query_cache / request_cache size (scaled to MB via div)
    data = client.nodes.stats()
    node_ids = [
        "vGT_Ao0pQoa5fXxCiD9vPQ", "2b7CiYd8RFCtgA5P3LurIQ", "T_0Pwn-1STOpEQCThXNmKw",
        "TFAxWZkSTKSvgUTZbSFjyw", "nCuC5PIUTEOqOu5kMzgo0w", "pWrpoOBsSqO5Nar4sZQnCQ",
    ]
    stats = [data["nodes"][node_id]["indices"] for node_id in node_ids]
    query_cache = [s["query_cache"]["memory_size_in_bytes"] / div for s in stats]
    request_cache = [s["request_cache"]["memory_size_in_bytes"] / div for s in stats]
    # 12-tuple: six query-cache sizes followed by six request-cache sizes
    return (*query_cache, *request_cache)

# Add an alias to an index
def add_alias(es, index_name, alias_name):
    es.indices.put_alias(index=index_name, name=alias_name)
    print(f"Alias '{alias_name}' added to index '{index_name}'")

# Remove an alias from an index
def remove_alias(es, index_name, alias_name):
    es.indices.delete_alias(index=index_name, name=alias_name)
    print(f"Alias '{alias_name}' removed from index '{index_name}'")

def update_alias(client, old_index_name, new_index_name, alias_name):
    remove_alias(client, old_index_name, alias_name)
    add_alias(client, new_index_name, alias_name)
    print(f"Alias '{alias_name}' updated from index '{old_index_name}' to '{new_index_name}'")

if __name__ == "__main__":

    sleep(600)  # wait 10 minutes before the first run
    now = datetime.now()
    date_view = now.strftime("%Y%m%d")
    f_v = open("shard_test_"+str(date_view)+".txt",'w')

    HYPER_OLD_INDEX_NAME = "prd-hyper-item-20240710"
    DS_OLD_INDEX_NAME = "prd-ds-item-20240710"
    EXP_OLD_INDEX_NAME = "local-prd-exp-item"

#     HYPER_ALIAS_NAME = "prd-hyper-item"
#     DS_ALIAS_NAME = "prd-ds-item"
#     EXP_ALIAS_NAME = "prd-exp-item"

    HYPER_ALIAS_NAME = "hyper-item"
    DS_ALIAS_NAME = "ds-item"
    EXP_ALIAS_NAME = "exp-item"

    client = Elasticsearch("https://elastic:elastic1!@totalsearch-es-qa.homeplus.kr:443/", ca_certs=False,
                           verify_certs=False)
    client.indices.clear_cache()
    div = 1000000  # bytes -> MB
    lw = 0.7
    y = []
    plt.rcParams['font.family'] = 'AppleGothic'

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    CHUNK = 100  # concurrent requests per batch
#     HOST = "http://localhost:8090"  # localhost
    HOST = "https://totalsearch-api-qa.homeplus.kr"

#     directory_path = 'start'
    directory_path = 'event3'

    # As-is: two passes over the keyword files, clearing the ES cache before each pass
    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Asis Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Asis Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")

    HYPER_NEW_INDEX_NAME = "shard3-hyper-item-20240802"
    DS_NEW_INDEX_NAME = "shard3-ds-item-20240802"
    EXP_NEW_INDEX_NAME = "shard3-exp-item-20240807"

    # Swap the aliases over to the to-be (shard3) indices
    update_alias(client, HYPER_OLD_INDEX_NAME, HYPER_NEW_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_OLD_INDEX_NAME, DS_NEW_INDEX_NAME, DS_ALIAS_NAME)
#     update_alias(client, EXP_OLD_INDEX_NAME, EXP_NEW_INDEX_NAME, EXP_ALIAS_NAME)

    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Tobe Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Tobe Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")
    f_v.write("1차 완료 \n")

# Revert the aliases to the as-is indices
    update_alias(client, HYPER_NEW_INDEX_NAME, HYPER_OLD_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_NEW_INDEX_NAME, DS_OLD_INDEX_NAME, DS_ALIAS_NAME)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Asis Run time2 :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Asis Run time2 :: " + start_time_view + " ~ " + end_time_view+ "\n")

    update_alias(client, HYPER_OLD_INDEX_NAME, HYPER_NEW_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_OLD_INDEX_NAME, DS_NEW_INDEX_NAME, DS_ALIAS_NAME)
#     update_alias(client, EXP_OLD_INDEX_NAME, EXP_NEW_INDEX_NAME, EXP_ALIAS_NAME)

    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Tobe Run time2 :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Tobe Run time2 :: " + start_time_view + " ~ " + end_time_view+ "\n")

    update_alias(client, HYPER_NEW_INDEX_NAME, HYPER_OLD_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_NEW_INDEX_NAME, DS_OLD_INDEX_NAME, DS_ALIAS_NAME)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Asis Run time3 :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Asis Run time3 :: " + start_time_view + " ~ " + end_time_view+ "\n")

    update_alias(client, HYPER_OLD_INDEX_NAME, HYPER_NEW_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_OLD_INDEX_NAME, DS_NEW_INDEX_NAME, DS_ALIAS_NAME)
#     update_alias(client, EXP_OLD_INDEX_NAME, EXP_NEW_INDEX_NAME, EXP_ALIAS_NAME)
    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,2):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Tobe Run time3 :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Tobe Run time3 :: " + start_time_view + " ~ " + end_time_view+ "\n")


############################
    f_v.close()

#matplotlib chart show
#     plt.show()
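# A headless-friendly alternative to plt.show() would be saving the figure, e.g.:
#     plt.savefig("shard_test_" + str(date_view) + ".png", dpi=150)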

The second version of the script runs a single pass per phase (range(0, 1)) against the event5 keyword set. Its imports and helper functions (fetch, fetch_all, main, query_cache_monitoring, and the alias helpers) are identical to the script above, so only the __main__ block differs:
if __name__ == "__main__":

    sleep(600)  # wait 10 minutes before the first run

    now = datetime.now()
    date_view = now.strftime("%Y%m%d")
    f_v = open("shard_test_"+str(date_view)+".txt",'w')

    HYPER_OLD_INDEX_NAME = "prd-hyper-item-20240710"
    DS_OLD_INDEX_NAME = "prd-ds-item-20240710"
    EXP_OLD_INDEX_NAME = "local-prd-exp-item"

    HYPER_NEW_INDEX_NAME = "shard3-hyper-item-20240802"
    DS_NEW_INDEX_NAME = "shard3-ds-item-20240802"
    EXP_NEW_INDEX_NAME = "shard3-exp-item-20240807"

#     HYPER_ALIAS_NAME = "prd-hyper-item"
#     DS_ALIAS_NAME = "prd-ds-item"
#     EXP_ALIAS_NAME = "prd-exp-item"

    HYPER_ALIAS_NAME = "hyper-item"
    DS_ALIAS_NAME = "ds-item"
    EXP_ALIAS_NAME = "exp-item"

    client = Elasticsearch("https://elastic:elastic1!@totalsearch-es-qa.homeplus.kr:443/", ca_certs=False,
                           verify_certs=False)
    client.indices.clear_cache()
    div = 1000000
    lw = 0.7
    y = []
    plt.rcParams['font.family'] = 'AppleGothic'

    CHUNK = 100
#     HOST = "http://localhost:8090"  # localhost
    HOST = "https://totalsearch-api-qa.homeplus.kr"
#     directory_path = 'start'
    directory_path = 'event5'

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")


    # As-is: single pass over the keyword files
    for i in range(0,1):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
        sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Asis Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Asis Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")

# Swap the aliases over to the to-be (shard3) indices
    update_alias(client, HYPER_OLD_INDEX_NAME, HYPER_NEW_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_OLD_INDEX_NAME, DS_NEW_INDEX_NAME, DS_ALIAS_NAME)
#     update_alias(client, EXP_OLD_INDEX_NAME, EXP_NEW_INDEX_NAME, EXP_ALIAS_NAME)

    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,1):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Tobe Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Tobe Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")
    print("==================================================================================")
    sleep(600)

# Revert the aliases to the as-is indices
    update_alias(client, HYPER_NEW_INDEX_NAME, HYPER_OLD_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_NEW_INDEX_NAME, DS_OLD_INDEX_NAME, DS_ALIAS_NAME)
    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,1):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Asis2 Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Asis2 Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")
    sleep(600)

    update_alias(client, HYPER_OLD_INDEX_NAME, HYPER_NEW_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_OLD_INDEX_NAME, DS_NEW_INDEX_NAME, DS_ALIAS_NAME)
    sleep(60)

    now = datetime.now()
    start_time_view = now.strftime("%Y-%m-%d %H:%M:%S")

    for i in range(0,1):
        client.indices.clear_cache()
        file_names = os.listdir(directory_path)
        for file_name in file_names:
            print(file_name)
            CSV_FILE = directory_path+"/"+file_name
            asyncio.run(main())
#         sleep(600)

    now = datetime.now()
    end_time_view = now.strftime("%Y-%m-%d %H:%M:%S")
    print("Tobe2 Run time :: " + start_time_view + " ~ " + end_time_view)
    f_v.write("Tobe2 Run time :: " + start_time_view + " ~ " + end_time_view+ "\n")

############################
    f_v.close()

# Revert the aliases to the as-is indices
    update_alias(client, HYPER_NEW_INDEX_NAME, HYPER_OLD_INDEX_NAME, HYPER_ALIAS_NAME)
    update_alias(client, DS_NEW_INDEX_NAME, DS_OLD_INDEX_NAME, DS_ALIAS_NAME)


#matplotlib chart show
#     plt.show()



