From 41d92378c2c1b83b2acd56f9e6d6ca5ef8e62f6b Mon Sep 17 00:00:00 2001 From: yono Date: Thu, 22 May 2025 09:51:03 +0900 Subject: [PATCH 01/30] =?UTF-8?q?feat:=E3=83=9E=E3=82=B9=E3=82=BF=E3=83=A1?= =?UTF-8?q?=E3=83=B3=E3=83=86=E3=81=AE=E5=BB=83=E6=AD=A2=E3=80=81=E4=B8=8D?= =?UTF-8?q?=E8=A6=81=E3=81=AB=E3=81=AA=E3=82=8B=E7=AE=87=E6=89=80=E3=81=AE?= =?UTF-8?q?=E3=82=B3=E3=83=A1=E3=83=B3=E3=83=88=E3=82=A2=E3=82=A6=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/controller/master_mainte.py | 633 ++++---- ecs/jskult-webapp/src/main.py | 2 +- .../src/model/db/master_mente_count.py | 13 +- .../src/model/internal/master_mainte_csv.py | 1415 +++++++++-------- .../master_mainte_emp_chg_inst_function.py | 289 ++-- .../src/model/request/master_mainte_csvdl.py | 331 ++-- .../src/model/request/master_mainte_csvup.py | 51 +- .../view/inst_emp_csv_download_view_model.py | 55 +- .../view/inst_emp_csv_upload_view_model.py | 73 +- .../view/master_mainte_menu_view_model.py | 7 +- .../src/model/view/menu_view_model.py | 9 +- .../model/view/table_override_view_model.py | 9 +- .../src/model/view/user_view_model.py | 5 +- .../repositories/bu_master_cd_repository.py | 49 +- .../repositories/emp_chg_inst_repository.py | 503 +++--- .../src/repositories/emp_master_repository.py | 53 +- .../generic_kbn_mst_repository.py | 57 +- .../src/repositories/mst_inst_repository.py | 49 +- .../src/services/batch_status_service.py | 11 +- .../src/services/master_mainte_service.py | 447 +++--- .../src/static/css/masterMainte.css | 1 + .../static/function/businessLogicScript.js | 47 +- ecs/jskult-webapp/src/system_var/constants.py | 217 +-- .../src/system_var/environment.py | 2 +- .../src/templates/instEmpCsvDL.html | 1 + .../src/templates/instEmpCsvUL.html | 3 +- .../src/templates/masterMainteMenu.html | 1 + ecs/jskult-webapp/src/templates/menu.html | 5 +- .../src/templates/tableOverride.html | 3 +- 29 files changed, 2185 insertions(+), 2156 
deletions(-) diff --git a/ecs/jskult-webapp/src/controller/master_mainte.py b/ecs/jskult-webapp/src/controller/master_mainte.py index 87b16978..89313271 100644 --- a/ecs/jskult-webapp/src/controller/master_mainte.py +++ b/ecs/jskult-webapp/src/controller/master_mainte.py @@ -1,371 +1,372 @@ -from io import BytesIO, TextIOWrapper -from typing import Optional +# # ファイル削除予定 +# from io import BytesIO, TextIOWrapper +# from typing import Optional -from fastapi import APIRouter, Depends, HTTPException, Request -from fastapi.responses import HTMLResponse -from starlette import status +# from fastapi import APIRouter, Depends, HTTPException, Request +# from fastapi.responses import HTMLResponse +# from starlette import status -from src.depends.services import get_service -from src.logging.get_logger import get_logger -from src.model.internal.session import UserSession -from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel -from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel -from src.model.view.inst_emp_csv_download_view_model import \ - InstEmpCsvDownloadViewModel -from src.model.view.inst_emp_csv_upload_view_model import \ - InstEmpCsvUploadViewModel -from src.model.view.master_mainte_menu_view_model import \ - MasterMainteMenuViewModel -from src.model.view.table_override_view_model import TableOverrideViewModel -from src.router.session_router import AuthenticatedRoute -from src.services.batch_status_service import BatchStatusService -from src.services.login_service import LoginService -from src.services.master_mainte_service import MasterMainteService -from src.system_var import constants -from src.templates import templates +# from src.depends.services import get_service +# from src.logging.get_logger import get_logger +# from src.model.internal.session import UserSession +# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel +# from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel +# from 
src.model.view.inst_emp_csv_download_view_model import \ +# InstEmpCsvDownloadViewModel +# from src.model.view.inst_emp_csv_upload_view_model import \ +# InstEmpCsvUploadViewModel +# from src.model.view.master_mainte_menu_view_model import \ +# MasterMainteMenuViewModel +# from src.model.view.table_override_view_model import TableOverrideViewModel +# from src.router.session_router import AuthenticatedRoute +# from src.services.batch_status_service import BatchStatusService +# from src.services.login_service import LoginService +# from src.services.master_mainte_service import MasterMainteService +# from src.system_var import constants +# from src.templates import templates -logger = get_logger('マスターメンテ') +# logger = get_logger('マスターメンテ') -router = APIRouter() -router.route_class = AuthenticatedRoute +# router = APIRouter() +# router.route_class = AuthenticatedRoute -######################### -# Views # -######################### +# ######################### +# # Views # +# ######################### -@router.get('/masterMainteMenu', response_class=HTMLResponse) -def menu_view( - request: Request, - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.get('/masterMainteMenu', response_class=HTMLResponse) +# def menu_view( +# request: Request, +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - 
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - menu = MasterMainteMenuViewModel() +# # 画面表示用のモデル +# menu = MasterMainteMenuViewModel() - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'masterMainteMenu.html', - { - 'request': request, - 'menu': menu - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'masterMainteMenu.html', +# { +# 'request': request, +# 'menu': menu +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.get('/instEmpCsvUL', response_class=HTMLResponse) -def inst_emp_csv_upload_view( - request: Request, - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.get('/instEmpCsvUL', response_class=HTMLResponse) +# def inst_emp_csv_upload_view( +# request: Request, +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise 
HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - mainte_csv_up = InstEmpCsvUploadViewModel() +# # 画面表示用のモデル +# mainte_csv_up = InstEmpCsvUploadViewModel() - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'instEmpCsvUL.html', - { - 'request': request, - 'mainte_csv_up': mainte_csv_up - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'instEmpCsvUL.html', +# { +# 'request': request, +# 'mainte_csv_up': mainte_csv_up +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.post('/instEmpCsvUL', response_class=HTMLResponse) -async def inst_emp_csv_upload( - request: Request, - csv_upload_form: MasterMainteCsvUpModel = Depends(MasterMainteCsvUpModel.as_form), - master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.post('/instEmpCsvUL', response_class=HTMLResponse) +# async def inst_emp_csv_upload( +# request: Request, +# 
csv_upload_form: MasterMainteCsvUpModel = Depends(MasterMainteCsvUpModel.as_form), +# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - error_message_list = [] - csv_filename: str = csv_upload_form.csv_file.filename - if csv_upload_form.csv_file.size == 0: - error_message_list.append('選択されたファイルが見つかりませんでした。') - elif not csv_filename.endswith('.csv'): - error_message_list.append('選択されたファイル形式が"csv"ではありません。') - elif csv_upload_form.csv_file.size >= constants.MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE: - error_message_list.append('選択されたCSVファイルサイズが大きいです。20MB未満にしてください。') - else: - mainte_csv_up = master_mainte_service.prepare_mainte_csv_up_view( - TextIOWrapper(BytesIO(await csv_upload_form.csv_file.read()), 
encoding='utf-8'), - csv_upload_form.csv_file.filename, - csv_upload_form) +# # 画面表示用のモデル +# error_message_list = [] +# csv_filename: str = csv_upload_form.csv_file.filename +# if csv_upload_form.csv_file.size == 0: +# error_message_list.append('選択されたファイルが見つかりませんでした。') +# elif not csv_filename.endswith('.csv'): +# error_message_list.append('選択されたファイル形式が"csv"ではありません。') +# elif csv_upload_form.csv_file.size >= constants.MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE: +# error_message_list.append('選択されたCSVファイルサイズが大きいです。20MB未満にしてください。') +# else: +# mainte_csv_up = master_mainte_service.prepare_mainte_csv_up_view( +# TextIOWrapper(BytesIO(await csv_upload_form.csv_file.read()), encoding='utf-8'), +# csv_upload_form.csv_file.filename, +# csv_upload_form) - if len(error_message_list) > 0: - mainte_csv_up = InstEmpCsvUploadViewModel( - is_verified=True, - error_message_list=error_message_list, - select_function=csv_upload_form.select_function, - select_table=csv_upload_form.select_table) +# if len(error_message_list) > 0: +# mainte_csv_up = InstEmpCsvUploadViewModel( +# is_verified=True, +# error_message_list=error_message_list, +# select_function=csv_upload_form.select_function, +# select_table=csv_upload_form.select_table) - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'instEmpCsvUL.html', - { - 'request': request, - 'mainte_csv_up': mainte_csv_up - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'instEmpCsvUL.html', +# { +# 'request': request, +# 'mainte_csv_up': mainte_csv_up +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.post('/newInst', response_class=HTMLResponse) -def new_inst_result_view( - request: Request, - csv_upload_form: Optional[MasterMainteCsvUpModel] = Depends(MasterMainteCsvUpModel.as_form), - master_mainte_service: MasterMainteService = 
Depends(get_service(MasterMainteService)), - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)), - login_service: LoginService = Depends(get_service(LoginService)) -): - session: UserSession = request.session +# @router.post('/newInst', response_class=HTMLResponse) +# def new_inst_result_view( +# request: Request, +# csv_upload_form: Optional[MasterMainteCsvUpModel] = Depends(MasterMainteCsvUpModel.as_form), +# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)), +# login_service: LoginService = Depends(get_service(LoginService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # ユーザIDからユーザ名を取得 - user_name = login_service.logged_in_user(session.user_id).user_name - # CSVデータを登録し、登録完了画面のモデルを返却する - mainte_csv_up = master_mainte_service.prepare_mainte_new_inst_view(user_name, csv_upload_form) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, 
detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # ユーザIDからユーザ名を取得 +# user_name = login_service.logged_in_user(session.user_id).user_name +# # CSVデータを登録し、登録完了画面のモデルを返却する +# mainte_csv_up = master_mainte_service.prepare_mainte_new_inst_view(user_name, csv_upload_form) - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'instEmpCsvUL.html', - { - 'request': request, - 'mainte_csv_up': mainte_csv_up - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'instEmpCsvUL.html', +# { +# 'request': request, +# 'mainte_csv_up': mainte_csv_up +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.get('/instEmpCsvDL', response_class=HTMLResponse) -def inst_emp_csv_download_view( - request: Request, - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.get('/instEmpCsvDL', response_class=HTMLResponse) +# def inst_emp_csv_download_view( +# request: Request, +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # 
バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - mainte_csv_dl = InstEmpCsvDownloadViewModel( - is_search=False - ) +# # 画面表示用のモデル +# mainte_csv_dl = InstEmpCsvDownloadViewModel( +# is_search=False +# ) - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'instEmpCsvDL.html', - { - 'request': request, - 'mainte_csv_dl': mainte_csv_dl - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'instEmpCsvDL.html', +# { +# 'request': request, +# 'mainte_csv_dl': mainte_csv_dl +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.post('/download', response_class=HTMLResponse) -def inst_emp_csv_download( - request: Request, - csv_download_form: Optional[MasterMainteCsvDlModel] = Depends(MasterMainteCsvDlModel.as_form), - master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.post('/download', response_class=HTMLResponse) +# def inst_emp_csv_download( +# request: Request, +# csv_download_form: Optional[MasterMainteCsvDlModel] = Depends(MasterMainteCsvDlModel.as_form), +# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == 
constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - search_result_df = master_mainte_service.search_emp_chg_inst_data(csv_download_form) +# search_result_df = master_mainte_service.search_emp_chg_inst_data(csv_download_form) - (result_msg, download_file_url) = master_mainte_service.upload_emp_chg_inst_data_file( - search_result_df, - session.user_id, - csv_download_form.select_table) +# (result_msg, download_file_url) = master_mainte_service.upload_emp_chg_inst_data_file( +# search_result_df, +# session.user_id, +# csv_download_form.select_table) - # 画面表示用のモデル - mainte_csv_dl = InstEmpCsvDownloadViewModel( - is_search=True, - ta_cd=csv_download_form.ta_cd, - inst_cd=csv_download_form.inst_cd, - emp_cd=csv_download_form.emp_cd, - emp_chg_type_cd=csv_download_form.emp_chg_type_cd, - apply_date_from=csv_download_form.apply_date_from, - start_date_from=csv_download_form.start_date_from, - start_date_to=csv_download_form.start_date_to, - end_date_from=csv_download_form.end_date_from, - 
end_date_to=csv_download_form.end_date_to, - create_date_from=csv_download_form.create_date_from, - create_date_to=csv_download_form.create_date_to, - update_date_from=csv_download_form.update_date_from, - update_date_to=csv_download_form.update_date_to, - select_table=csv_download_form.select_table, - data_count=search_result_df.shape[0], - download_file_url=download_file_url, - file_name=constants.MENTE_CSV_DOWNLOAD_FILE_NAME, - result_msg=result_msg - ) +# # 画面表示用のモデル +# mainte_csv_dl = InstEmpCsvDownloadViewModel( +# is_search=True, +# ta_cd=csv_download_form.ta_cd, +# inst_cd=csv_download_form.inst_cd, +# emp_cd=csv_download_form.emp_cd, +# emp_chg_type_cd=csv_download_form.emp_chg_type_cd, +# apply_date_from=csv_download_form.apply_date_from, +# start_date_from=csv_download_form.start_date_from, +# start_date_to=csv_download_form.start_date_to, +# end_date_from=csv_download_form.end_date_from, +# end_date_to=csv_download_form.end_date_to, +# create_date_from=csv_download_form.create_date_from, +# create_date_to=csv_download_form.create_date_to, +# update_date_from=csv_download_form.update_date_from, +# update_date_to=csv_download_form.update_date_to, +# select_table=csv_download_form.select_table, +# data_count=search_result_df.shape[0], +# download_file_url=download_file_url, +# file_name=constants.MENTE_CSV_DOWNLOAD_FILE_NAME, +# result_msg=result_msg +# ) - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'instEmpCsvDL.html', - { - 'request': request, - 'mainte_csv_dl': mainte_csv_dl - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'instEmpCsvDL.html', +# { +# 'request': request, +# 'mainte_csv_dl': mainte_csv_dl +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.get('/tableOverride', response_class=HTMLResponse) -def table_override_view( - 
request: Request, - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.get('/tableOverride', response_class=HTMLResponse) +# def table_override_view( +# request: Request, +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - table_override = TableOverrideViewModel() +# # 画面表示用のモデル +# table_override = TableOverrideViewModel() - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'tableOverride.html', - { - 'request': request, - 'table_override': table_override - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'tableOverride.html', +# { +# 
'request': request, +# 'table_override': table_override +# }, +# headers={'session_key': session_key} +# ) +# return templates_response -@router.post('/tableOverride', response_class=HTMLResponse) -def table_override_result_view( - request: Request, - master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), - batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) -): - session: UserSession = request.session +# @router.post('/tableOverride', response_class=HTMLResponse) +# def table_override_result_view( +# request: Request, +# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)), +# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +# ): +# session: UserSession = request.session - # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる - if session.master_mainte_flg == constants.PERMISSION_DISABLED: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) +# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる +# if session.master_mainte_flg == constants.PERMISSION_DISABLED: +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - # バッチ処理中の場合、ログアウトさせる - if batch_status_service.is_batch_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) - # dump処理中の場合、ログアウトさせる - if batch_status_service.is_dump_processing(): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) +# # バッチ処理中の場合、ログアウトさせる +# if batch_status_service.is_batch_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, +# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) +# # dump処理中の場合、ログアウトさせる +# if batch_status_service.is_dump_processing(): +# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) - # 画面表示用のモデル - table_override = master_mainte_service.copy_data_real_to_dummy() +# # 
画面表示用のモデル +# table_override = master_mainte_service.copy_data_real_to_dummy() - # レスポンス - session_key = session.session_key - templates_response = templates.TemplateResponse( - 'tableOverride.html', - { - 'request': request, - 'table_override': table_override - }, - headers={'session_key': session_key} - ) - return templates_response +# # レスポンス +# session_key = session.session_key +# templates_response = templates.TemplateResponse( +# 'tableOverride.html', +# { +# 'request': request, +# 'table_override': table_override +# }, +# headers={'session_key': session_key} +# ) +# return templates_response diff --git a/ecs/jskult-webapp/src/main.py b/ecs/jskult-webapp/src/main.py index f0e1807d..129a7593 100644 --- a/ecs/jskult-webapp/src/main.py +++ b/ecs/jskult-webapp/src/main.py @@ -32,7 +32,7 @@ app.include_router(ultmarc.router, prefix='/ultmarc') # クライアントから非同期呼出しされるため、共通ルーターとは異なる扱いとする。 app.include_router(bio_api.router, prefix='/bio') # マスタメンテ -app.include_router(master_mainte.router, prefix='/masterMainte') +# 削除予定app.include_router(master_mainte.router, prefix='/masterMainte') # ヘルスチェック用のルーター app.include_router(healthcheck.router, prefix='/healthcheck') diff --git a/ecs/jskult-webapp/src/model/db/master_mente_count.py b/ecs/jskult-webapp/src/model/db/master_mente_count.py index a3837819..a426f43b 100644 --- a/ecs/jskult-webapp/src/model/db/master_mente_count.py +++ b/ecs/jskult-webapp/src/model/db/master_mente_count.py @@ -1,9 +1,10 @@ -from typing import Optional +# ファイル削除予定 +# from typing import Optional -from src.model.db.base_db_model import BaseDBModel -from src.util.sanitize import sanitize +# from src.model.db.base_db_model import BaseDBModel +# from src.util.sanitize import sanitize -@sanitize -class MasterMenteCountModel(BaseDBModel): - count: Optional[int] +# @sanitize +# class MasterMenteCountModel(BaseDBModel): +# count: Optional[int] diff --git a/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py 
b/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py index a6146731..93cc3bca 100644 --- a/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py +++ b/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py @@ -1,707 +1,708 @@ -import csv -import json -from abc import ABCMeta, abstractmethod -from datetime import datetime -from io import TextIOWrapper - -from src.logging.get_logger import get_logger -from src.repositories.bu_master_cd_repository import BuMasterRepository -from src.repositories.emp_chg_inst_repository import EmpChgInstRepository -from src.repositories.emp_master_repository import EmpMasterRepository -from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository -from src.repositories.mst_inst_repository import MstInstRepository -from src.system_var import constants -from src.util.string_util import is_not_empty - -logger = get_logger('マスターメンテ') - - -class MasterMainteCSVItem(metaclass=ABCMeta): - - csv_row: list[str] - table_name: str - line_num: str - mst_inst_repository: MstInstRepository - emp_master_repository: EmpMasterRepository - bu_master_repository: BuMasterRepository - emp_chginst_repository: EmpChgInstRepository - generic_kbn_mst_repository: GenericKbnMstRepository - - def __init__( - self, - csv_row: list[str], - table_name: str, - line_num: str, - mst_inst_repository: MstInstRepository, - emp_master_repository: EmpMasterRepository, - bu_master_repository: BuMasterRepository, - emp_chginst_repository: EmpChgInstRepository, - generic_kbn_mst_repository: GenericKbnMstRepository - ): - self.csv_row = csv_row - self.table_name = table_name - self.line_num = line_num - self.mst_inst_repository = mst_inst_repository - self.emp_master_repository = emp_master_repository - self.bu_master_repository = bu_master_repository - self.emp_chginst_repository = emp_chginst_repository - self.generic_kbn_mst_repository = generic_kbn_mst_repository - - def validate(self) -> list[str]: - """ - 項目のバリデーションを行うテンプレートメソッド\n - 
各チェックロジックはサブクラスで実装する - エラーが有る場合、[行数、項目名: エラー内容]のリストを返す - """ - error_list = [] - # 項目数チェック - error_list.extend(self.check_item_count()) - if len(error_list) == 0: - # 必須チェック 及び コメントエラーチェック - error_list.extend(self.check_require()) - # 施設コード存在チェック - error_list.extend(self.check_inst_cd_exists()) - # 領域コード存在チェック - error_list.extend(self.check_ta_cd_exists()) - # 担当者種別コード存在チェック - error_list.extend(self.check_emp_chg_type_cd_exists()) - # MUID存在チェック - error_list.extend(self.check_emp_cd_exists()) - # BuCd存在チェック - error_list.extend(self.check_bu_cd_exists()) - # 適用開始日 < 適用終了日、実在日チェック - error_list.extend(self.check_existing_date()) - # データ存在チェック - error_list.extend(self.check_data_exists()) - - # エラーのないリストを省いて返す - error_list = [error for error in error_list if len(error) != 0] - return error_list - - def check_csv_item_count(self, item_count: int) -> list[str]: - error_list = [] - - if not len(self.csv_row) == item_count: - error_list.append(f'{self.line_num}行目の項目数が一致しません。項目数を確認してください。') - - return error_list - - def emp_chg_inst_count(self, start_date: str): - return self.emp_chginst_repository.fetch_count(self.inst_cd, self.ta_cd, self.emp_chg_type_cd, start_date, self.table_name) - - def is_exist_emp_cd(self, start_date: str) -> bool: - if start_date is None or len(start_date) == 0: - return False - if self.emp_master_repository.fetch_count(self.emp_cd, start_date) == 0: - return True - return False - - def is_exist_inst_cd(self) -> bool: - return True if self.mst_inst_repository.fetch_count(self.inst_cd) > 0 else False - - def is_exist_emp_chg_type_cd(self, start_date: str) -> bool: - if start_date is None or len(start_date) == 0: - return False - if self.generic_kbn_mst_repository.fetch_count('emp_chg_type_cd', self.emp_chg_type_cd, start_date) > 0: - return True - return False - - def is_exist_ta_cd(self, start_date: str) -> bool: - if start_date is None or len(start_date) == 0: - return False - if self.generic_kbn_mst_repository.fetch_count('ta_cd', self.ta_cd, 
start_date) > 0: - return True - return False - - def is_exist_bu_cd(self) -> bool: - return True if self.bu_master_repository.fetch_count(self.bu_cd) > 0 else False - - def make_require_error_message(self, line_num: str, col_name: str) -> str: - return f'{line_num}行目の{col_name}が入力されておりません。' - - def make_data_exist_error_message(self, line_num: str, primary_key_col_names: list[str]) -> str: - return self.__make_check_data_exists_error_message(line_num, primary_key_col_names, 'がすべて同一のデータが既に登録されています。') - - def make_data_not_exist_error_message(self, line_num: str, primary_key_col_names: list[str]) -> str: - return self.__make_check_data_exists_error_message(line_num, primary_key_col_names, 'がすべて同一のデータが存在しないため更新できません。') - - def __make_check_data_exists_error_message(self, line_num: str, primary_key_col_names: list[str], suffix_message: str) -> str: - primary_key_logical_names = '、'.join(primary_key_col_names) - return f'{line_num}行目の{primary_key_logical_names}{suffix_message}' - - def __parse_str_to_date(self, check_date: str) -> tuple[bool, datetime]: - try: - check_date_time: datetime = datetime.strptime(check_date, '%Y%m%d') - except Exception as e: - logger.debug(f'適用期間の日付が不正{e}') - return (False, None) - - try: - reverse_check_date: str = check_date_time.strftime('%Y%m%d') - except Exception as e: - logger.debug(f'適用期間の日付が不正{e}') - return (False, None) - - if check_date != reverse_check_date: - return (False, None) - - return (True, check_date_time) - - def check_term_date(self, - start_date: str, - end_date: str, - start_date_col_name: str, - end_date_col_name: str) -> tuple[list[str], datetime, datetime]: - error_list = [] - - start_date_time: datetime = None - end_date_time: datetime = None - if is_not_empty(start_date): - (result, start_date_time) = self.__parse_str_to_date(start_date) - if result is False: - error_list.append(f'{self.line_num}行目の{start_date_col_name}が実在しない日付になっています。') - if is_not_empty(end_date): - (result, end_date_time) = 
self.__parse_str_to_date(end_date) - if result is False: - error_list.append(f'{self.line_num}行目の{end_date_col_name}が実在しない日付になっています。') - - return (error_list, start_date_time, end_date_time) - - def get_csv_value(self, column_no: int): - try: - column_value = self.csv_row[column_no] - except IndexError: - column_value = '' - - return column_value - - @abstractmethod - def csv_row_data(self) -> dict: - pass - ... - - @abstractmethod - def check_require(self) -> list[str]: - """必須チェック""" - pass - ... - - @abstractmethod - def check_inst_cd_exists(self) -> list[str]: - """InstCD存在チェック""" - pass - ... - - @abstractmethod - def check_emp_chg_type_cd_exists(self) -> list[str]: - """担当者種別コード存在チェック""" - pass - ... - - @abstractmethod - def check_ta_cd_exists(self) -> list[str]: - """領域コード存在チェック""" - pass - ... - - @abstractmethod - def check_emp_cd_exists(self) -> list[str]: - """MUID存在チェック""" - pass - ... - - @abstractmethod - def check_bu_cd_exists(self) -> list[str]: - """BuCd存在チェック""" - pass - ... - - @abstractmethod - def check_existing_date(self) -> list[str]: - """適用開始日 < 適用終了日、実在日チェック""" - - @abstractmethod - def check_item_count(self) -> list[str]: - """項目数チェック""" - pass - ... - - @abstractmethod - def check_data_exists(self) -> list[str]: - """データ存在チェック""" - pass - ... 
- - -class MasterMainteNewInstEmpCSVItem(MasterMainteCSVItem): - """新規施設担当者登録CSV""" - inst_name: str - emp_name_family: str - emp_name_first: str - start_date: str - end_date: str - - def __init__( - self, - csv_row: list[str], - table_name: str, - line_num: str, - mst_inst_repository: MstInstRepository, - emp_master_repository: EmpMasterRepository, - bu_master_repository: BuMasterRepository, - emp_chginst_repository: EmpChgInstRepository, - generic_kbn_mst_repository: GenericKbnMstRepository - ): - super().__init__( - csv_row, - table_name, - line_num, - mst_inst_repository, - emp_master_repository, - bu_master_repository, - emp_chginst_repository, - generic_kbn_mst_repository - ) - self.inst_cd = super().get_csv_value(constants.CSV_NEW_INST_CD_COL_NO) - self.inst_name = super().get_csv_value(constants.CSV_NEW_INST_NAME_COL_NO) - self.ta_cd = super().get_csv_value(constants.CSV_NEW_TA_CD_COL_NO) - self.emp_chg_type_cd = super().get_csv_value(constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO) - self.emp_cd = super().get_csv_value(constants.CSV_NEW_EMP_CD_COL_NO) - self.emp_name_family = super().get_csv_value(constants.CSV_NEW_EMP_NAME_FAMILY_COL_NO) - self.emp_name_first = super().get_csv_value(constants.CSV_NEW_EMP_NAME_FIRST_COL_NO) - self.bu_cd = super().get_csv_value(constants.CSV_NEW_BU_CD_COL_NO) - self.start_date = super().get_csv_value(constants.CSV_NEW_START_DATE) - self.end_date = super().get_csv_value(constants.CSV_NEW_END_DATE) - - def csv_row_data(self) -> dict: - return {constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[i]: self.csv_row[i] for i in range(len(self.csv_row))} - - def check_require(self) -> list[str]: - error_list = [] - if len(self.inst_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO])) - if len(self.ta_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, 
constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO])) - if len(self.emp_chg_type_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO])) - if len(self.emp_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CD_COL_NO])) - if len(self.bu_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_BU_CD_COL_NO])) - if len(self.start_date) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE])) - if len(self.end_date) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE])) - - return error_list - - def check_inst_cd_exists(self) -> list[str]: - error_list = [] - - if is_not_empty(self.inst_cd) and super().is_exist_inst_cd() is False: - error_list.append( - f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO]}\ -は施設マスタに存在しないコードです。') - return error_list - - def check_emp_cd_exists(self) -> list[str]: - error_list = [] - if not self.start_date or not self.emp_cd: - return error_list - - if super().is_exist_emp_cd(self.start_date) is True: - error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CD_COL_NO]}\ -は従業員マスタに存在しない もしくは 適用期間外のIDです。') - return error_list - - def check_emp_chg_type_cd_exists(self) -> list[str]: - error_list = [] - if not self.start_date or not self.emp_chg_type_cd: - return error_list - - if is_not_empty(self.emp_chg_type_cd) and super().is_exist_emp_chg_type_cd(self.start_date) is False: - 
error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO]}\ -は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') - return error_list - - def check_ta_cd_exists(self) -> list[str]: - error_list = [] - if not self.start_date or not self.ta_cd: - return error_list - - if is_not_empty(self.ta_cd) and super().is_exist_ta_cd(self.start_date) is False: - error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO]}\ -は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') - return error_list - - def check_bu_cd_exists(self) -> list[str]: - error_list = [] - - if is_not_empty(self.bu_cd) and super().is_exist_bu_cd() is False: - error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_BU_CD_COL_NO]}\ -はビジネスユニットマスタに存在しないコードです。') - return error_list - - def check_existing_date(self) -> list[str]: - error_list = [] - if not self.start_date or not self.end_date: - return error_list - - (error_list, start_date_time, end_date_time) = super().check_term_date( - self.start_date, - self.end_date, - constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE], - constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE]) - if len(error_list) > 0: - return error_list - - if start_date_time > end_date_time: - error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE]}\ -が{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE]}よりも後の日付になっています。') - return error_list - - def check_item_count(self) -> list[str]: - return super().check_csv_item_count(len(constants.NEW_INST_EMP_CSV_LOGICAL_NAMES)) - - def check_data_exists(self) -> list[str]: - error_list = [] - if super().emp_chg_inst_count(self.start_date) > 0: - error_list.append(super().make_data_exist_error_message( - self.line_num, - primary_key_col_names=[ - constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO], - 
constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO], - constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO], - constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE] - ] - )) - - return error_list - - -class MasterMainteChangeInstEmpCSVItem(MasterMainteCSVItem): - """施設担当者変更登録CSV""" - bu_name: str - org_cd: str - org_short_name: str - inst_name: str - explain: str - emp_full_name: str - inst_emp_start_date: str - inst_emp_end_date: str - change_end_date: str - comment: str - - def __init__( - self, - csv_row: list[str], - table_name: str, - line_num: str, - mst_inst_repository: MstInstRepository, - emp_master_repository: EmpMasterRepository, - bu_master_repository: BuMasterRepository, - emp_chginst_repository: EmpChgInstRepository, - generic_kbn_mst_repository: GenericKbnMstRepository - ): - super().__init__( - csv_row, - table_name, - line_num, - mst_inst_repository, - emp_master_repository, - bu_master_repository, - emp_chginst_repository, - generic_kbn_mst_repository - ) - self.bu_cd = super().get_csv_value(constants.CSV_CHANGE_BU_CD_COL_NO) - self.bu_name = super().get_csv_value(constants.CSV_CHANGE_BU_NAME_COL_NO) - self.org_cd = super().get_csv_value(constants.CSV_CHANGE_ORG_CD_COL_NO) - self.org_short_name = super().get_csv_value(constants.CSV_CHANGE_ORG_SHORT_NAME_COL_NO) - self.inst_cd = super().get_csv_value(constants.CSV_CHANGE_INST_CD_COL_NO) - self.inst_name = super().get_csv_value(constants.CSV_CHANGE_INST_NAME_COL_NO) - self.ta_cd = super().get_csv_value(constants.CSV_CHANGE_TA_CD_COL_NO) - self.explain = super().get_csv_value(constants.CSV_CHANGE_EXPLAIN_COL_NO) - self.emp_chg_type_cd = super().get_csv_value(constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO) - self.emp_cd = super().get_csv_value(constants.CSV_CHANGE_EMP_CD_COL_NO) - self.emp_full_name = super().get_csv_value(constants.CSV_CHANGE_EMP_FULL_NAME_COL_NO) - self.inst_emp_start_date = 
super().get_csv_value(constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO) - self.inst_emp_end_date = super().get_csv_value(constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO) - self.change_end_date = super().get_csv_value(constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO) - self.comment = super().get_csv_value(constants.CSV_CHANGE_COMMENT) - - def csv_row_data(self) -> dict: - return {constants.CHANGE_INST_CSV_LOGICAL_NAMES[i]: self.csv_row[i] for i in range(len(self.csv_row))} - - def check_require(self) -> list[str]: - error_list = [] - if self.comment == '追加': - if len(self.bu_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_BU_CD_COL_NO])) - if len(self.inst_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) - if len(self.ta_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) - if len(self.emp_chg_type_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) - if len(self.emp_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO])) - if len(self.inst_emp_start_date) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[ - constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) - if len(self.inst_emp_end_date) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[ - constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO])) - elif self.comment == '終了': - if len(self.inst_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, 
constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) - if len(self.ta_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) - if len(self.emp_chg_type_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) - if len(self.inst_emp_start_date) == 0: - error_list.append(self.make_require_error_message( - self.line_num, - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) - if len(self.change_end_date) == 0: - error_list.append(self.make_require_error_message(self.line_num, - constants.CHANGE_INST_CSV_LOGICAL_NAMES[ - constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO])) - elif self.comment == '担当者修正': - if len(self.inst_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) - if len(self.ta_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) - if len(self.emp_chg_type_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) - - if len(self.emp_cd) == 0: - error_list.append(self.make_require_error_message( - self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO])) - if len(self.inst_emp_start_date) == 0: - error_list.append(self.make_require_error_message(self.line_num, - constants.CHANGE_INST_CSV_LOGICAL_NAMES[ - constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) - else: - error_list.append(f'{self.line_num}行目のコメントが不正です。 「追加」「終了」「担当者修正」のいずれかを入力してください。') - return error_list - - def check_inst_cd_exists(self) -> list[str]: - error_list = [] - - if 
is_not_empty(self.inst_cd) and super().is_exist_inst_cd() is False: - error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO]}\ -は施設マスタに存在しないコードです。') - return error_list - - def check_emp_cd_exists(self) -> list[str]: - error_list = [] - if not self.inst_emp_start_date or not self.emp_cd: - return error_list - - if self.comment != '追加' and self.comment != '担当者修正': - return error_list - - if super().is_exist_emp_cd(self.inst_emp_start_date) is True: - error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO]}\ -は従業員マスタに存在しない もしくは 適用期間外のIDです。') - return error_list - - def check_emp_chg_type_cd_exists(self) -> list[str]: - error_list = [] - - if not self.inst_emp_start_date or not self.emp_chg_type_cd: - return error_list - - if is_not_empty(self.emp_chg_type_cd) and super().is_exist_emp_chg_type_cd(self.inst_emp_start_date) is False: - error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO]}\ -は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') - return error_list - - def check_ta_cd_exists(self) -> list[str]: - error_list = [] - - if not self.inst_emp_start_date or not self.ta_cd: - return error_list - - if is_not_empty(self.ta_cd) and super().is_exist_ta_cd(self.inst_emp_start_date) is False: - error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO]}\ -は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') - return error_list - - def check_bu_cd_exists(self) -> list[str]: - error_list = [] - - if is_not_empty(self.bu_cd) and self.comment == '追加'\ - and super().is_exist_bu_cd() is False: - error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_BU_CD_COL_NO]}\ -はビジネスユニットマスタに存在しないコードです。') - return error_list - - def check_existing_date(self) -> list[str]: - error_list = [] - start_date = self.inst_emp_start_date - if 
self.comment == '追加' or self.comment == '終了': - if self.comment == '追加': - end_date = self.inst_emp_end_date - end_date_col_name = constants.CHANGE_INST_CSV_LOGICAL_NAMES[ - constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO] - compare_error_message = f'\ -{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO]}が\ -{end_date_col_name}よりも後の日付になっています。' - else: - end_date = self.change_end_date - end_date_col_name = constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO] - compare_error_message = f'\ -{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO]}が\ -{end_date_col_name}よりも後の日付になっています。' - - if not start_date or not end_date: - return error_list - - (error_list, start_date_time, end_date_time) = super().check_term_date( - start_date, - end_date, - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO], - end_date_col_name) - if len(error_list) > 0: - return error_list - - if start_date_time > end_date_time: - error_list.append(f'{self.line_num}行目の{compare_error_message}') - return error_list - - def check_item_count(self) -> list[str]: - return super().check_csv_item_count(len(constants.CHANGE_INST_CSV_LOGICAL_NAMES)) - - def check_data_exists(self) -> list[str]: - error_list = [] - emp_chg_inst_count = super().emp_chg_inst_count(self.inst_emp_start_date) - if self.comment == '追加' and emp_chg_inst_count > 0: - error_list.append(super().make_data_exist_error_message( - self.line_num, - primary_key_col_names=[ - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO] - ] - )) - - elif (self.comment == '終了' or self.comment == '担当者修正') and emp_chg_inst_count == 0: - 
error_list.append(super().make_data_not_exist_error_message( - self.line_num, - primary_key_col_names=[ - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO], - constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO] - ] - )) - - return error_list - - -class MasterMainteCSVItems: - """施設担当者CSVをループで回すためのもの""" - lines: list[MasterMainteCSVItem] - __i: int = 0 - - def to_json(self): - # CSVをjsonに変換 - csv_row_dict_list: list[dict] = self.to_dict() - - # json作成 - return json.dumps(csv_row_dict_list, ensure_ascii=False) - - def to_dict(self): - return [row_item.csv_row_data() for row_item in self.lines] - - def __iter__(self): - return self - - def __next__(self) -> MasterMainteCSVItem: - if self.__i == len(self.lines): - raise StopIteration() - line = self.lines[self.__i] - self.__i += 1 - return line - - def __init__( - self, - file: TextIOWrapper, - select_function: str, - table_name: str, - mst_inst_repository: MstInstRepository, - emp_master_repository: EmpMasterRepository, - bu_master_repository: BuMasterRepository, - emp_chginst_repository: EmpChgInstRepository, - generic_kbn_mst_repository: GenericKbnMstRepository - ) -> None: - reader = csv.reader(file) - csv_rows = [] - for line_num, row in enumerate(reader, start=0): - if line_num == 0: - continue - csv_rows.append(self.__select_function( - select_function, - row, - table_name, - line_num, - mst_inst_repository, - emp_master_repository, - bu_master_repository, - emp_chginst_repository, - generic_kbn_mst_repository - )) - self.lines = csv_rows - - def __select_function(self, - function_type: str, - row: list[str], - table_name: str, - line_num: int, - mst_inst_repository: MstInstRepository, - emp_master_repository: EmpMasterRepository, - bu_master_repository: BuMasterRepository, - 
emp_chginst_repository: EmpChgInstRepository, - generic_kbn_mst_repository: GenericKbnMstRepository) -> MasterMainteCSVItem: - if function_type == 'new': - return MasterMainteNewInstEmpCSVItem( - row, - table_name, - str(line_num), - mst_inst_repository, - emp_master_repository, - bu_master_repository, - emp_chginst_repository, - generic_kbn_mst_repository) - elif function_type == 'change': - return MasterMainteChangeInstEmpCSVItem( - row, - table_name, - str(line_num), - mst_inst_repository, - emp_master_repository, - bu_master_repository, - emp_chginst_repository, - generic_kbn_mst_repository) +# # ファイル削除予定 +# import csv +# import json +# from abc import ABCMeta, abstractmethod +# from datetime import datetime +# from io import TextIOWrapper + +# from src.logging.get_logger import get_logger +# from src.repositories.bu_master_cd_repository import BuMasterRepository +# from src.repositories.emp_chg_inst_repository import EmpChgInstRepository +# from src.repositories.emp_master_repository import EmpMasterRepository +# from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository +# from src.repositories.mst_inst_repository import MstInstRepository +# from src.system_var import constants +# from src.util.string_util import is_not_empty + +# logger = get_logger('マスターメンテ') + + +# class MasterMainteCSVItem(metaclass=ABCMeta): + +# csv_row: list[str] +# table_name: str +# line_num: str +# mst_inst_repository: MstInstRepository +# emp_master_repository: EmpMasterRepository +# bu_master_repository: BuMasterRepository +# emp_chginst_repository: EmpChgInstRepository +# generic_kbn_mst_repository: GenericKbnMstRepository + +# def __init__( +# self, +# csv_row: list[str], +# table_name: str, +# line_num: str, +# mst_inst_repository: MstInstRepository, +# emp_master_repository: EmpMasterRepository, +# bu_master_repository: BuMasterRepository, +# emp_chginst_repository: EmpChgInstRepository, +# generic_kbn_mst_repository: GenericKbnMstRepository +# ): +# 
self.csv_row = csv_row +# self.table_name = table_name +# self.line_num = line_num +# self.mst_inst_repository = mst_inst_repository +# self.emp_master_repository = emp_master_repository +# self.bu_master_repository = bu_master_repository +# self.emp_chginst_repository = emp_chginst_repository +# self.generic_kbn_mst_repository = generic_kbn_mst_repository + +# def validate(self) -> list[str]: +# """ +# 項目のバリデーションを行うテンプレートメソッド\n +# 各チェックロジックはサブクラスで実装する +# エラーが有る場合、[行数、項目名: エラー内容]のリストを返す +# """ +# error_list = [] +# # 項目数チェック +# error_list.extend(self.check_item_count()) +# if len(error_list) == 0: +# # 必須チェック 及び コメントエラーチェック +# error_list.extend(self.check_require()) +# # 施設コード存在チェック +# error_list.extend(self.check_inst_cd_exists()) +# # 領域コード存在チェック +# error_list.extend(self.check_ta_cd_exists()) +# # 担当者種別コード存在チェック +# error_list.extend(self.check_emp_chg_type_cd_exists()) +# # MUID存在チェック +# error_list.extend(self.check_emp_cd_exists()) +# # BuCd存在チェック +# error_list.extend(self.check_bu_cd_exists()) +# # 適用開始日 < 適用終了日、実在日チェック +# error_list.extend(self.check_existing_date()) +# # データ存在チェック +# error_list.extend(self.check_data_exists()) + +# # エラーのないリストを省いて返す +# error_list = [error for error in error_list if len(error) != 0] +# return error_list + +# def check_csv_item_count(self, item_count: int) -> list[str]: +# error_list = [] + +# if not len(self.csv_row) == item_count: +# error_list.append(f'{self.line_num}行目の項目数が一致しません。項目数を確認してください。') + +# return error_list + +# def emp_chg_inst_count(self, start_date: str): +# return self.emp_chginst_repository.fetch_count(self.inst_cd, self.ta_cd, self.emp_chg_type_cd, start_date, self.table_name) + +# def is_exist_emp_cd(self, start_date: str) -> bool: +# if start_date is None or len(start_date) == 0: +# return False +# if self.emp_master_repository.fetch_count(self.emp_cd, start_date) == 0: +# return True +# return False + +# def is_exist_inst_cd(self) -> bool: +# return True if 
self.mst_inst_repository.fetch_count(self.inst_cd) > 0 else False + +# def is_exist_emp_chg_type_cd(self, start_date: str) -> bool: +# if start_date is None or len(start_date) == 0: +# return False +# if self.generic_kbn_mst_repository.fetch_count('emp_chg_type_cd', self.emp_chg_type_cd, start_date) > 0: +# return True +# return False + +# def is_exist_ta_cd(self, start_date: str) -> bool: +# if start_date is None or len(start_date) == 0: +# return False +# if self.generic_kbn_mst_repository.fetch_count('ta_cd', self.ta_cd, start_date) > 0: +# return True +# return False + +# def is_exist_bu_cd(self) -> bool: +# return True if self.bu_master_repository.fetch_count(self.bu_cd) > 0 else False + +# def make_require_error_message(self, line_num: str, col_name: str) -> str: +# return f'{line_num}行目の{col_name}が入力されておりません。' + +# def make_data_exist_error_message(self, line_num: str, primary_key_col_names: list[str]) -> str: +# return self.__make_check_data_exists_error_message(line_num, primary_key_col_names, 'がすべて同一のデータが既に登録されています。') + +# def make_data_not_exist_error_message(self, line_num: str, primary_key_col_names: list[str]) -> str: +# return self.__make_check_data_exists_error_message(line_num, primary_key_col_names, 'がすべて同一のデータが存在しないため更新できません。') + +# def __make_check_data_exists_error_message(self, line_num: str, primary_key_col_names: list[str], suffix_message: str) -> str: +# primary_key_logical_names = '、'.join(primary_key_col_names) +# return f'{line_num}行目の{primary_key_logical_names}{suffix_message}' + +# def __parse_str_to_date(self, check_date: str) -> tuple[bool, datetime]: +# try: +# check_date_time: datetime = datetime.strptime(check_date, '%Y%m%d') +# except Exception as e: +# logger.debug(f'適用期間の日付が不正{e}') +# return (False, None) + +# try: +# reverse_check_date: str = check_date_time.strftime('%Y%m%d') +# except Exception as e: +# logger.debug(f'適用期間の日付が不正{e}') +# return (False, None) + +# if check_date != reverse_check_date: +# return (False, None) + 
+# return (True, check_date_time) + +# def check_term_date(self, +# start_date: str, +# end_date: str, +# start_date_col_name: str, +# end_date_col_name: str) -> tuple[list[str], datetime, datetime]: +# error_list = [] + +# start_date_time: datetime = None +# end_date_time: datetime = None +# if is_not_empty(start_date): +# (result, start_date_time) = self.__parse_str_to_date(start_date) +# if result is False: +# error_list.append(f'{self.line_num}行目の{start_date_col_name}が実在しない日付になっています。') +# if is_not_empty(end_date): +# (result, end_date_time) = self.__parse_str_to_date(end_date) +# if result is False: +# error_list.append(f'{self.line_num}行目の{end_date_col_name}が実在しない日付になっています。') + +# return (error_list, start_date_time, end_date_time) + +# def get_csv_value(self, column_no: int): +# try: +# column_value = self.csv_row[column_no] +# except IndexError: +# column_value = '' + +# return column_value + +# @abstractmethod +# def csv_row_data(self) -> dict: +# pass +# ... + +# @abstractmethod +# def check_require(self) -> list[str]: +# """必須チェック""" +# pass +# ... + +# @abstractmethod +# def check_inst_cd_exists(self) -> list[str]: +# """InstCD存在チェック""" +# pass +# ... + +# @abstractmethod +# def check_emp_chg_type_cd_exists(self) -> list[str]: +# """担当者種別コード存在チェック""" +# pass +# ... + +# @abstractmethod +# def check_ta_cd_exists(self) -> list[str]: +# """領域コード存在チェック""" +# pass +# ... + +# @abstractmethod +# def check_emp_cd_exists(self) -> list[str]: +# """MUID存在チェック""" +# pass +# ... + +# @abstractmethod +# def check_bu_cd_exists(self) -> list[str]: +# """BuCd存在チェック""" +# pass +# ... + +# @abstractmethod +# def check_existing_date(self) -> list[str]: +# """適用開始日 < 適用終了日、実在日チェック""" + +# @abstractmethod +# def check_item_count(self) -> list[str]: +# """項目数チェック""" +# pass +# ... + +# @abstractmethod +# def check_data_exists(self) -> list[str]: +# """データ存在チェック""" +# pass +# ... 
+ + +# class MasterMainteNewInstEmpCSVItem(MasterMainteCSVItem): +# """新規施設担当者登録CSV""" +# inst_name: str +# emp_name_family: str +# emp_name_first: str +# start_date: str +# end_date: str + +# def __init__( +# self, +# csv_row: list[str], +# table_name: str, +# line_num: str, +# mst_inst_repository: MstInstRepository, +# emp_master_repository: EmpMasterRepository, +# bu_master_repository: BuMasterRepository, +# emp_chginst_repository: EmpChgInstRepository, +# generic_kbn_mst_repository: GenericKbnMstRepository +# ): +# super().__init__( +# csv_row, +# table_name, +# line_num, +# mst_inst_repository, +# emp_master_repository, +# bu_master_repository, +# emp_chginst_repository, +# generic_kbn_mst_repository +# ) +# self.inst_cd = super().get_csv_value(constants.CSV_NEW_INST_CD_COL_NO) +# self.inst_name = super().get_csv_value(constants.CSV_NEW_INST_NAME_COL_NO) +# self.ta_cd = super().get_csv_value(constants.CSV_NEW_TA_CD_COL_NO) +# self.emp_chg_type_cd = super().get_csv_value(constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO) +# self.emp_cd = super().get_csv_value(constants.CSV_NEW_EMP_CD_COL_NO) +# self.emp_name_family = super().get_csv_value(constants.CSV_NEW_EMP_NAME_FAMILY_COL_NO) +# self.emp_name_first = super().get_csv_value(constants.CSV_NEW_EMP_NAME_FIRST_COL_NO) +# self.bu_cd = super().get_csv_value(constants.CSV_NEW_BU_CD_COL_NO) +# self.start_date = super().get_csv_value(constants.CSV_NEW_START_DATE) +# self.end_date = super().get_csv_value(constants.CSV_NEW_END_DATE) + +# def csv_row_data(self) -> dict: +# return {constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[i]: self.csv_row[i] for i in range(len(self.csv_row))} + +# def check_require(self) -> list[str]: +# error_list = [] +# if len(self.inst_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO])) +# if len(self.ta_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, 
constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO])) +# if len(self.emp_chg_type_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO])) +# if len(self.emp_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CD_COL_NO])) +# if len(self.bu_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_BU_CD_COL_NO])) +# if len(self.start_date) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE])) +# if len(self.end_date) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE])) + +# return error_list + +# def check_inst_cd_exists(self) -> list[str]: +# error_list = [] + +# if is_not_empty(self.inst_cd) and super().is_exist_inst_cd() is False: +# error_list.append( +# f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO]}\ +# は施設マスタに存在しないコードです。') +# return error_list + +# def check_emp_cd_exists(self) -> list[str]: +# error_list = [] +# if not self.start_date or not self.emp_cd: +# return error_list + +# if super().is_exist_emp_cd(self.start_date) is True: +# error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CD_COL_NO]}\ +# は従業員マスタに存在しない もしくは 適用期間外のIDです。') +# return error_list + +# def check_emp_chg_type_cd_exists(self) -> list[str]: +# error_list = [] +# if not self.start_date or not self.emp_chg_type_cd: +# return error_list + +# if is_not_empty(self.emp_chg_type_cd) and super().is_exist_emp_chg_type_cd(self.start_date) is False: +# 
error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO]}\ +# は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') +# return error_list + +# def check_ta_cd_exists(self) -> list[str]: +# error_list = [] +# if not self.start_date or not self.ta_cd: +# return error_list + +# if is_not_empty(self.ta_cd) and super().is_exist_ta_cd(self.start_date) is False: +# error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO]}\ +# は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') +# return error_list + +# def check_bu_cd_exists(self) -> list[str]: +# error_list = [] + +# if is_not_empty(self.bu_cd) and super().is_exist_bu_cd() is False: +# error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_BU_CD_COL_NO]}\ +# はビジネスユニットマスタに存在しないコードです。') +# return error_list + +# def check_existing_date(self) -> list[str]: +# error_list = [] +# if not self.start_date or not self.end_date: +# return error_list + +# (error_list, start_date_time, end_date_time) = super().check_term_date( +# self.start_date, +# self.end_date, +# constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE], +# constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE]) +# if len(error_list) > 0: +# return error_list + +# if start_date_time > end_date_time: +# error_list.append(f'{self.line_num}行目の{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE]}\ +# が{constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_END_DATE]}よりも後の日付になっています。') +# return error_list + +# def check_item_count(self) -> list[str]: +# return super().check_csv_item_count(len(constants.NEW_INST_EMP_CSV_LOGICAL_NAMES)) + +# def check_data_exists(self) -> list[str]: +# error_list = [] +# if super().emp_chg_inst_count(self.start_date) > 0: +# error_list.append(super().make_data_exist_error_message( +# self.line_num, +# primary_key_col_names=[ +# 
constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_INST_CD_COL_NO], +# constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_TA_CD_COL_NO], +# constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_EMP_CHG_TYPE_CD_COL_NO], +# constants.NEW_INST_EMP_CSV_LOGICAL_NAMES[constants.CSV_NEW_START_DATE] +# ] +# )) + +# return error_list + + +# class MasterMainteChangeInstEmpCSVItem(MasterMainteCSVItem): +# """施設担当者変更登録CSV""" +# bu_name: str +# org_cd: str +# org_short_name: str +# inst_name: str +# explain: str +# emp_full_name: str +# inst_emp_start_date: str +# inst_emp_end_date: str +# change_end_date: str +# comment: str + +# def __init__( +# self, +# csv_row: list[str], +# table_name: str, +# line_num: str, +# mst_inst_repository: MstInstRepository, +# emp_master_repository: EmpMasterRepository, +# bu_master_repository: BuMasterRepository, +# emp_chginst_repository: EmpChgInstRepository, +# generic_kbn_mst_repository: GenericKbnMstRepository +# ): +# super().__init__( +# csv_row, +# table_name, +# line_num, +# mst_inst_repository, +# emp_master_repository, +# bu_master_repository, +# emp_chginst_repository, +# generic_kbn_mst_repository +# ) +# self.bu_cd = super().get_csv_value(constants.CSV_CHANGE_BU_CD_COL_NO) +# self.bu_name = super().get_csv_value(constants.CSV_CHANGE_BU_NAME_COL_NO) +# self.org_cd = super().get_csv_value(constants.CSV_CHANGE_ORG_CD_COL_NO) +# self.org_short_name = super().get_csv_value(constants.CSV_CHANGE_ORG_SHORT_NAME_COL_NO) +# self.inst_cd = super().get_csv_value(constants.CSV_CHANGE_INST_CD_COL_NO) +# self.inst_name = super().get_csv_value(constants.CSV_CHANGE_INST_NAME_COL_NO) +# self.ta_cd = super().get_csv_value(constants.CSV_CHANGE_TA_CD_COL_NO) +# self.explain = super().get_csv_value(constants.CSV_CHANGE_EXPLAIN_COL_NO) +# self.emp_chg_type_cd = super().get_csv_value(constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO) +# self.emp_cd = super().get_csv_value(constants.CSV_CHANGE_EMP_CD_COL_NO) +# self.emp_full_name = 
super().get_csv_value(constants.CSV_CHANGE_EMP_FULL_NAME_COL_NO) +# self.inst_emp_start_date = super().get_csv_value(constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO) +# self.inst_emp_end_date = super().get_csv_value(constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO) +# self.change_end_date = super().get_csv_value(constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO) +# self.comment = super().get_csv_value(constants.CSV_CHANGE_COMMENT) + +# def csv_row_data(self) -> dict: +# return {constants.CHANGE_INST_CSV_LOGICAL_NAMES[i]: self.csv_row[i] for i in range(len(self.csv_row))} + +# def check_require(self) -> list[str]: +# error_list = [] +# if self.comment == '追加': +# if len(self.bu_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_BU_CD_COL_NO])) +# if len(self.inst_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) +# if len(self.ta_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) +# if len(self.emp_chg_type_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) +# if len(self.emp_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO])) +# if len(self.inst_emp_start_date) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[ +# constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) +# if len(self.inst_emp_end_date) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[ +# constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO])) +# elif 
self.comment == '終了': +# if len(self.inst_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) +# if len(self.ta_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) +# if len(self.emp_chg_type_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) +# if len(self.inst_emp_start_date) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) +# if len(self.change_end_date) == 0: +# error_list.append(self.make_require_error_message(self.line_num, +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[ +# constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO])) +# elif self.comment == '担当者修正': +# if len(self.inst_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO])) +# if len(self.ta_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO])) +# if len(self.emp_chg_type_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO])) + +# if len(self.emp_cd) == 0: +# error_list.append(self.make_require_error_message( +# self.line_num, constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO])) +# if len(self.inst_emp_start_date) == 0: +# error_list.append(self.make_require_error_message(self.line_num, +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[ +# constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO])) +# else: +# 
error_list.append(f'{self.line_num}行目のコメントが不正です。 「追加」「終了」「担当者修正」のいずれかを入力してください。') +# return error_list + +# def check_inst_cd_exists(self) -> list[str]: +# error_list = [] + +# if is_not_empty(self.inst_cd) and super().is_exist_inst_cd() is False: +# error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO]}\ +# は施設マスタに存在しないコードです。') +# return error_list + +# def check_emp_cd_exists(self) -> list[str]: +# error_list = [] +# if not self.inst_emp_start_date or not self.emp_cd: +# return error_list + +# if self.comment != '追加' and self.comment != '担当者修正': +# return error_list + +# if super().is_exist_emp_cd(self.inst_emp_start_date) is True: +# error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CD_COL_NO]}\ +# は従業員マスタに存在しない もしくは 適用期間外のIDです。') +# return error_list + +# def check_emp_chg_type_cd_exists(self) -> list[str]: +# error_list = [] + +# if not self.inst_emp_start_date or not self.emp_chg_type_cd: +# return error_list + +# if is_not_empty(self.emp_chg_type_cd) and super().is_exist_emp_chg_type_cd(self.inst_emp_start_date) is False: +# error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO]}\ +# は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') +# return error_list + +# def check_ta_cd_exists(self) -> list[str]: +# error_list = [] + +# if not self.inst_emp_start_date or not self.ta_cd: +# return error_list + +# if is_not_empty(self.ta_cd) and super().is_exist_ta_cd(self.inst_emp_start_date) is False: +# error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO]}\ +# は汎用区分マスタに存在しない もしくは 適用期間外のコードです。') +# return error_list + +# def check_bu_cd_exists(self) -> list[str]: +# error_list = [] + +# if is_not_empty(self.bu_cd) and self.comment == '追加'\ +# and super().is_exist_bu_cd() is False: +# 
error_list.append(f'{self.line_num}行目の{constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_BU_CD_COL_NO]}\ +# はビジネスユニットマスタに存在しないコードです。') +# return error_list + +# def check_existing_date(self) -> list[str]: +# error_list = [] +# start_date = self.inst_emp_start_date +# if self.comment == '追加' or self.comment == '終了': +# if self.comment == '追加': +# end_date = self.inst_emp_end_date +# end_date_col_name = constants.CHANGE_INST_CSV_LOGICAL_NAMES[ +# constants.CSV_CHANGE_INST_EMP_END_DATE_COL_NO] +# compare_error_message = f'\ +# {constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO]}が\ +# {end_date_col_name}よりも後の日付になっています。' +# else: +# end_date = self.change_end_date +# end_date_col_name = constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_CHANGE_END_DATE_COL_NO] +# compare_error_message = f'\ +# {constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO]}が\ +# {end_date_col_name}よりも後の日付になっています。' + +# if not start_date or not end_date: +# return error_list + +# (error_list, start_date_time, end_date_time) = super().check_term_date( +# start_date, +# end_date, +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO], +# end_date_col_name) +# if len(error_list) > 0: +# return error_list + +# if start_date_time > end_date_time: +# error_list.append(f'{self.line_num}行目の{compare_error_message}') +# return error_list + +# def check_item_count(self) -> list[str]: +# return super().check_csv_item_count(len(constants.CHANGE_INST_CSV_LOGICAL_NAMES)) + +# def check_data_exists(self) -> list[str]: +# error_list = [] +# emp_chg_inst_count = super().emp_chg_inst_count(self.inst_emp_start_date) +# if self.comment == '追加' and emp_chg_inst_count > 0: +# error_list.append(super().make_data_exist_error_message( +# self.line_num, +# primary_key_col_names=[ +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO], +# 
constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO], +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO], +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO] +# ] +# )) + +# elif (self.comment == '終了' or self.comment == '担当者修正') and emp_chg_inst_count == 0: +# error_list.append(super().make_data_not_exist_error_message( +# self.line_num, +# primary_key_col_names=[ +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_CD_COL_NO], +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_TA_CD_COL_NO], +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO], +# constants.CHANGE_INST_CSV_LOGICAL_NAMES[constants.CSV_CHANGE_INST_EMP_START_DATE_COL_NO] +# ] +# )) + +# return error_list + + +# class MasterMainteCSVItems: +# """施設担当者CSVをループで回すためのもの""" +# lines: list[MasterMainteCSVItem] +# __i: int = 0 + +# def to_json(self): +# # CSVをjsonに変換 +# csv_row_dict_list: list[dict] = self.to_dict() + +# # json作成 +# return json.dumps(csv_row_dict_list, ensure_ascii=False) + +# def to_dict(self): +# return [row_item.csv_row_data() for row_item in self.lines] + +# def __iter__(self): +# return self + +# def __next__(self) -> MasterMainteCSVItem: +# if self.__i == len(self.lines): +# raise StopIteration() +# line = self.lines[self.__i] +# self.__i += 1 +# return line + +# def __init__( +# self, +# file: TextIOWrapper, +# select_function: str, +# table_name: str, +# mst_inst_repository: MstInstRepository, +# emp_master_repository: EmpMasterRepository, +# bu_master_repository: BuMasterRepository, +# emp_chginst_repository: EmpChgInstRepository, +# generic_kbn_mst_repository: GenericKbnMstRepository +# ) -> None: +# reader = csv.reader(file) +# csv_rows = [] +# for line_num, row in enumerate(reader, start=0): +# if line_num == 0: +# continue +# csv_rows.append(self.__select_function( +# select_function, +# row, +# table_name, +# 
line_num, +# mst_inst_repository, +# emp_master_repository, +# bu_master_repository, +# emp_chginst_repository, +# generic_kbn_mst_repository +# )) +# self.lines = csv_rows + +# def __select_function(self, +# function_type: str, +# row: list[str], +# table_name: str, +# line_num: int, +# mst_inst_repository: MstInstRepository, +# emp_master_repository: EmpMasterRepository, +# bu_master_repository: BuMasterRepository, +# emp_chginst_repository: EmpChgInstRepository, +# generic_kbn_mst_repository: GenericKbnMstRepository) -> MasterMainteCSVItem: +# if function_type == 'new': +# return MasterMainteNewInstEmpCSVItem( +# row, +# table_name, +# str(line_num), +# mst_inst_repository, +# emp_master_repository, +# bu_master_repository, +# emp_chginst_repository, +# generic_kbn_mst_repository) +# elif function_type == 'change': +# return MasterMainteChangeInstEmpCSVItem( +# row, +# table_name, +# str(line_num), +# mst_inst_repository, +# emp_master_repository, +# bu_master_repository, +# emp_chginst_repository, +# generic_kbn_mst_repository) diff --git a/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py b/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py index 1c79b75e..546d4a80 100644 --- a/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py +++ b/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py @@ -1,166 +1,167 @@ -from abc import ABCMeta, abstractmethod -from src.repositories.emp_chg_inst_repository import EmpChgInstRepository -from src.logging.get_logger import get_logger +# # ファイル削除予定 +# from abc import ABCMeta, abstractmethod +# from src.repositories.emp_chg_inst_repository import EmpChgInstRepository +# from src.logging.get_logger import get_logger -logger = get_logger('マスターメンテ') +# logger = get_logger('マスターメンテ') -class MasterMainteEmpChgInstFunction(metaclass=ABCMeta): - insert_data: list[dict] - table_name: str - select_table_message: str - user_name: str - 
emp_chginst_repository: EmpChgInstRepository +# class MasterMainteEmpChgInstFunction(metaclass=ABCMeta): +# insert_data: list[dict] +# table_name: str +# select_table_message: str +# user_name: str +# emp_chginst_repository: EmpChgInstRepository - def __init__( - self, - insert_data, - table_name: str, - select_table_message: str, - user_name: str, - emp_chginst_repository: EmpChgInstRepository - ): - self.insert_data = insert_data - self.table_name = table_name - self.select_table_message = select_table_message - self.user_name = user_name - self.emp_chginst_repository = emp_chginst_repository +# def __init__( +# self, +# insert_data, +# table_name: str, +# select_table_message: str, +# user_name: str, +# emp_chginst_repository: EmpChgInstRepository +# ): +# self.insert_data = insert_data +# self.table_name = table_name +# self.select_table_message = select_table_message +# self.user_name = user_name +# self.emp_chginst_repository = emp_chginst_repository - def save(self): - error_list = [] - try: - self.emp_chginst_repository.begin() - self.emp_chginst_repository.to_jst() - (result_message, error_list) = self.write_emp_chg_inst_table() - if len(error_list) > 0: - self.emp_chginst_repository.rollback() - else: - self.emp_chginst_repository.commit() - except Exception as e: - self.emp_chginst_repository.rollback() - raise e +# def save(self): +# error_list = [] +# try: +# self.emp_chginst_repository.begin() +# self.emp_chginst_repository.to_jst() +# (result_message, error_list) = self.write_emp_chg_inst_table() +# if len(error_list) > 0: +# self.emp_chginst_repository.rollback() +# else: +# self.emp_chginst_repository.commit() +# except Exception as e: +# self.emp_chginst_repository.rollback() +# raise e - return (result_message, error_list) +# return (result_message, error_list) - def add_emp_chg_inst_table(self, data, start_date, end_date): - self.emp_chginst_repository.insert_emp_chg_inst( - data['施設コード'], - data['領域コード'], - data['担当者種別コード'], - data['MUID'], - 
data['ビジネスユニットコード'], - start_date, - end_date, - self.user_name, - self.table_name) +# def add_emp_chg_inst_table(self, data, start_date, end_date): +# self.emp_chginst_repository.insert_emp_chg_inst( +# data['施設コード'], +# data['領域コード'], +# data['担当者種別コード'], +# data['MUID'], +# data['ビジネスユニットコード'], +# start_date, +# end_date, +# self.user_name, +# self.table_name) - @abstractmethod - def write_emp_chg_inst_table(self): - pass +# @abstractmethod +# def write_emp_chg_inst_table(self): +# pass -class NewEmpChgInstFunction(MasterMainteEmpChgInstFunction): +# class NewEmpChgInstFunction(MasterMainteEmpChgInstFunction): - def __init__( - self, - insert_data_list: list[dict], - table_name: str, - select_table_message: str, - user_name: str, - emp_chginst_repository: EmpChgInstRepository - ): - super().__init__( - insert_data_list, - table_name, - select_table_message, - user_name, - emp_chginst_repository - ) +# def __init__( +# self, +# insert_data_list: list[dict], +# table_name: str, +# select_table_message: str, +# user_name: str, +# emp_chginst_repository: EmpChgInstRepository +# ): +# super().__init__( +# insert_data_list, +# table_name, +# select_table_message, +# user_name, +# emp_chginst_repository +# ) - def write_emp_chg_inst_table(self): - error_list = [] +# def write_emp_chg_inst_table(self): +# error_list = [] - add_count = 0 - for row_no, data in enumerate(self.insert_data, start=1): - try: - self.add_emp_chg_inst_table(data, data['適用開始日'], data['適用終了日']) - add_count += 1 - except Exception as e: - error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。') - logger.info(f'新規施設登録時に{row_no}行目でエラーが発生しました: {e}') +# add_count = 0 +# for row_no, data in enumerate(self.insert_data, start=1): +# try: +# self.add_emp_chg_inst_table(data, data['適用開始日'], data['適用終了日']) +# add_count += 1 +# except Exception as e: +# error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。') +# logger.info(f'新規施設登録時に{row_no}行目でエラーが発生しました: {e}') - result_message_list = [] 
- if len(error_list) == 0: - result_message_list.append('新規施設登録を行いました') - result_message_list.append('対象:' + self.select_table_message) - result_message_list.append('追加:' + str(add_count) + '件') - return (result_message_list, error_list) +# result_message_list = [] +# if len(error_list) == 0: +# result_message_list.append('新規施設登録を行いました') +# result_message_list.append('対象:' + self.select_table_message) +# result_message_list.append('追加:' + str(add_count) + '件') +# return (result_message_list, error_list) -class ChangeEmpChgInstFunction(MasterMainteEmpChgInstFunction): +# class ChangeEmpChgInstFunction(MasterMainteEmpChgInstFunction): - def __init__( - self, - insert_data: list[dict], - table_name: str, - select_table_message: str, - user_name: str, - emp_chginst_repository: EmpChgInstRepository +# def __init__( +# self, +# insert_data: list[dict], +# table_name: str, +# select_table_message: str, +# user_name: str, +# emp_chginst_repository: EmpChgInstRepository - ): - super().__init__( - insert_data, - table_name, - select_table_message, - user_name, - emp_chginst_repository - ) +# ): +# super().__init__( +# insert_data, +# table_name, +# select_table_message, +# user_name, +# emp_chginst_repository +# ) - def write_emp_chg_inst_table(self): - add_count = 0 - end_count = 0 - modify_count = 0 - error_list = [] - for row_no, data in enumerate(self.insert_data, start=1): - try: - if data['コメント'] == '追加': - self.add_emp_chg_inst_table(data, data['施設担当_開始日'], data['施設担当_終了日']) - add_count += 1 - elif data['コメント'] == '終了': - self.__end_emp_chg_inst(data) - end_count += 1 - elif data['コメント'] == '担当者修正': - self.__modify_emp_chg_inst(data) - modify_count += 1 - except Exception as e: - error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。') - logger.info(f'施設担当者変更時に{row_no}行目でエラーが発生しました: {e}') +# def write_emp_chg_inst_table(self): +# add_count = 0 +# end_count = 0 +# modify_count = 0 +# error_list = [] +# for row_no, data in enumerate(self.insert_data, start=1): 
+# try: +# if data['コメント'] == '追加': +# self.add_emp_chg_inst_table(data, data['施設担当_開始日'], data['施設担当_終了日']) +# add_count += 1 +# elif data['コメント'] == '終了': +# self.__end_emp_chg_inst(data) +# end_count += 1 +# elif data['コメント'] == '担当者修正': +# self.__modify_emp_chg_inst(data) +# modify_count += 1 +# except Exception as e: +# error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。') +# logger.info(f'施設担当者変更時に{row_no}行目でエラーが発生しました: {e}') - result_message_list = [] - if len(error_list) == 0: - result_message_list.append('施設担当者変更を行いました') - result_message_list.append('対象:' + self.select_table_message) - result_message_list.append('追加:' + str(add_count) + '件') - result_message_list.append('修正:' + str(modify_count) + '件') - result_message_list.append('終了:' + str(end_count) + '件') - return (result_message_list, error_list) +# result_message_list = [] +# if len(error_list) == 0: +# result_message_list.append('施設担当者変更を行いました') +# result_message_list.append('対象:' + self.select_table_message) +# result_message_list.append('追加:' + str(add_count) + '件') +# result_message_list.append('修正:' + str(modify_count) + '件') +# result_message_list.append('終了:' + str(end_count) + '件') +# return (result_message_list, error_list) - def __end_emp_chg_inst(self, data: dict): - self.emp_chginst_repository.end_emp_chg_inst( - data['施設コード'], - data['領域コード'], - data['担当者種別コード'], - data['施設担当_開始日'], - data['終了日の変更'], - self.user_name, - self.table_name) +# def __end_emp_chg_inst(self, data: dict): +# self.emp_chginst_repository.end_emp_chg_inst( +# data['施設コード'], +# data['領域コード'], +# data['担当者種別コード'], +# data['施設担当_開始日'], +# data['終了日の変更'], +# self.user_name, +# self.table_name) - def __modify_emp_chg_inst(self, data: dict): - self.emp_chginst_repository.modify_emp_chg_inst( - data['施設コード'], - data['領域コード'], - data['施設担当_開始日'], - data['担当者種別コード'], - data['MUID'], - self.user_name, - self.table_name) +# def __modify_emp_chg_inst(self, data: dict): +# 
self.emp_chginst_repository.modify_emp_chg_inst( +# data['施設コード'], +# data['領域コード'], +# data['施設担当_開始日'], +# data['担当者種別コード'], +# data['MUID'], +# self.user_name, +# self.table_name) diff --git a/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py b/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py index 0ec43a10..450c7bfb 100644 --- a/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py +++ b/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py @@ -1,177 +1,178 @@ -from typing import Optional +# ファイル削除予定 +# from typing import Optional -from fastapi import Form +# from fastapi import Form -from src.model.request.request_base_model import RequestBaseModel -from src.util.sanitize import sanitize -from src.util.string_util import is_not_empty +# from src.model.request.request_base_model import RequestBaseModel +# from src.util.sanitize import sanitize +# from src.util.string_util import is_not_empty -@sanitize -class MasterMainteCsvDlModel(RequestBaseModel): - # adaptは検索に使用する値 - ta_cd: Optional[str] - adapt_ta_cd: Optional[str] - inst_cd: Optional[str] - adapt_inst_cd: Optional[str] - emp_cd: Optional[str] - adapt_emp_cd: Optional[str] - emp_chg_type_cd: Optional[str] - adapt_emp_chg_type_cd: Optional[str] - apply_date_from: Optional[str] - adapt_apply_date_from: Optional[str] - start_date_from: Optional[str] - adapt_start_date_from: Optional[str] - start_date_to: Optional[str] - adapt_start_date_to: Optional[str] - end_date_from: Optional[str] - adapt_end_date_from: Optional[str] - end_date_to: Optional[str] - adapt_end_date_to: Optional[str] - select_table: Optional[str] - create_date_from: Optional[str] - adapt_create_date_from: Optional[str] - create_date_to: Optional[str] - adapt_create_date_to: Optional[str] - update_date_from: Optional[str] - adapt_update_date_from: Optional[str] - update_date_to: Optional[str] - adapt_update_date_to: Optional[str] +# @sanitize +# class MasterMainteCsvDlModel(RequestBaseModel): +# # adaptは検索に使用する値 +# 
ta_cd: Optional[str] +# adapt_ta_cd: Optional[str] +# inst_cd: Optional[str] +# adapt_inst_cd: Optional[str] +# emp_cd: Optional[str] +# adapt_emp_cd: Optional[str] +# emp_chg_type_cd: Optional[str] +# adapt_emp_chg_type_cd: Optional[str] +# apply_date_from: Optional[str] +# adapt_apply_date_from: Optional[str] +# start_date_from: Optional[str] +# adapt_start_date_from: Optional[str] +# start_date_to: Optional[str] +# adapt_start_date_to: Optional[str] +# end_date_from: Optional[str] +# adapt_end_date_from: Optional[str] +# end_date_to: Optional[str] +# adapt_end_date_to: Optional[str] +# select_table: Optional[str] +# create_date_from: Optional[str] +# adapt_create_date_from: Optional[str] +# create_date_to: Optional[str] +# adapt_create_date_to: Optional[str] +# update_date_from: Optional[str] +# adapt_update_date_from: Optional[str] +# update_date_to: Optional[str] +# adapt_update_date_to: Optional[str] - @classmethod - def as_form( - cls, - ctrl_ta_cd: Optional[str] = Form(None), - ctrl_inst_cd: Optional[str] = Form(None), - ctrl_emp_cd: Optional[str] = Form(None), - ctrl_emp_chg_type_cd: Optional[str] = Form(None), - ctrl_apply_date_from: Optional[str] = Form(None), - ctrl_start_date_from: Optional[str] = Form(None), - ctrl_start_date_to: Optional[str] = Form(None), - ctrl_end_date_from: Optional[str] = Form(None), - ctrl_end_date_to: Optional[str] = Form(None), - radio_select_table: Optional[str] = Form(None), - ctrl_create_date_from: Optional[str] = Form(None), - ctrl_create_date_to: Optional[str] = Form(None), - ctrl_update_date_from: Optional[str] = Form(None), - ctrl_update_date_to: Optional[str] = Form(None) - ): - return cls.__convert_request_param( - cls, - ctrl_ta_cd, - ctrl_inst_cd, - ctrl_emp_cd, - ctrl_emp_chg_type_cd, - ctrl_apply_date_from, - ctrl_start_date_from, - ctrl_start_date_to, - ctrl_end_date_from, - ctrl_end_date_to, - radio_select_table, - ctrl_create_date_from, - ctrl_create_date_to, - ctrl_update_date_from, - ctrl_update_date_to - ) 
+# @classmethod +# def as_form( +# cls, +# ctrl_ta_cd: Optional[str] = Form(None), +# ctrl_inst_cd: Optional[str] = Form(None), +# ctrl_emp_cd: Optional[str] = Form(None), +# ctrl_emp_chg_type_cd: Optional[str] = Form(None), +# ctrl_apply_date_from: Optional[str] = Form(None), +# ctrl_start_date_from: Optional[str] = Form(None), +# ctrl_start_date_to: Optional[str] = Form(None), +# ctrl_end_date_from: Optional[str] = Form(None), +# ctrl_end_date_to: Optional[str] = Form(None), +# radio_select_table: Optional[str] = Form(None), +# ctrl_create_date_from: Optional[str] = Form(None), +# ctrl_create_date_to: Optional[str] = Form(None), +# ctrl_update_date_from: Optional[str] = Form(None), +# ctrl_update_date_to: Optional[str] = Form(None) +# ): +# return cls.__convert_request_param( +# cls, +# ctrl_ta_cd, +# ctrl_inst_cd, +# ctrl_emp_cd, +# ctrl_emp_chg_type_cd, +# ctrl_apply_date_from, +# ctrl_start_date_from, +# ctrl_start_date_to, +# ctrl_end_date_from, +# ctrl_end_date_to, +# radio_select_table, +# ctrl_create_date_from, +# ctrl_create_date_to, +# ctrl_update_date_from, +# ctrl_update_date_to +# ) - def __convert_request_param( - cls, - ctrl_ta_cd: str, - ctrl_inst_cd: str, - ctrl_emp_cd: str, - ctrl_emp_chg_type_cd, - ctrl_apply_date_from: str, - ctrl_start_date_from: str, - ctrl_start_date_to: str, - ctrl_end_date_from: str, - ctrl_end_date_to: str, - radio_select_table: str, - ctrl_create_date_from: str, - ctrl_create_date_to: str, - ctrl_update_date_from: str, - ctrl_update_date_to: str - ): - ctrl_ta_cd = ctrl_ta_cd if is_not_empty(ctrl_ta_cd) else '' - ctrl_inst_cd = ctrl_inst_cd if is_not_empty(ctrl_inst_cd) else '' - ctrl_emp_cd = ctrl_emp_cd if is_not_empty(ctrl_emp_cd) else '' - ctrl_emp_chg_type_cd = ctrl_emp_chg_type_cd if is_not_empty(ctrl_emp_chg_type_cd) else '' +# def __convert_request_param( +# cls, +# ctrl_ta_cd: str, +# ctrl_inst_cd: str, +# ctrl_emp_cd: str, +# ctrl_emp_chg_type_cd, +# ctrl_apply_date_from: str, +# ctrl_start_date_from: str, +# 
ctrl_start_date_to: str, +# ctrl_end_date_from: str, +# ctrl_end_date_to: str, +# radio_select_table: str, +# ctrl_create_date_from: str, +# ctrl_create_date_to: str, +# ctrl_update_date_from: str, +# ctrl_update_date_to: str +# ): +# ctrl_ta_cd = ctrl_ta_cd if is_not_empty(ctrl_ta_cd) else '' +# ctrl_inst_cd = ctrl_inst_cd if is_not_empty(ctrl_inst_cd) else '' +# ctrl_emp_cd = ctrl_emp_cd if is_not_empty(ctrl_emp_cd) else '' +# ctrl_emp_chg_type_cd = ctrl_emp_chg_type_cd if is_not_empty(ctrl_emp_chg_type_cd) else '' - adapt_apply_date_from = '' - if is_not_empty(ctrl_apply_date_from): - adapt_apply_date_from = ctrl_apply_date_from.replace('/', '') - else: - ctrl_apply_date_from = '' +# adapt_apply_date_from = '' +# if is_not_empty(ctrl_apply_date_from): +# adapt_apply_date_from = ctrl_apply_date_from.replace('/', '') +# else: +# ctrl_apply_date_from = '' - adapt_start_date_from = '' - adapt_start_date_to = '' - if is_not_empty(ctrl_start_date_from): - adapt_start_date_from = ctrl_start_date_from.replace('/', '') - else: - ctrl_start_date_from = '' - if is_not_empty(ctrl_start_date_to): - adapt_start_date_to = ctrl_start_date_to.replace('/', '') - else: - ctrl_start_date_to = '' +# adapt_start_date_from = '' +# adapt_start_date_to = '' +# if is_not_empty(ctrl_start_date_from): +# adapt_start_date_from = ctrl_start_date_from.replace('/', '') +# else: +# ctrl_start_date_from = '' +# if is_not_empty(ctrl_start_date_to): +# adapt_start_date_to = ctrl_start_date_to.replace('/', '') +# else: +# ctrl_start_date_to = '' - adapt_end_date_from = '' - adapt_end_date_to = '' - if is_not_empty(ctrl_end_date_from): - adapt_end_date_from = ctrl_end_date_from.replace('/', '') - else: - ctrl_end_date_from = '' - if is_not_empty(ctrl_end_date_to): - adapt_end_date_to = ctrl_end_date_to.replace('/', '') - else: - ctrl_end_date_to = '' +# adapt_end_date_from = '' +# adapt_end_date_to = '' +# if is_not_empty(ctrl_end_date_from): +# adapt_end_date_from = ctrl_end_date_from.replace('/', 
'') +# else: +# ctrl_end_date_from = '' +# if is_not_empty(ctrl_end_date_to): +# adapt_end_date_to = ctrl_end_date_to.replace('/', '') +# else: +# ctrl_end_date_to = '' - adapt_create_date_from = '' - adapt_create_date_to = '' - if is_not_empty(ctrl_create_date_from): - adapt_create_date_from = ctrl_create_date_from.replace('/', '-') + ' 00:00:00' - else: - ctrl_create_date_from = '' - if is_not_empty(ctrl_create_date_to): - adapt_create_date_to = ctrl_create_date_to.replace('/', '-') + ' 23:59:59' - else: - ctrl_create_date_to = '' +# adapt_create_date_from = '' +# adapt_create_date_to = '' +# if is_not_empty(ctrl_create_date_from): +# adapt_create_date_from = ctrl_create_date_from.replace('/', '-') + ' 00:00:00' +# else: +# ctrl_create_date_from = '' +# if is_not_empty(ctrl_create_date_to): +# adapt_create_date_to = ctrl_create_date_to.replace('/', '-') + ' 23:59:59' +# else: +# ctrl_create_date_to = '' - adapt_update_date_from = '' - adapt_update_date_to = '' - if is_not_empty(ctrl_update_date_from): - adapt_update_date_from = ctrl_update_date_from.replace('/', '-') + ' 00:00:00' - else: - ctrl_update_date_from = '' - if is_not_empty(ctrl_update_date_to): - adapt_update_date_to = ctrl_update_date_to.replace('/', '-') + ' 23:59:59' - else: - ctrl_update_date_to = '' +# adapt_update_date_from = '' +# adapt_update_date_to = '' +# if is_not_empty(ctrl_update_date_from): +# adapt_update_date_from = ctrl_update_date_from.replace('/', '-') + ' 00:00:00' +# else: +# ctrl_update_date_from = '' +# if is_not_empty(ctrl_update_date_to): +# adapt_update_date_to = ctrl_update_date_to.replace('/', '-') + ' 23:59:59' +# else: +# ctrl_update_date_to = '' - return cls( - ta_cd=ctrl_ta_cd, - adapt_ta_cd=ctrl_ta_cd, - inst_cd=ctrl_inst_cd, - adapt_inst_cd=ctrl_inst_cd, - emp_cd=ctrl_emp_cd, - adapt_emp_cd=ctrl_emp_cd, - emp_chg_type_cd=ctrl_emp_chg_type_cd, - adapt_emp_chg_type_cd=ctrl_emp_chg_type_cd, - apply_date_from=ctrl_apply_date_from, - 
adapt_apply_date_from=adapt_apply_date_from, - start_date_from=ctrl_start_date_from, - adapt_start_date_from=adapt_start_date_from, - start_date_to=ctrl_start_date_to, - adapt_start_date_to=adapt_start_date_to, - select_table=radio_select_table, - end_date_from=ctrl_end_date_from, - adapt_end_date_from=adapt_end_date_from, - end_date_to=ctrl_end_date_to, - adapt_end_date_to=adapt_end_date_to, - create_date_from=ctrl_create_date_from, - adapt_create_date_from=adapt_create_date_from, - create_date_to=ctrl_create_date_to, - adapt_create_date_to=adapt_create_date_to, - update_date_from=ctrl_update_date_from, - adapt_update_date_from=adapt_update_date_from, - update_date_to=ctrl_update_date_to, - adapt_update_date_to=adapt_update_date_to - ) +# return cls( +# ta_cd=ctrl_ta_cd, +# adapt_ta_cd=ctrl_ta_cd, +# inst_cd=ctrl_inst_cd, +# adapt_inst_cd=ctrl_inst_cd, +# emp_cd=ctrl_emp_cd, +# adapt_emp_cd=ctrl_emp_cd, +# emp_chg_type_cd=ctrl_emp_chg_type_cd, +# adapt_emp_chg_type_cd=ctrl_emp_chg_type_cd, +# apply_date_from=ctrl_apply_date_from, +# adapt_apply_date_from=adapt_apply_date_from, +# start_date_from=ctrl_start_date_from, +# adapt_start_date_from=adapt_start_date_from, +# start_date_to=ctrl_start_date_to, +# adapt_start_date_to=adapt_start_date_to, +# select_table=radio_select_table, +# end_date_from=ctrl_end_date_from, +# adapt_end_date_from=adapt_end_date_from, +# end_date_to=ctrl_end_date_to, +# adapt_end_date_to=adapt_end_date_to, +# create_date_from=ctrl_create_date_from, +# adapt_create_date_from=adapt_create_date_from, +# create_date_to=ctrl_create_date_to, +# adapt_create_date_to=adapt_create_date_to, +# update_date_from=ctrl_update_date_from, +# adapt_update_date_from=adapt_update_date_from, +# update_date_to=ctrl_update_date_to, +# adapt_update_date_to=adapt_update_date_to +# ) diff --git a/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py b/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py index 2f24bd8b..26de9648 100644 --- 
a/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py +++ b/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py @@ -1,31 +1,32 @@ -from typing import Optional, Annotated +# # ファイル削除予定 +# from typing import Optional, Annotated -from fastapi import Form +# from fastapi import Form -from src.util.sanitize import sanitize -from fastapi import File, UploadFile +# from src.util.sanitize import sanitize +# from fastapi import File, UploadFile -from src.model.request.request_base_model import RequestBaseModel +# from src.model.request.request_base_model import RequestBaseModel -@sanitize -class MasterMainteCsvUpModel(RequestBaseModel): - csv_file: Optional[Annotated[UploadFile, File()]] - select_function: Optional[str] - select_table: Optional[str] - json_upload_data: Optional[str] +# @sanitize +# class MasterMainteCsvUpModel(RequestBaseModel): +# csv_file: Optional[Annotated[UploadFile, File()]] +# select_function: Optional[str] +# select_table: Optional[str] +# json_upload_data: Optional[str] - @classmethod - def as_form( - cls, - ctrl_csv_file: UploadFile = Form(None), - ctrl_select_function: Optional[str] = Form(None), - ctrl_select_table: Optional[str] = Form(None), - ctrl_json_upload_data: Optional[str] = Form(None) - ): - return cls( - csv_file=ctrl_csv_file, - select_function=ctrl_select_function, - select_table=ctrl_select_table, - json_upload_data=ctrl_json_upload_data - ) +# @classmethod +# def as_form( +# cls, +# ctrl_csv_file: UploadFile = Form(None), +# ctrl_select_function: Optional[str] = Form(None), +# ctrl_select_table: Optional[str] = Form(None), +# ctrl_json_upload_data: Optional[str] = Form(None) +# ): +# return cls( +# csv_file=ctrl_csv_file, +# select_function=ctrl_select_function, +# select_table=ctrl_select_table, +# json_upload_data=ctrl_json_upload_data +# ) diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py index e7ac331a..4bbc8ec5 
100644 --- a/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py @@ -1,32 +1,33 @@ -from pydantic import BaseModel +#ファイル削除予定 +# from pydantic import BaseModel -from src.util.string_util import is_not_empty +# from src.util.string_util import is_not_empty -class InstEmpCsvDownloadViewModel(BaseModel): - subtitle: str = '施設担当者データCSVダウンロード' - is_search: bool = False - ta_cd: str = '' - inst_cd: str = '' - emp_cd: str = '' - emp_chg_type_cd: str = '' - apply_date_from: str = '' - start_date_from: str = '' - start_date_to: str = '' - end_date_from: str = '' - end_date_to: str = '' - create_date_from: str = '' - create_date_to: str = '' - update_date_from: str = '' - update_date_to: str = '' - select_table: str = '' - data_count: int = 0 - result_msg: str = '' - download_file_url: str = '' - file_name: str = '' +# class InstEmpCsvDownloadViewModel(BaseModel): +# subtitle: str = '施設担当者データCSVダウンロード' +# is_search: bool = False +# ta_cd: str = '' +# inst_cd: str = '' +# emp_cd: str = '' +# emp_chg_type_cd: str = '' +# apply_date_from: str = '' +# start_date_from: str = '' +# start_date_to: str = '' +# end_date_from: str = '' +# end_date_to: str = '' +# create_date_from: str = '' +# create_date_to: str = '' +# update_date_from: str = '' +# update_date_to: str = '' +# select_table: str = '' +# data_count: int = 0 +# result_msg: str = '' +# download_file_url: str = '' +# file_name: str = '' - def is_select_table_empty(self): - return not is_not_empty(self.select_table) +# def is_select_table_empty(self): +# return not is_not_empty(self.select_table) - def is_download_file_url_empty(self): - return not is_not_empty(self.download_file_url) +# def is_download_file_url_empty(self): +# return not is_not_empty(self.download_file_url) diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py index 
f7d40c00..4ceadd31 100644 --- a/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py @@ -1,48 +1,49 @@ -from pydantic import BaseModel +#ファイル削除予定 +# from pydantic import BaseModel -from src.system_var import constants +# from src.system_var import constants -class InstEmpCsvUploadViewModel(BaseModel): - subtitle: str = '施設担当者データCSVアップロード' - is_verified: bool = False - is_insert: bool = False - error_message_list: list[str] = None - select_function: str = None - select_table: str = None - csv_file_name: str = None - csv_upload_list: list[dict] = None - json_upload_data: str = None - result_message_list: list[str] = None - select_function_message: str = None +# class InstEmpCsvUploadViewModel(BaseModel): +# subtitle: str = '施設担当者データCSVアップロード' +# is_verified: bool = False +# is_insert: bool = False +# error_message_list: list[str] = None +# select_function: str = None +# select_table: str = None +# csv_file_name: str = None +# csv_upload_list: list[dict] = None +# json_upload_data: str = None +# result_message_list: list[str] = None +# select_function_message: str = None - def select_table_message(self): - return self.__dummy_table() if self.select_table == 'dummy' else self.__real_table() +# def select_table_message(self): +# return self.__dummy_table() if self.select_table == 'dummy' else self.__real_table() - def upload_data_columns(self) -> list[str]: - return self.__inst_emp_columns() +# def upload_data_columns(self) -> list[str]: +# return self.__inst_emp_columns() - def is_select_function_empty(self): - return self.select_function is None or len(self.select_function) == 0 +# def is_select_function_empty(self): +# return self.select_function is None or len(self.select_function) == 0 - def is_select_table_empty(self): - return self.select_table is None or len(self.select_table) == 0 +# def is_select_table_empty(self): +# return self.select_table is None or 
len(self.select_table) == 0 - def is_error_message_list_empty(self): - return self.error_message_list is None or len(self.error_message_list) == 0 +# def is_error_message_list_empty(self): +# return self.error_message_list is None or len(self.error_message_list) == 0 - def csv_data_count(self): - return 0 if self.csv_upload_list is None else len(self.csv_upload_list) +# def csv_data_count(self): +# return 0 if self.csv_upload_list is None else len(self.csv_upload_list) - def __inst_emp_columns(self) -> list[str]: - if self.select_function == 'new': - return constants.NEW_INST_EMP_CSV_LOGICAL_NAMES - if self.select_function == 'change': - return constants.CHANGE_INST_CSV_LOGICAL_NAMES - return [] +# def __inst_emp_columns(self) -> list[str]: +# if self.select_function == 'new': +# return constants.NEW_INST_EMP_CSV_LOGICAL_NAMES +# if self.select_function == 'change': +# return constants.CHANGE_INST_CSV_LOGICAL_NAMES +# return [] - def __real_table(self): - return constants.CSV_REAL_TABLE_NAME +# def __real_table(self): +# return constants.CSV_REAL_TABLE_NAME - def __dummy_table(self): - return constants.CSV_CHANGE_TABLE_NAME +# def __dummy_table(self): +# return constants.CSV_CHANGE_TABLE_NAME diff --git a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py index 2b1629b1..68da3996 100644 --- a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py +++ b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py @@ -1,5 +1,6 @@ -from pydantic import BaseModel +# ファイル削除予定 +# from pydantic import BaseModel -class MasterMainteMenuViewModel(BaseModel): - subtitle: str = 'MeDaCA マスターメンテメニュー' +# class MasterMainteMenuViewModel(BaseModel): +# subtitle: str = 'MeDaCA マスターメンテメニュー' diff --git a/ecs/jskult-webapp/src/model/view/menu_view_model.py b/ecs/jskult-webapp/src/model/view/menu_view_model.py index 06d62ab9..468ec5ec 100644 --- 
a/ecs/jskult-webapp/src/model/view/menu_view_model.py +++ b/ecs/jskult-webapp/src/model/view/menu_view_model.py @@ -13,8 +13,8 @@ class MenuViewModel(BaseModel): def is_batch_processing(self): return self.batch_status == constants.BATCH_STATUS_PROCESSING - def is_backup_processing(self): - return self.dump_status != constants.DUMP_STATUS_UNPROCESSED + #TODO 削除予定 def is_backup_processing(self): + # return self.dump_status != constants.DUMP_STATUS_UNPROCESSED def is_available_ult_doctor_menu(self): return self.user_model.has_ult_doctor_permission() @@ -24,6 +24,7 @@ class MenuViewModel(BaseModel): def is_available_bio_menu(self): return self.user_model.has_bio_permission() + + #TODO 削除予定 def is_available_master_maintenance_menu(self): + # return self.user_model.has_master_maintenance_permission() - def is_available_master_maintenance_menu(self): - return self.user_model.has_master_maintenance_permission() diff --git a/ecs/jskult-webapp/src/model/view/table_override_view_model.py b/ecs/jskult-webapp/src/model/view/table_override_view_model.py index 2301527a..1ef3d6ac 100644 --- a/ecs/jskult-webapp/src/model/view/table_override_view_model.py +++ b/ecs/jskult-webapp/src/model/view/table_override_view_model.py @@ -1,7 +1,8 @@ -from pydantic import BaseModel +# ファイル削除予定 +# from pydantic import BaseModel -class TableOverrideViewModel(BaseModel): - subtitle: str = 'テーブル上書きコピー' +# class TableOverrideViewModel(BaseModel): +# subtitle: str = 'テーブル上書きコピー' - is_override: bool = False +# is_override: bool = False diff --git a/ecs/jskult-webapp/src/model/view/user_view_model.py b/ecs/jskult-webapp/src/model/view/user_view_model.py index 0dcf401d..bae3af73 100644 --- a/ecs/jskult-webapp/src/model/view/user_view_model.py +++ b/ecs/jskult-webapp/src/model/view/user_view_model.py @@ -19,6 +19,7 @@ class UserViewModel(BaseModel): def has_bio_permission(self): return self.bio_flg == constants.PERMISSION_ENABLED + + #TODO 削除予定 def has_master_maintenance_permission(self): + # return 
self.master_mainte_flg == constants.PERMISSION_ENABLED - def has_master_maintenance_permission(self): - return self.master_mainte_flg == constants.PERMISSION_ENABLED diff --git a/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py b/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py index 319918e0..3580d2f7 100644 --- a/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py +++ b/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py @@ -1,29 +1,30 @@ -from src.repositories.base_repository import BaseRepository -from src.model.db.master_mente_count import MasterMenteCountModel -from src.logging.get_logger import get_logger +# # file削除予定 +# from src.repositories.base_repository import BaseRepository +# from src.model.db.master_mente_count import MasterMenteCountModel +# from src.logging.get_logger import get_logger -logger = get_logger('ビジネスユニットマスタ') +# logger = get_logger('ビジネスユニットマスタ') -class BuMasterRepository(BaseRepository): +# class BuMasterRepository(BaseRepository): - FETCH_COUNT_SQL = """\ - SELECT - COUNT(*) AS count - FROM - src05.bu - WHERE - bu.bu_cd = :bu_cd - """ +# FETCH_COUNT_SQL = """\ +# SELECT +# COUNT(*) AS count +# FROM +# src05.bu +# WHERE +# bu.bu_cd = :bu_cd +# """ - def fetch_count(self, bu_cd) -> MasterMenteCountModel: - try: - query = self.FETCH_COUNT_SQL - result = self._database.execute_select(query, {'bu_cd': bu_cd}) - models = [MasterMenteCountModel(**r) for r in result] - if len(models) == 0: - return 0 - return models[0].count - except Exception as e: - logger.exception(f"DB Error : Exception={e.args}") - raise e +# def fetch_count(self, bu_cd) -> MasterMenteCountModel: +# try: +# query = self.FETCH_COUNT_SQL +# result = self._database.execute_select(query, {'bu_cd': bu_cd}) +# models = [MasterMenteCountModel(**r) for r in result] +# if len(models) == 0: +# return 0 +# return models[0].count +# except Exception as e: +# logger.exception(f"DB Error : Exception={e.args}") +# raise e diff --git 
a/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py b/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py index fe5329f2..64097cb1 100644 --- a/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py @@ -1,289 +1,290 @@ -from src.db import sql_condition as condition -from src.db.sql_condition import SQLCondition -from src.logging.get_logger import get_logger -from src.model.db.master_mente_count import MasterMenteCountModel -from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel -from src.repositories.base_repository import BaseRepository -from src.util.string_util import is_not_empty +# ファイル削除予定 +# from src.db import sql_condition as condition +# from src.db.sql_condition import SQLCondition +# from src.logging.get_logger import get_logger +# from src.model.db.master_mente_count import MasterMenteCountModel +# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel +# from src.repositories.base_repository import BaseRepository +# from src.util.string_util import is_not_empty -logger = get_logger('従業員担当施設マスタ') +# logger = get_logger('従業員担当施設マスタ') -class EmpChgInstRepository(BaseRepository): +# class EmpChgInstRepository(BaseRepository): - def to_jst(self): - self._database.to_jst() +# def to_jst(self): +# self._database.to_jst() - def begin(self): - self._database.begin() +# def begin(self): +# self._database.begin() - def commit(self): - self._database.commit() +# def commit(self): +# self._database.commit() - def rollback(self): - self._database.rollback() +# def rollback(self): +# self._database.rollback() - INSERT_SQL = """\ - INSERT INTO {table_name} - ( - inst_cd, - ta_cd, - emp_chg_type_cd, - emp_cd, - bu_cd, - start_date, - end_date, - main_chg_flg, - enabled_flg, - creater, - create_date, - updater, - update_date - ) - VALUES ( - :inst_cd, - :ta_cd, - :emp_chg_type_cd, - :emp_cd, - :bu_cd, - :start_date, - :end_date, - '1', - 
'Y', - :create_user_name, - NOW(), - :update_user_name, - NOW() - ) - """ +# INSERT_SQL = """\ +# INSERT INTO {table_name} +# ( +# inst_cd, +# ta_cd, +# emp_chg_type_cd, +# emp_cd, +# bu_cd, +# start_date, +# end_date, +# main_chg_flg, +# enabled_flg, +# creater, +# create_date, +# updater, +# update_date +# ) +# VALUES ( +# :inst_cd, +# :ta_cd, +# :emp_chg_type_cd, +# :emp_cd, +# :bu_cd, +# :start_date, +# :end_date, +# '1', +# 'Y', +# :create_user_name, +# NOW(), +# :update_user_name, +# NOW() +# ) +# """ - def insert_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, emp_cd, bu_cd, start_date, - end_date, create_user_name, table_name): - try: - query = self.INSERT_SQL.format(table_name=table_name) - self._database.execute(query, { - 'inst_cd': inst_cd, - 'ta_cd': ta_cd, - 'emp_chg_type_cd': emp_chg_type_cd, - 'emp_cd': emp_cd, - 'bu_cd': bu_cd, - 'start_date': start_date, - 'end_date': end_date, - 'create_user_name': create_user_name, - 'update_user_name': create_user_name - }) - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def insert_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, emp_cd, bu_cd, start_date, +# end_date, create_user_name, table_name): +# try: +# query = self.INSERT_SQL.format(table_name=table_name) +# self._database.execute(query, { +# 'inst_cd': inst_cd, +# 'ta_cd': ta_cd, +# 'emp_chg_type_cd': emp_chg_type_cd, +# 'emp_cd': emp_cd, +# 'bu_cd': bu_cd, +# 'start_date': start_date, +# 'end_date': end_date, +# 'create_user_name': create_user_name, +# 'update_user_name': create_user_name +# }) +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - UPDATE_END_DATE_SQL = """\ - UPDATE - {table_name} - SET - end_date = :end_date, - updater = :update_user_name, - update_date = NOW() - WHERE - inst_cd = :inst_cd - AND ta_cd = :ta_cd - AND emp_chg_type_cd = :emp_chg_type_cd - AND start_date = :start_date - """ +# UPDATE_END_DATE_SQL = """\ +# UPDATE +# {table_name} +# 
SET +# end_date = :end_date, +# updater = :update_user_name, +# update_date = NOW() +# WHERE +# inst_cd = :inst_cd +# AND ta_cd = :ta_cd +# AND emp_chg_type_cd = :emp_chg_type_cd +# AND start_date = :start_date +# """ - def end_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, - end_date, update_user_name, table_name): - try: - query = self.UPDATE_END_DATE_SQL.format(table_name=table_name) - self._database.execute(query, { - 'inst_cd': inst_cd, - 'ta_cd': ta_cd, - 'emp_chg_type_cd': emp_chg_type_cd, - 'start_date': start_date, - 'end_date': end_date, - 'update_user_name': update_user_name - }) - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def end_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, +# end_date, update_user_name, table_name): +# try: +# query = self.UPDATE_END_DATE_SQL.format(table_name=table_name) +# self._database.execute(query, { +# 'inst_cd': inst_cd, +# 'ta_cd': ta_cd, +# 'emp_chg_type_cd': emp_chg_type_cd, +# 'start_date': start_date, +# 'end_date': end_date, +# 'update_user_name': update_user_name +# }) +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - UPDATE_EMP_CD_SQL = """\ - UPDATE - {table_name} - SET - emp_cd = :emp_cd, - updater = :update_user_name, - update_date = NOW() - where - inst_cd = :inst_cd - AND ta_cd = :ta_cd - AND emp_chg_type_cd = :emp_chg_type_cd - AND start_date = :start_date - """ +# UPDATE_EMP_CD_SQL = """\ +# UPDATE +# {table_name} +# SET +# emp_cd = :emp_cd, +# updater = :update_user_name, +# update_date = NOW() +# where +# inst_cd = :inst_cd +# AND ta_cd = :ta_cd +# AND emp_chg_type_cd = :emp_chg_type_cd +# AND start_date = :start_date +# """ - def modify_emp_chg_inst(self, inst_cd, ta_cd, start_date, emp_chg_type_cd, emp_cd, update_user_name, table_name): - try: - query = self.UPDATE_EMP_CD_SQL.format(table_name=table_name) - self._database.execute(query, { - 'inst_cd': inst_cd, - 'ta_cd': ta_cd, - 
'emp_chg_type_cd': emp_chg_type_cd, - 'start_date': start_date, - 'emp_cd': emp_cd, - 'update_user_name': update_user_name - }) - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def modify_emp_chg_inst(self, inst_cd, ta_cd, start_date, emp_chg_type_cd, emp_cd, update_user_name, table_name): +# try: +# query = self.UPDATE_EMP_CD_SQL.format(table_name=table_name) +# self._database.execute(query, { +# 'inst_cd': inst_cd, +# 'ta_cd': ta_cd, +# 'emp_chg_type_cd': emp_chg_type_cd, +# 'start_date': start_date, +# 'emp_cd': emp_cd, +# 'update_user_name': update_user_name +# }) +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - FETCH_COUNT_SQL = """\ - SELECT - COUNT(*) AS count - FROM - {table_name} - WHERE - inst_cd = :inst_cd - AND ta_cd = :ta_cd - AND emp_chg_type_cd = :emp_chg_type_cd - AND start_date = :start_date - """ +# FETCH_COUNT_SQL = """\ +# SELECT +# COUNT(*) AS count +# FROM +# {table_name} +# WHERE +# inst_cd = :inst_cd +# AND ta_cd = :ta_cd +# AND emp_chg_type_cd = :emp_chg_type_cd +# AND start_date = :start_date +# """ - def fetch_count(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, table_name) -> MasterMenteCountModel: - try: - query = self.FETCH_COUNT_SQL.format(table_name=table_name) - result = self._database.execute_select(query, {'inst_cd': inst_cd, 'ta_cd': ta_cd, - 'emp_chg_type_cd': emp_chg_type_cd, 'start_date': start_date}) - models = [MasterMenteCountModel(**r) for r in result] - if len(models) == 0: - return 0 - return models[0].count - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def fetch_count(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, table_name) -> MasterMenteCountModel: +# try: +# query = self.FETCH_COUNT_SQL.format(table_name=table_name) +# result = self._database.execute_select(query, {'inst_cd': inst_cd, 'ta_cd': ta_cd, +# 'emp_chg_type_cd': emp_chg_type_cd, 'start_date': start_date}) +# models = 
[MasterMenteCountModel(**r) for r in result] +# if len(models) == 0: +# return 0 +# return models[0].count +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - FETCH_SQL = """\ - SELECT DISTINCT - eci.inst_cd AS inst_cd, - mi.inst_name AS inst_name, - eci.ta_cd AS ta_cd, - eci.emp_chg_type_cd AS emp_chg_type_cd, - eci.emp_cd AS emp_cd, - CONCAT(emp.emp_name_family, " ", emp.emp_name_first) AS emp_name_full, - eci.bu_cd AS bu_cd, - bu.bu_name AS bu_name, - eci.start_date AS start_date, - eci.end_date AS end_date, - eci.creater AS creater, - eci.create_date AS create_date, - eci.updater AS updater, - eci.update_date AS update_date - FROM - {table_name} AS eci - LEFT JOIN mst_inst AS mi - ON eci.inst_cd = mi.inst_cd - LEFT JOIN emp - ON eci.emp_cd = emp.emp_cd - LEFT JOIN bu - ON eci.bu_cd = bu.bu_cd - WHERE - {where_clause} - """ +# FETCH_SQL = """\ +# SELECT DISTINCT +# eci.inst_cd AS inst_cd, +# mi.inst_name AS inst_name, +# eci.ta_cd AS ta_cd, +# eci.emp_chg_type_cd AS emp_chg_type_cd, +# eci.emp_cd AS emp_cd, +# CONCAT(emp.emp_name_family, " ", emp.emp_name_first) AS emp_name_full, +# eci.bu_cd AS bu_cd, +# bu.bu_name AS bu_name, +# eci.start_date AS start_date, +# eci.end_date AS end_date, +# eci.creater AS creater, +# eci.create_date AS create_date, +# eci.updater AS updater, +# eci.update_date AS update_date +# FROM +# {table_name} AS eci +# LEFT JOIN mst_inst AS mi +# ON eci.inst_cd = mi.inst_cd +# LEFT JOIN emp +# ON eci.emp_cd = emp.emp_cd +# LEFT JOIN bu +# ON eci.bu_cd = bu.bu_cd +# WHERE +# {where_clause} +# """ - def fetch_as_data_frame(self, table_name: str, parameter: MasterMainteCsvDlModel): - try: - where_clause = self.__build_condition(parameter) - query = self.FETCH_SQL.format(table_name=table_name, where_clause=where_clause) - logger.debug(f'SQL: {query}') - df = self._to_data_frame(query, parameter) - logger.debug(f'count= {df.shape[0]}') - return df - except Exception as e: - logger.exception(f'DB Error : 
Exception={e.args}') - raise e +# def fetch_as_data_frame(self, table_name: str, parameter: MasterMainteCsvDlModel): +# try: +# where_clause = self.__build_condition(parameter) +# query = self.FETCH_SQL.format(table_name=table_name, where_clause=where_clause) +# logger.debug(f'SQL: {query}') +# df = self._to_data_frame(query, parameter) +# logger.debug(f'count= {df.shape[0]}') +# return df +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - def __build_condition(self, parameter: MasterMainteCsvDlModel): - where_clauses: list[SQLCondition] = [] +# def __build_condition(self, parameter: MasterMainteCsvDlModel): +# where_clauses: list[SQLCondition] = [] - # 検索条件が指定されずにSQLが壊れることを予防するため、常に真の固定条件を追加しておく - where_clauses.append(SQLCondition('', '', '1 = 1', literal=True)) +# # 検索条件が指定されずにSQLが壊れることを予防するため、常に真の固定条件を追加しておく +# where_clauses.append(SQLCondition('', '', '1 = 1', literal=True)) - # 領域コードが入力されていた場合 - if is_not_empty(parameter.ta_cd): - parameter.adapt_ta_cd = f'%{parameter.ta_cd}%' - where_clauses.append(SQLCondition('eci.ta_cd', condition.LIKE, 'adapt_ta_cd')) +# # 領域コードが入力されていた場合 +# if is_not_empty(parameter.ta_cd): +# parameter.adapt_ta_cd = f'%{parameter.ta_cd}%' +# where_clauses.append(SQLCondition('eci.ta_cd', condition.LIKE, 'adapt_ta_cd')) - # 施設コードが入力されていた場合 - if is_not_empty(parameter.inst_cd): - parameter.adapt_inst_cd = f'%{parameter.inst_cd}%' - where_clauses.append(SQLCondition('eci.inst_cd', condition.LIKE, 'adapt_inst_cd')) +# # 施設コードが入力されていた場合 +# if is_not_empty(parameter.inst_cd): +# parameter.adapt_inst_cd = f'%{parameter.inst_cd}%' +# where_clauses.append(SQLCondition('eci.inst_cd', condition.LIKE, 'adapt_inst_cd')) - # MUIDが入力されていた場合 - if is_not_empty(parameter.emp_cd): - parameter.adapt_emp_cd = f'%{parameter.emp_cd}%' - where_clauses.append(SQLCondition('eci.emp_cd', condition.LIKE, 'adapt_emp_cd')) +# # MUIDが入力されていた場合 +# if is_not_empty(parameter.emp_cd): +# parameter.adapt_emp_cd = 
f'%{parameter.emp_cd}%' +# where_clauses.append(SQLCondition('eci.emp_cd', condition.LIKE, 'adapt_emp_cd')) - # 担当者種別コードが入力されていた場合 - if is_not_empty(parameter.emp_chg_type_cd): - parameter.adapt_emp_chg_type_cd = f'%{parameter.emp_chg_type_cd}%' - where_clauses.append(SQLCondition('eci.emp_chg_type_cd', condition.LIKE, 'adapt_emp_chg_type_cd')) +# # 担当者種別コードが入力されていた場合 +# if is_not_empty(parameter.emp_chg_type_cd): +# parameter.adapt_emp_chg_type_cd = f'%{parameter.emp_chg_type_cd}%' +# where_clauses.append(SQLCondition('eci.emp_chg_type_cd', condition.LIKE, 'adapt_emp_chg_type_cd')) - # 適用期間内が入力されていた場合 - if is_not_empty(parameter.adapt_apply_date_from): - where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_apply_date_from')) - where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_apply_date_from')) +# # 適用期間内が入力されていた場合 +# if is_not_empty(parameter.adapt_apply_date_from): +# where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_apply_date_from')) +# where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_apply_date_from')) - # 適用開始日(FROM)が入力されていた場合 - if is_not_empty(parameter.adapt_start_date_from): - where_clauses.append(SQLCondition('eci.start_date', condition.GE, 'adapt_start_date_from')) +# # 適用開始日(FROM)が入力されていた場合 +# if is_not_empty(parameter.adapt_start_date_from): +# where_clauses.append(SQLCondition('eci.start_date', condition.GE, 'adapt_start_date_from')) - # 適用開始日(TO)が入力されていた場合 - if is_not_empty(parameter.adapt_start_date_to): - where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_start_date_to')) +# # 適用開始日(TO)が入力されていた場合 +# if is_not_empty(parameter.adapt_start_date_to): +# where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_start_date_to')) - # 適用終了日(FROM)が入力されていた場合 - if is_not_empty(parameter.adapt_end_date_from): - where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_end_date_from')) +# # 適用終了日(FROM)が入力されていた場合 +# if 
is_not_empty(parameter.adapt_end_date_from): +# where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_end_date_from')) - # 適用終了日(TO)が入力されていた場合 - if is_not_empty(parameter.adapt_end_date_to): - where_clauses.append(SQLCondition('eci.end_date', condition.LE, 'adapt_end_date_to')) +# # 適用終了日(TO)が入力されていた場合 +# if is_not_empty(parameter.adapt_end_date_to): +# where_clauses.append(SQLCondition('eci.end_date', condition.LE, 'adapt_end_date_to')) - # データ作成日(FROM)が入力されていた場合 - if is_not_empty(parameter.adapt_create_date_from): - where_clauses.append(SQLCondition('eci.create_date', condition.GE, 'adapt_create_date_from')) +# # データ作成日(FROM)が入力されていた場合 +# if is_not_empty(parameter.adapt_create_date_from): +# where_clauses.append(SQLCondition('eci.create_date', condition.GE, 'adapt_create_date_from')) - # データ作成日(TO)が入力されていた場合 - if is_not_empty(parameter.adapt_create_date_to): - where_clauses.append(SQLCondition('eci.create_date', condition.LE, 'adapt_create_date_to')) +# # データ作成日(TO)が入力されていた場合 +# if is_not_empty(parameter.adapt_create_date_to): +# where_clauses.append(SQLCondition('eci.create_date', condition.LE, 'adapt_create_date_to')) - # データ更新日(FROM)が入力されていた場合 - if is_not_empty(parameter.adapt_update_date_from): - where_clauses.append(SQLCondition('eci.update_date', condition.GE, 'adapt_update_date_from')) +# # データ更新日(FROM)が入力されていた場合 +# if is_not_empty(parameter.adapt_update_date_from): +# where_clauses.append(SQLCondition('eci.update_date', condition.GE, 'adapt_update_date_from')) - # データ更新日(TO)が入力されていた場合 - if is_not_empty(parameter.adapt_update_date_to): - where_clauses.append(SQLCondition('eci.update_date', condition.LE, 'adapt_update_date_to')) +# # データ更新日(TO)が入力されていた場合 +# if is_not_empty(parameter.adapt_update_date_to): +# where_clauses.append(SQLCondition('eci.update_date', condition.LE, 'adapt_update_date_to')) - where_clauses_str = ' AND '.join([condition.apply() for condition in where_clauses]) +# where_clauses_str = ' AND '.join([condition.apply() 
for condition in where_clauses]) - logger.debug(f'条件設定終了:{where_clauses_str}') - return where_clauses_str +# logger.debug(f'条件設定終了:{where_clauses_str}') +# return where_clauses_str - DELETE_SQL = "DELETE FROM emp_chg_inst_wrk" +# DELETE_SQL = "DELETE FROM emp_chg_inst_wrk" - def delete_dummy_table(self): - try: - query = self.DELETE_SQL - self._database.execute(query) - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def delete_dummy_table(self): +# try: +# query = self.DELETE_SQL +# self._database.execute(query) +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e - COPY_TABLE_SQL = "INSERT INTO emp_chg_inst_wrk SELECT * FROM emp_chg_inst" +# COPY_TABLE_SQL = "INSERT INTO emp_chg_inst_wrk SELECT * FROM emp_chg_inst" - def copy_real_to_dummy(self): - try: - query = self.COPY_TABLE_SQL - self._database.execute(query) - except Exception as e: - logger.exception(f'DB Error : Exception={e.args}') - raise e +# def copy_real_to_dummy(self): +# try: +# query = self.COPY_TABLE_SQL +# self._database.execute(query) +# except Exception as e: +# logger.exception(f'DB Error : Exception={e.args}') +# raise e diff --git a/ecs/jskult-webapp/src/repositories/emp_master_repository.py b/ecs/jskult-webapp/src/repositories/emp_master_repository.py index 9ef6aa4a..c3f567c1 100644 --- a/ecs/jskult-webapp/src/repositories/emp_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/emp_master_repository.py @@ -1,31 +1,32 @@ -from src.repositories.base_repository import BaseRepository -from src.model.db.master_mente_count import MasterMenteCountModel -from src.logging.get_logger import get_logger +# ファイル削除予定 +# from src.repositories.base_repository import BaseRepository +# from src.model.db.master_mente_count import MasterMenteCountModel +# from src.logging.get_logger import get_logger -logger = get_logger('従業員マスタ') +# logger = get_logger('従業員マスタ') -class EmpMasterRepository(BaseRepository): +# class 
EmpMasterRepository(BaseRepository): - FETCH_COUNT_SQL = """\ - SELECT - COUNT(*) AS count - FROM - src05.emp - WHERE - emp.emp_cd = :emp_cd - AND str_to_date(emp.start_date, '%Y%m%d') <= str_to_date(:start_work_date, '%Y%m%d') - AND str_to_date(:start_work_date, '%Y%m%d') <= str_to_date(emp.end_date ,'%Y%m%d') - """ +# FETCH_COUNT_SQL = """\ +# SELECT +# COUNT(*) AS count +# FROM +# src05.emp +# WHERE +# emp.emp_cd = :emp_cd +# AND str_to_date(emp.start_date, '%Y%m%d') <= str_to_date(:start_work_date, '%Y%m%d') +# AND str_to_date(:start_work_date, '%Y%m%d') <= str_to_date(emp.end_date ,'%Y%m%d') +# """ - def fetch_count(self, emp_cd, start_work_date) -> MasterMenteCountModel: - try: - query = self.FETCH_COUNT_SQL - result = self._database.execute_select(query, {'emp_cd': emp_cd, 'start_work_date': start_work_date}) - models = [MasterMenteCountModel(**r) for r in result] - if len(models) == 0: - return 0 - return models[0].count - except Exception as e: - logger.exception(f"DB Error : Exception={e.args}") - raise e +# def fetch_count(self, emp_cd, start_work_date) -> MasterMenteCountModel: +# try: +# query = self.FETCH_COUNT_SQL +# result = self._database.execute_select(query, {'emp_cd': emp_cd, 'start_work_date': start_work_date}) +# models = [MasterMenteCountModel(**r) for r in result] +# if len(models) == 0: +# return 0 +# return models[0].count +# except Exception as e: +# logger.exception(f"DB Error : Exception={e.args}") +# raise e diff --git a/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py b/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py index e2d210c4..7e4dc0dc 100644 --- a/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py +++ b/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py @@ -1,33 +1,34 @@ -from src.repositories.base_repository import BaseRepository -from src.model.db.master_mente_count import MasterMenteCountModel -from src.logging.get_logger import get_logger +# ファイル削除予定 +# from 
src.repositories.base_repository import BaseRepository +# from src.model.db.master_mente_count import MasterMenteCountModel +# from src.logging.get_logger import get_logger -logger = get_logger('汎用区分マスタ') +# logger = get_logger('汎用区分マスタ') -class GenericKbnMstRepository(BaseRepository): +# class GenericKbnMstRepository(BaseRepository): - FETCH_SQL = """\ - SELECT - COUNT(*) AS count - FROM - src05.generic_kbn_mst - WHERE - generic_kbn_mst.generic_kbn_cd = :generic_kbn_cd - AND - generic_kbn_mst.kbn_cd = :kbn_cd - AND - STR_TO_DATE( :start_date , '%Y%m%d') BETWEEN generic_kbn_mst.start_date AND generic_kbn_mst.end_date\ - """ +# FETCH_SQL = """\ +# SELECT +# COUNT(*) AS count +# FROM +# src05.generic_kbn_mst +# WHERE +# generic_kbn_mst.generic_kbn_cd = :generic_kbn_cd +# AND +# generic_kbn_mst.kbn_cd = :kbn_cd +# AND +# STR_TO_DATE( :start_date , '%Y%m%d') BETWEEN generic_kbn_mst.start_date AND generic_kbn_mst.end_date\ +# """ - def fetch_count(self, generic_kbn_cd: str, kbn_cd: str, start_date: str) -> MasterMenteCountModel: - try: - query = self.FETCH_SQL - result = self._database.execute_select(query, {'generic_kbn_cd': generic_kbn_cd, 'kbn_cd': kbn_cd, 'start_date' : start_date}) - models = [MasterMenteCountModel(**r) for r in result] - if len(models) == 0: - return 0 - return models[0].count - except Exception as e: - logger.error(f"DB Error : Exception={e.args}") - raise e \ No newline at end of file +# def fetch_count(self, generic_kbn_cd: str, kbn_cd: str, start_date: str) -> MasterMenteCountModel: +# try: +# query = self.FETCH_SQL +# result = self._database.execute_select(query, {'generic_kbn_cd': generic_kbn_cd, 'kbn_cd': kbn_cd, 'start_date' : start_date}) +# models = [MasterMenteCountModel(**r) for r in result] +# if len(models) == 0: +# return 0 +# return models[0].count +# except Exception as e: +# logger.error(f"DB Error : Exception={e.args}") +# raise e \ No newline at end of file diff --git a/ecs/jskult-webapp/src/repositories/mst_inst_repository.py 
b/ecs/jskult-webapp/src/repositories/mst_inst_repository.py index 6ad64652..44f6e426 100644 --- a/ecs/jskult-webapp/src/repositories/mst_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/mst_inst_repository.py @@ -1,29 +1,30 @@ -from src.repositories.base_repository import BaseRepository -from src.model.db.master_mente_count import MasterMenteCountModel -from src.logging.get_logger import get_logger +# ファイル削除予定 +# from src.repositories.base_repository import BaseRepository +# from src.model.db.master_mente_count import MasterMenteCountModel +# from src.logging.get_logger import get_logger -logger = get_logger('メルク施設マスタ') +# logger = get_logger('メルク施設マスタ') -class MstInstRepository(BaseRepository): +# class MstInstRepository(BaseRepository): - FETCH_COUNT_SQL = """\ - SELECT - COUNT(*) AS count - FROM - src05.mst_inst - WHERE - mst_inst.inst_cd = :inst_cd - """ +# FETCH_COUNT_SQL = """\ +# SELECT +# COUNT(*) AS count +# FROM +# src05.mst_inst +# WHERE +# mst_inst.inst_cd = :inst_cd +# """ - def fetch_count(self, inst_cd) -> MasterMenteCountModel: - try: - query = self.FETCH_COUNT_SQL - result = self._database.execute_select(query, {'inst_cd': inst_cd}) - models = [MasterMenteCountModel(**r) for r in result] - if len(models) == 0: - return 0 - return models[0].count - except Exception as e: - logger.exception(f"DB Error : Exception={e.args}") - raise e +# def fetch_count(self, inst_cd) -> MasterMenteCountModel: +# try: +# query = self.FETCH_COUNT_SQL +# result = self._database.execute_select(query, {'inst_cd': inst_cd}) +# models = [MasterMenteCountModel(**r) for r in result] +# if len(models) == 0: +# return 0 +# return models[0].count +# except Exception as e: +# logger.exception(f"DB Error : Exception={e.args}") +# raise e diff --git a/ecs/jskult-webapp/src/services/batch_status_service.py b/ecs/jskult-webapp/src/services/batch_status_service.py index ffb57af0..0d7fa372 100644 --- a/ecs/jskult-webapp/src/services/batch_status_service.py +++ 
b/ecs/jskult-webapp/src/services/batch_status_service.py @@ -40,12 +40,13 @@ class BatchStatusService(BaseService): self.__assert_record_exists() return self.hdke_table_record.bch_actf == constants.BATCH_STATUS_PROCESSING - def is_dump_processing(self): - """dump処理処理中かどうかを判定する""" + # 削除予定 + # def is_dump_processing(self): + # """dump処理処理中かどうかを判定する""" - # 日付マスタのレコードがあることを確認 - self.__assert_record_exists() - return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED + # # 日付マスタのレコードがあることを確認 + # self.__assert_record_exists() + # return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED def __assert_record_exists(self): """日付テーブルが有ることを保証する""" diff --git a/ecs/jskult-webapp/src/services/master_mainte_service.py b/ecs/jskult-webapp/src/services/master_mainte_service.py index 6f612a87..513a4008 100644 --- a/ecs/jskult-webapp/src/services/master_mainte_service.py +++ b/ecs/jskult-webapp/src/services/master_mainte_service.py @@ -1,265 +1,266 @@ -import os -import json -import html -import csv +# # ファイル削除予定 +# import os +# import json +# import html +# import csv -import pandas as pd +# import pandas as pd -from fastapi import HTTPException -from io import TextIOWrapper -from src.aws.aws_api_client import AWSAPIClient -from src.aws.s3 import S3Client -from src.error.exceptions import DBException -from starlette import status -from datetime import datetime -from src.services.base_service import BaseService -from src.system_var import constants, environment -from src.repositories.base_repository import BaseRepository -from src.repositories.mst_inst_repository import MstInstRepository -from src.repositories.bu_master_cd_repository import BuMasterRepository -from src.repositories.emp_master_repository import EmpMasterRepository -from src.repositories.emp_chg_inst_repository import EmpChgInstRepository -from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository -from src.model.internal.master_mainte_csv import 
MasterMainteCSVItems -from src.model.internal.master_mainte_emp_chg_inst_function import NewEmpChgInstFunction -from src.model.internal.master_mainte_emp_chg_inst_function import ChangeEmpChgInstFunction -from src.model.view.inst_emp_csv_upload_view_model import InstEmpCsvUploadViewModel -from src.model.view.table_override_view_model import TableOverrideViewModel -from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel -from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel -from src.logging.get_logger import get_logger +# from fastapi import HTTPException +# from io import TextIOWrapper +# from src.aws.aws_api_client import AWSAPIClient +# from src.aws.s3 import S3Client +# from src.error.exceptions import DBException +# from starlette import status +# from datetime import datetime +# from src.services.base_service import BaseService +# from src.system_var import constants, environment +# from src.repositories.base_repository import BaseRepository +# from src.repositories.mst_inst_repository import MstInstRepository +# from src.repositories.bu_master_cd_repository import BuMasterRepository +# from src.repositories.emp_master_repository import EmpMasterRepository +# from src.repositories.emp_chg_inst_repository import EmpChgInstRepository +# from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository +# from src.model.internal.master_mainte_csv import MasterMainteCSVItems +# from src.model.internal.master_mainte_emp_chg_inst_function import NewEmpChgInstFunction +# from src.model.internal.master_mainte_emp_chg_inst_function import ChangeEmpChgInstFunction +# from src.model.view.inst_emp_csv_upload_view_model import InstEmpCsvUploadViewModel +# from src.model.view.table_override_view_model import TableOverrideViewModel +# from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel +# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel +# from src.logging.get_logger import 
get_logger -logger = get_logger('マスターメンテ') +# logger = get_logger('マスターメンテ') -class MasterMainteService(BaseService): - REPOSITORIES = { - 'mst_inst_repository': MstInstRepository, - 'emp_master_repository': EmpMasterRepository, - 'bu_master_repository': BuMasterRepository, - 'emp_chginst_repository': EmpChgInstRepository, - 'generic_kbn_mst_repository': GenericKbnMstRepository, - } +# class MasterMainteService(BaseService): +# REPOSITORIES = { +# 'mst_inst_repository': MstInstRepository, +# 'emp_master_repository': EmpMasterRepository, +# 'bu_master_repository': BuMasterRepository, +# 'emp_chginst_repository': EmpChgInstRepository, +# 'generic_kbn_mst_repository': GenericKbnMstRepository, +# } - CLIENTS = { - 's3_client': S3Client - } +# CLIENTS = { +# 's3_client': S3Client +# } - mst_inst_repository: MstInstRepository - emp_master_repository: EmpMasterRepository - bu_master_repository: BuMasterRepository - emp_chginst_repository: EmpChgInstRepository - generic_kbn_mst_repository: GenericKbnMstRepository - s3_client: S3Client +# mst_inst_repository: MstInstRepository +# emp_master_repository: EmpMasterRepository +# bu_master_repository: BuMasterRepository +# emp_chginst_repository: EmpChgInstRepository +# generic_kbn_mst_repository: GenericKbnMstRepository +# s3_client: S3Client - def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None: - super().__init__(repositories, clients) - self.mst_inst_repository = repositories['mst_inst_repository'] - self.emp_master_repository = repositories['emp_master_repository'] - self.bu_master_repository = repositories['bu_master_repository'] - self.emp_chginst_repository = repositories['emp_chginst_repository'] - self.generic_kbn_mst_repository = repositories['generic_kbn_mst_repository'] - self.s3_client = clients['s3_client'] +# def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None: +# super().__init__(repositories, clients) +# 
self.mst_inst_repository = repositories['mst_inst_repository'] +# self.emp_master_repository = repositories['emp_master_repository'] +# self.bu_master_repository = repositories['bu_master_repository'] +# self.emp_chginst_repository = repositories['emp_chginst_repository'] +# self.generic_kbn_mst_repository = repositories['generic_kbn_mst_repository'] +# self.s3_client = clients['s3_client'] - def prepare_mainte_csv_up_view(self, - file: TextIOWrapper, - csv_file_name: str, - csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel: +# def prepare_mainte_csv_up_view(self, +# file: TextIOWrapper, +# csv_file_name: str, +# csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel: - if csv_upload_form.select_function != 'new' and csv_upload_form.select_function != 'change': - raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}') - if csv_upload_form.select_table != 'dummy' and csv_upload_form.select_table != 'real': - raise Exception(f'登録テーブルの選択値が不正です: {csv_upload_form.select_table}') +# if csv_upload_form.select_function != 'new' and csv_upload_form.select_function != 'change': +# raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}') +# if csv_upload_form.select_table != 'dummy' and csv_upload_form.select_table != 'real': +# raise Exception(f'登録テーブルの選択値が不正です: {csv_upload_form.select_table}') - (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table) +# (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table) - csv_items = MasterMainteCSVItems( - file, - csv_upload_form.select_function, - table_name, - self.mst_inst_repository, - self.emp_master_repository, - self.bu_master_repository, - self.emp_chginst_repository, - self.generic_kbn_mst_repository - ) +# csv_items = MasterMainteCSVItems( +# file, +# csv_upload_form.select_function, +# table_name, +# self.mst_inst_repository, +# self.emp_master_repository, +# self.bu_master_repository, +# 
self.emp_chginst_repository, +# self.generic_kbn_mst_repository +# ) - error_message_list = [] - # CSVファイル0件(ヘッダ行のみ)チェック - if len(csv_items.lines) == 0: - error_message_list.append('選択されたCSVファイルの2行目以降に値が記入されておりません。') - else: - for row_item in csv_items: - error_message_list.extend([data for data in row_item.validate()]) +# error_message_list = [] +# # CSVファイル0件(ヘッダ行のみ)チェック +# if len(csv_items.lines) == 0: +# error_message_list.append('選択されたCSVファイルの2行目以降に値が記入されておりません。') +# else: +# for row_item in csv_items: +# error_message_list.extend([data for data in row_item.validate()]) - csv_upload_list = [] - json_upload_data = '' - if len(error_message_list) == 0: - csv_upload_list: list[dict] = csv_items.to_dict() - # json作成 - json_upload_data = csv_items.to_json() +# csv_upload_list = [] +# json_upload_data = '' +# if len(error_message_list) == 0: +# csv_upload_list: list[dict] = csv_items.to_dict() +# # json作成 +# json_upload_data = csv_items.to_json() - mainte_csv_up = InstEmpCsvUploadViewModel( - is_verified=True, - error_message_list=error_message_list, - select_function=csv_upload_form.select_function, - select_table=csv_upload_form.select_table, - csv_upload_list=csv_upload_list, - json_upload_data=json_upload_data, - csv_file_name=csv_file_name, - select_function_message=self.__make_dialog_confirm_message( - csv_upload_form.select_function, - selected_table_msg) - ) - return mainte_csv_up +# mainte_csv_up = InstEmpCsvUploadViewModel( +# is_verified=True, +# error_message_list=error_message_list, +# select_function=csv_upload_form.select_function, +# select_table=csv_upload_form.select_table, +# csv_upload_list=csv_upload_list, +# json_upload_data=json_upload_data, +# csv_file_name=csv_file_name, +# select_function_message=self.__make_dialog_confirm_message( +# csv_upload_form.select_function, +# selected_table_msg) +# ) +# return mainte_csv_up - def prepare_mainte_new_inst_view(self, - user_name: str, - csv_upload_form: MasterMainteCsvUpModel) -> 
InstEmpCsvUploadViewModel: +# def prepare_mainte_new_inst_view(self, +# user_name: str, +# csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel: - (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table) +# (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table) - csv_data_list = json.loads(html.unescape(csv_upload_form.unescape().json_upload_data)) +# csv_data_list = json.loads(html.unescape(csv_upload_form.unescape().json_upload_data)) - if csv_upload_form.select_function == 'new': - emp_chg_inst = NewEmpChgInstFunction( - csv_data_list, - table_name, - selected_table_msg, - user_name, - self.emp_chginst_repository) - elif csv_upload_form.select_function == 'change': - emp_chg_inst = ChangeEmpChgInstFunction( - csv_data_list, - table_name, - selected_table_msg, - user_name, - self.emp_chginst_repository) - else: - raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}') +# if csv_upload_form.select_function == 'new': +# emp_chg_inst = NewEmpChgInstFunction( +# csv_data_list, +# table_name, +# selected_table_msg, +# user_name, +# self.emp_chginst_repository) +# elif csv_upload_form.select_function == 'change': +# emp_chg_inst = ChangeEmpChgInstFunction( +# csv_data_list, +# table_name, +# selected_table_msg, +# user_name, +# self.emp_chginst_repository) +# else: +# raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}') - (result_message_list, raw_error_list) = emp_chg_inst.save() +# (result_message_list, raw_error_list) = emp_chg_inst.save() - error_message_list = [] - error_message_list.extend(raw_error_list) +# error_message_list = [] +# error_message_list.extend(raw_error_list) - mainte_csv_up = InstEmpCsvUploadViewModel( - is_insert=True, - result_message_list=result_message_list, - error_message_list=error_message_list - ) - return mainte_csv_up +# mainte_csv_up = InstEmpCsvUploadViewModel( +# is_insert=True, +# 
result_message_list=result_message_list, +# error_message_list=error_message_list +# ) +# return mainte_csv_up - def copy_data_real_to_dummy(self) -> TableOverrideViewModel: - try: - self.emp_chginst_repository.begin() - self.emp_chginst_repository.to_jst() - self.emp_chginst_repository.delete_dummy_table() - self.emp_chginst_repository.copy_real_to_dummy() - self.emp_chginst_repository.commit() - except Exception as e: - self.emp_chginst_repository.rollback() - raise e +# def copy_data_real_to_dummy(self) -> TableOverrideViewModel: +# try: +# self.emp_chginst_repository.begin() +# self.emp_chginst_repository.to_jst() +# self.emp_chginst_repository.delete_dummy_table() +# self.emp_chginst_repository.copy_real_to_dummy() +# self.emp_chginst_repository.commit() +# except Exception as e: +# self.emp_chginst_repository.rollback() +# raise e - # コピー完了をマークして画面に返却 - table_override = TableOverrideViewModel( - is_override=True - ) - return table_override +# # コピー完了をマークして画面に返却 +# table_override = TableOverrideViewModel( +# is_override=True +# ) +# return table_override - def search_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel) -> pd.DataFrame: - try: - csv_download_form.unescape() - # 施設担当者データを検索 - search_result_df = self.search_download_emp_chg_inst_data(csv_download_form) - except DBException as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail={'error': 'db_error', 'message': e.args} - ) +# def search_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel) -> pd.DataFrame: +# try: +# csv_download_form.unescape() +# # 施設担当者データを検索 +# search_result_df = self.search_download_emp_chg_inst_data(csv_download_form) +# except DBException as e: +# raise HTTPException( +# status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, +# detail={'error': 'db_error', 'message': e.args} +# ) - return search_result_df +# return search_result_df - def search_download_emp_chg_inst_data(self, csv_download_form: 
MasterMainteCsvDlModel): - (table_name, _) = self.__choose_target_table(csv_download_form.select_table) - search_result_df = self.emp_chginst_repository.fetch_as_data_frame(table_name, csv_download_form) - return search_result_df +# def search_download_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel): +# (table_name, _) = self.__choose_target_table(csv_download_form.select_table) +# search_result_df = self.emp_chginst_repository.fetch_as_data_frame(table_name, csv_download_form) +# return search_result_df - def write_csv_file(self, data_frame: pd.DataFrame, header: list[str], download_file_name: str): - # csvに書き込み - output_file_path = os.path.join(constants.MENTE_CSV_TEMPORARY_FILE_DIR_PATH, download_file_name) - # 横長のDataFrameとするため、ヘッダーの加工処理 - header_data = {} - for df_column, header_column in zip(data_frame.columns, header): - header_data[df_column] = header_column +# def write_csv_file(self, data_frame: pd.DataFrame, header: list[str], download_file_name: str): +# # csvに書き込み +# output_file_path = os.path.join(constants.MENTE_CSV_TEMPORARY_FILE_DIR_PATH, download_file_name) +# # 横長のDataFrameとするため、ヘッダーの加工処理 +# header_data = {} +# for df_column, header_column in zip(data_frame.columns, header): +# header_data[df_column] = header_column - header_df = pd.DataFrame([header_data], index=None) - output_df = pd.concat([header_df, data_frame]) - # ヘッダー行としてではなく、1レコードとして出力する - output_df.to_csv(output_file_path, encoding="utf-8_sig", quoting=csv.QUOTE_ALL, index=False, header=False) +# header_df = pd.DataFrame([header_data], index=None) +# output_df = pd.concat([header_df, data_frame]) +# # ヘッダー行としてではなく、1レコードとして出力する +# output_df.to_csv(output_file_path, encoding="utf-8_sig", quoting=csv.QUOTE_ALL, index=False, header=False) - return output_file_path +# return output_file_path - def upload_emp_chg_inst_data_file(self, df: pd.DataFrame, user_id: str, select_table: str) -> tuple[str, str]: - if df.shape[0] == 0: - return '該当データが存在しないためCSVファイルを出力しませんでした', '' +# 
def upload_emp_chg_inst_data_file(self, df: pd.DataFrame, user_id: str, select_table: str) -> tuple[str, str]: +# if df.shape[0] == 0: +# return '該当データが存在しないためCSVファイルを出力しませんでした', '' - # ファイル名に使用するタイムスタンプを初期化しておく - current_timestamp = datetime.now() - download_file_name = f'Result_{user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.csv' +# # ファイル名に使用するタイムスタンプを初期化しておく +# current_timestamp = datetime.now() +# download_file_name = f'Result_{user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.csv' - # ファイルを書き出し(CSV) - local_file_path = self.__write_emp_chg_inst_data_to_file(df, download_file_name) +# # ファイルを書き出し(CSV) +# local_file_path = self.__write_emp_chg_inst_data_to_file(df, download_file_name) - # ローカルファイルからS3にアップロードし、ダウンロード用URLを取得する - download_file_url = '' - try: - bucket_name = environment.MASTER_MAINTENANCE_BUCKET - file_key = f'data/{os.path.basename(local_file_path)}' - self.s3_client.upload_file(local_file_path, bucket_name, file_key) - # アップロード後、ローカルからは削除する - self.delete_local_file(local_file_path) - download_file_url = self.generate_download_file_url(local_file_path) - except Exception as e: - logger.exception(f'S3 アクセスエラー{e}') - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail={'error': 'aws_error', 'message': e.args} - ) - if select_table == 'dummy': - result_msg = f'ダミーテーブルのデータ{df.shape[0]}件をCSVファイルに出力しました' - else: - result_msg = f'本番テーブルのデータ{df.shape[0]}件をCSVファイルに出力しました' +# # ローカルファイルからS3にアップロードし、ダウンロード用URLを取得する +# download_file_url = '' +# try: +# bucket_name = environment.MASTER_MAINTENANCE_BUCKET +# file_key = f'data/{os.path.basename(local_file_path)}' +# self.s3_client.upload_file(local_file_path, bucket_name, file_key) +# # アップロード後、ローカルからは削除する +# self.delete_local_file(local_file_path) +# download_file_url = self.generate_download_file_url(local_file_path) +# except Exception as e: +# logger.exception(f'S3 アクセスエラー{e}') +# raise HTTPException( +# status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, +# detail={'error': 
'aws_error', 'message': e.args} +# ) +# if select_table == 'dummy': +# result_msg = f'ダミーテーブルのデータ{df.shape[0]}件をCSVファイルに出力しました' +# else: +# result_msg = f'本番テーブルのデータ{df.shape[0]}件をCSVファイルに出力しました' - return result_msg, download_file_url +# return result_msg, download_file_url - def generate_download_file_url(self, local_file_path: str) -> str: - bucket_name = environment.MASTER_MAINTENANCE_BUCKET - file_key = f'data/{os.path.basename(local_file_path)}' - return self.s3_client.generate_presigned_url(bucket_name, file_key, constants.MENTE_CSV_DOWNLOAD_FILE_NAME) +# def generate_download_file_url(self, local_file_path: str) -> str: +# bucket_name = environment.MASTER_MAINTENANCE_BUCKET +# file_key = f'data/{os.path.basename(local_file_path)}' +# return self.s3_client.generate_presigned_url(bucket_name, file_key, constants.MENTE_CSV_DOWNLOAD_FILE_NAME) - def __write_emp_chg_inst_data_to_file(self, df: pd.DataFrame, download_file_name: str) -> str: - logger.info('CSVファイルを出力する') - local_file_path = self.write_csv_file( - df, header=constants.MENTE_CSV_DOWNLOAD_HEADER, download_file_name=download_file_name) +# def __write_emp_chg_inst_data_to_file(self, df: pd.DataFrame, download_file_name: str) -> str: +# logger.info('CSVファイルを出力する') +# local_file_path = self.write_csv_file( +# df, header=constants.MENTE_CSV_DOWNLOAD_HEADER, download_file_name=download_file_name) - return local_file_path +# return local_file_path - def __choose_target_table(self, select_table: str): - if select_table == 'dummy': - table_name = 'src05.emp_chg_inst_wrk' - selected_table_msg = constants.CSV_CHANGE_TABLE_NAME - elif select_table == 'real': - table_name = 'src05.emp_chg_inst' - selected_table_msg = constants.CSV_REAL_TABLE_NAME - else: - raise Exception(f'登録テーブルの選択値が不正です: {select_table}') - return (table_name, selected_table_msg) +# def __choose_target_table(self, select_table: str): +# if select_table == 'dummy': +# table_name = 'src05.emp_chg_inst_wrk' +# selected_table_msg = 
constants.CSV_CHANGE_TABLE_NAME +# elif select_table == 'real': +# table_name = 'src05.emp_chg_inst' +# selected_table_msg = constants.CSV_REAL_TABLE_NAME +# else: +# raise Exception(f'登録テーブルの選択値が不正です: {select_table}') +# return (table_name, selected_table_msg) - def __make_dialog_confirm_message(self, select_function: str, selected_table_msg: str) -> str: - select_function_msg = '新規施設登録' if select_function == 'new' else '施設担当者変更' - return f'{selected_table_msg}に{select_function_msg}を行いますか?' +# def __make_dialog_confirm_message(self, select_function: str, selected_table_msg: str) -> str: +# select_function_msg = '新規施設登録' if select_function == 'new' else '施設担当者変更' +# return f'{selected_table_msg}に{select_function_msg}を行いますか?' - def delete_local_file(self, local_file_path: str): - os.remove(local_file_path) +# def delete_local_file(self, local_file_path: str): +# os.remove(local_file_path) diff --git a/ecs/jskult-webapp/src/static/css/masterMainte.css b/ecs/jskult-webapp/src/static/css/masterMainte.css index dd760425..ae8eb63e 100644 --- a/ecs/jskult-webapp/src/static/css/masterMainte.css +++ b/ecs/jskult-webapp/src/static/css/masterMainte.css @@ -1,3 +1,4 @@ +/* ファイル削除予定 */ /* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ /* @see https://bootstrap-guide.com/content/reboot#page-defaults */ table { diff --git a/ecs/jskult-webapp/src/static/function/businessLogicScript.js b/ecs/jskult-webapp/src/static/function/businessLogicScript.js index 1ca36656..15783b66 100644 --- a/ecs/jskult-webapp/src/static/function/businessLogicScript.js +++ b/ecs/jskult-webapp/src/static/function/businessLogicScript.js @@ -289,31 +289,32 @@ function checkNumberOnlyForm($this) $this.value=str; } +// TODO 削除予定 // メニューへボタンの関数 // 機能概要:マスターメンテメニュー画面に遷移する -function backToMainteMenu(loadingElemId = '_loading'){ - sessionStorage.clear(); - // ローディング表示 - showLoading(loadingElemId); - location.href = "/masterMainte/masterMainteMenu"; -} +// function backToMainteMenu(loadingElemId = 
'_loading'){ +// sessionStorage.clear(); +// // ローディング表示 +// showLoading(loadingElemId); +// location.href = "/masterMainte/masterMainteMenu"; +// } -// 確認ダイアログ -function confirmDialog(strMesssage) { - var result = confirm(strMesssage); - return result; -} +// // 確認ダイアログ +// function confirmDialog(strMesssage) { +// var result = confirm(strMesssage); +// return result; +// } -function formInsertBtDisabled(){ - var validFlg = false; - if(document.getElementById("csvFile").value === ""){ - validFlg = true; - } +// TODO 削除予定 function formInsertBtDisabled(){ +// var validFlg = false; +// if(document.getElementById("csvFile").value === ""){ +// validFlg = true; +// } - if (validFlg == true) { - document.getElementById("confirm").disabled = true; - } - else { - document.getElementById("confirm").disabled = false; - } -} +// if (validFlg == true) { +// document.getElementById("confirm").disabled = true; +// } +// else { +// document.getElementById("confirm").disabled = false; +// } +// } diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index c3d5f4ec..8a321898 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -7,8 +7,9 @@ PERMISSION_DISABLED = 0 # 日付テーブル.バッチ処理ステータス:未処理 BATCH_STATUS_PROCESSING = '1' +# 削除予定 # 日付テーブル.dump取得状態区分:未処理 -DUMP_STATUS_UNPROCESSED = '0' +# DUMP_STATUS_UNPROCESSED = '0' # 生物由来照会 @@ -76,134 +77,136 @@ LOGOUT_REASON_MESSAGE_MAP = { LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。
再度Loginするか、
管理者に問い合わせてください。', LOGOUT_REASON_LOGIN_FAILED_LIMIT_EXCEEDED: 'ログイン失敗回数の上限を超えましたので
アカウントをロックしました。
管理者に連絡してください' } - +#削除予定 # 新規施設担当者登録CSV(マスターメンテ) -NEW_INST_EMP_CSV_LOGICAL_NAMES = [ - '施設コード', - '施設名', - '領域コード', - '担当者種別コード', - 'MUID', - '担当者名(姓)', - '担当者名(名)', - 'ビジネスユニットコード', - '適用開始日', - '適用終了日' -] +# NEW_INST_EMP_CSV_LOGICAL_NAMES = [ +# '施設コード', +# '施設名', +# '領域コード', +# '担当者種別コード', +# 'MUID', +# '担当者名(姓)', +# '担当者名(名)', +# 'ビジネスユニットコード', +# '適用開始日', +# '適用終了日' +# ] # 施設コードの列No -CSV_NEW_INST_CD_COL_NO = 0 +# CSV_NEW_INST_CD_COL_NO = 0 # 施設名の列No -CSV_NEW_INST_NAME_COL_NO = 1 +# CSV_NEW_INST_NAME_COL_NO = 1 # 領域コードの列No -CSV_NEW_TA_CD_COL_NO = 2 +# CSV_NEW_TA_CD_COL_NO = 2 # 担当者種別コードの列No -CSV_NEW_EMP_CHG_TYPE_CD_COL_NO = 3 +# CSV_NEW_EMP_CHG_TYPE_CD_COL_NO = 3 # MUIDの列No -CSV_NEW_EMP_CD_COL_NO = 4 +# CSV_NEW_EMP_CD_COL_NO = 4 # 担当者名(姓)の列No -CSV_NEW_EMP_NAME_FAMILY_COL_NO = 5 +# CSV_NEW_EMP_NAME_FAMILY_COL_NO = 5 # 担当者名(名)の列No -CSV_NEW_EMP_NAME_FIRST_COL_NO = 6 +# CSV_NEW_EMP_NAME_FIRST_COL_NO = 6 # ビジネスユニットコードの列No -CSV_NEW_BU_CD_COL_NO = 7 +# CSV_NEW_BU_CD_COL_NO = 7 # 適用開始日の列No -CSV_NEW_START_DATE = 8 +# CSV_NEW_START_DATE = 8 # 適用終了日の列No -CSV_NEW_END_DATE = 9 +# CSV_NEW_END_DATE = 9 +#削除予定 # 施設担当者変更登録CSV(マスターメンテ) -CHANGE_INST_CSV_LOGICAL_NAMES = [ - 'ビジネスユニットコード', - 'ビジネスユニット名', - '組織コード', - '組織名略称', - '施設コード', - '施設名', - '領域コード', - '説明', - '担当者種別コード', - 'MUID', - '担当者名', - '施設担当_開始日', - '施設担当_終了日', - '終了日の変更', - 'コメント' -] +# CHANGE_INST_CSV_LOGICAL_NAMES = [ +# 'ビジネスユニットコード', +# 'ビジネスユニット名', +# '組織コード', +# '組織名略称', +# '施設コード', +# '施設名', +# '領域コード', +# '説明', +# '担当者種別コード', +# 'MUID', +# '担当者名', +# '施設担当_開始日', +# '施設担当_終了日', +# '終了日の変更', +# 'コメント' +# ] # ビジネスユニットコードの列No -CSV_CHANGE_BU_CD_COL_NO = 0 -# ビジネスユニット名の列No -CSV_CHANGE_BU_NAME_COL_NO = 1 -# 組織コードの列No -CSV_CHANGE_ORG_CD_COL_NO = 2 -# 組織名略称の列No -CSV_CHANGE_ORG_SHORT_NAME_COL_NO = 3 -# 施設コードの列No -CSV_CHANGE_INST_CD_COL_NO = 4 -# 施設名の列No -CSV_CHANGE_INST_NAME_COL_NO = 5 -# 領域コードの列No -CSV_CHANGE_TA_CD_COL_NO = 6 -# 説明の列No -CSV_CHANGE_EXPLAIN_COL_NO = 7 -# 担当者種別コード -CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO = 8 
-# MUIDの列No -CSV_CHANGE_EMP_CD_COL_NO = 9 -# 担当者名の列No -CSV_CHANGE_EMP_FULL_NAME_COL_NO = 10 -# 施設担当_開始日の列No -CSV_CHANGE_INST_EMP_START_DATE_COL_NO = 11 -# 施設担当_終了日の列No -CSV_CHANGE_INST_EMP_END_DATE_COL_NO = 12 -# 終了日の変更の列No -CSV_CHANGE_CHANGE_END_DATE_COL_NO = 13 -# コメントの列No -CSV_CHANGE_COMMENT = 14 +# CSV_CHANGE_BU_CD_COL_NO = 0 +# # ビジネスユニット名の列No +# CSV_CHANGE_BU_NAME_COL_NO = 1 +# # 組織コードの列No +# CSV_CHANGE_ORG_CD_COL_NO = 2 +# # 組織名略称の列No +# CSV_CHANGE_ORG_SHORT_NAME_COL_NO = 3 +# # 施設コードの列No +# CSV_CHANGE_INST_CD_COL_NO = 4 +# # 施設名の列No +# CSV_CHANGE_INST_NAME_COL_NO = 5 +# # 領域コードの列No +# CSV_CHANGE_TA_CD_COL_NO = 6 +# # 説明の列No +# CSV_CHANGE_EXPLAIN_COL_NO = 7 +# # 担当者種別コード +# CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO = 8 +# # MUIDの列No +# CSV_CHANGE_EMP_CD_COL_NO = 9 +# # 担当者名の列No +# CSV_CHANGE_EMP_FULL_NAME_COL_NO = 10 +# # 施設担当_開始日の列No +# CSV_CHANGE_INST_EMP_START_DATE_COL_NO = 11 +# # 施設担当_終了日の列No +# CSV_CHANGE_INST_EMP_END_DATE_COL_NO = 12 +# # 終了日の変更の列No +# CSV_CHANGE_CHANGE_END_DATE_COL_NO = 13 +# # コメントの列No +# CSV_CHANGE_COMMENT = 14 +# 削除予定 # CSVアップロードテーブル名(マスターメンテ) -CSV_REAL_TABLE_NAME = '本番テーブル' -CSV_CHANGE_TABLE_NAME = 'ダミーテーブル' +# CSV_REAL_TABLE_NAME = '本番テーブル' +# CSV_CHANGE_TABLE_NAME = 'ダミーテーブル' -MENTE_CSV_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data') +# MENTE_CSV_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data') -MENTE_CSV_DOWNLOAD_EXTRACT_COLUMNS = [ - 'inst_cd', - 'inst_name', - 'ta_cd', - 'emp_chg_type_cd', - 'emp_cd', - 'emp_name_full', - 'bu_cd', - 'bu_name', - 'start_date', - 'end_date', - 'creater', - 'create_date', - 'updater', - 'update_date' -] +# MENTE_CSV_DOWNLOAD_EXTRACT_COLUMNS = [ +# 'inst_cd', +# 'inst_name', +# 'ta_cd', +# 'emp_chg_type_cd', +# 'emp_cd', +# 'emp_name_full', +# 'bu_cd', +# 'bu_name', +# 'start_date', +# 'end_date', +# 'creater', +# 'create_date', +# 'updater', +# 'update_date' +# ] -MENTE_CSV_DOWNLOAD_HEADER = [ - '施設コード', - '施設名', - '領域コード', - '担当者種別コード', - 'MUID', - '担当者名', - 
'ビジネスユニットコード', - 'ビジネスユニット名', - '適用開始日', - '適用終了日', - '作成者', - '作成日', - '更新者', - '更新日' -] +# MENTE_CSV_DOWNLOAD_HEADER = [ +# '施設コード', +# '施設名', +# '領域コード', +# '担当者種別コード', +# 'MUID', +# '担当者名', +# 'ビジネスユニットコード', +# 'ビジネスユニット名', +# '適用開始日', +# '適用終了日', +# '作成者', +# '作成日', +# '更新者', +# '更新日' +# ] -MENTE_CSV_DOWNLOAD_FILE_NAME = 'instEmpData.csv' +# MENTE_CSV_DOWNLOAD_FILE_NAME = 'instEmpData.csv' # CSVアップロードの制限サイズ=20MB -MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE = 20971520 +# MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE = 20971520 # 利用停止区分 DISPLAY_USER_STOP_DIV = { diff --git a/ecs/jskult-webapp/src/system_var/environment.py b/ecs/jskult-webapp/src/system_var/environment.py index a03b02d5..23034fdf 100644 --- a/ecs/jskult-webapp/src/system_var/environment.py +++ b/ecs/jskult-webapp/src/system_var/environment.py @@ -11,7 +11,7 @@ COGNITO_CLIENT_SECRET = os.environ['COGNITO_CLIENT_SECRET'] AWS_REGION = os.environ['AWS_REGION'] SESSION_TABLE_NAME = os.environ['SESSION_TABLE_NAME'] BIO_ACCESS_LOG_BUCKET = os.environ['BIO_ACCESS_LOG_BUCKET'] -MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET'] +# 削除予定 MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET'] DB_HOST = os.environ['DB_HOST'] DB_PORT = int(os.environ['DB_PORT']) diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html index b0b50fe8..6dac963e 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html @@ -1,3 +1,4 @@ + diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html index bf1bf845..6525e6f0 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html @@ -1,3 +1,4 @@ + @@ -210,4 +211,4 @@ {% include '_loading.html' %} {% endwith %} - + \ No newline at end of file diff --git a/ecs/jskult-webapp/src/templates/masterMainteMenu.html 
b/ecs/jskult-webapp/src/templates/masterMainteMenu.html index fe0f7a93..610e6ad4 100644 --- a/ecs/jskult-webapp/src/templates/masterMainteMenu.html +++ b/ecs/jskult-webapp/src/templates/masterMainteMenu.html @@ -1,3 +1,4 @@ + diff --git a/ecs/jskult-webapp/src/templates/menu.html b/ecs/jskult-webapp/src/templates/menu.html index 8b920198..26c53ca1 100644 --- a/ecs/jskult-webapp/src/templates/menu.html +++ b/ecs/jskult-webapp/src/templates/menu.html @@ -23,7 +23,7 @@
生物由来データ参照は
日次バッチ処理中のため利用出来ません
{% endif %} {% endif %} - {% if menu.is_available_master_maintenance_menu() %} + +

Logout diff --git a/ecs/jskult-webapp/src/templates/tableOverride.html b/ecs/jskult-webapp/src/templates/tableOverride.html index b5fe6a58..2303efc9 100644 --- a/ecs/jskult-webapp/src/templates/tableOverride.html +++ b/ecs/jskult-webapp/src/templates/tableOverride.html @@ -1,3 +1,4 @@ + @@ -62,4 +63,4 @@ {% include '_loading.html' %} {% endwith %} - + \ No newline at end of file From 5112e42e454e634203ebcbfda76c11a988ddf400 Mon Sep 17 00:00:00 2001 From: yono Date: Thu, 22 May 2025 17:29:32 +0900 Subject: [PATCH 02/30] =?UTF-8?q?feat:Python3.12=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/Dockerfile | 10 +- ecs/jskult-webapp/Pipfile | 2 +- ecs/jskult-webapp/Pipfile.lock | 785 +++++++++++++++++---------------- ecs/jskult-webapp/README.md | 2 +- 4 files changed, 412 insertions(+), 387 deletions(-) diff --git a/ecs/jskult-webapp/Dockerfile b/ecs/jskult-webapp/Dockerfile index d0285a04..22b6a814 100644 --- a/ecs/jskult-webapp/Dockerfile +++ b/ecs/jskult-webapp/Dockerfile @@ -1,15 +1,15 @@ -FROM python:3.9 +FROM python:3.12-slim-bookworm ENV TZ="Asia/Tokyo" +# pythonの標準出力をバッファリングしないフラグ +ENV PYTHONUNBUFFERED=1 +# pythonのバイトコードを生成しないフラグ +ENV PYTHONDONTWRITEBYTECODE=1 WORKDIR /usr/src/app COPY Pipfile Pipfile.lock ./ RUN \ apt update -y && \ - # パッケージのセキュリティアップデートのみを適用するコマンド - apt install -y unattended-upgrades && \ - unattended-upgrades && \ - pip install --upgrade pip wheel setuptools && \ pip install pipenv --no-cache-dir && \ pipenv install --system --deploy && \ pip uninstall -y pipenv virtualenv-clone virtualenv diff --git a/ecs/jskult-webapp/Pipfile b/ecs/jskult-webapp/Pipfile index 5da9c0a0..7143dcef 100644 --- a/ecs/jskult-webapp/Pipfile +++ b/ecs/jskult-webapp/Pipfile @@ -33,7 +33,7 @@ autopep8 = "*" flake8 = "*" [requires] -python_version = "3.9" +python_version = "3.12" [pipenv] allow_prereleases = true diff --git a/ecs/jskult-webapp/Pipfile.lock 
b/ecs/jskult-webapp/Pipfile.lock index a2145fb9..3857abac 100644 --- a/ecs/jskult-webapp/Pipfile.lock +++ b/ecs/jskult-webapp/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "f727e8be45822a45479f4b39e614a35fe2d493378fef76db529e9ce4e452979d" + "sha256": "d029e060f273e27553163c9e6d72f59c96bab17641440e68aba9a9b10cbd87dd" }, "pipfile-spec": 6, "requires": { - "python_version": "3.9" + "python_version": "3.12" }, "sources": [ { @@ -34,27 +34,28 @@ }, "boto3": { "hashes": [ - "sha256:752d31105a45e3e01c8c68471db14ae439990b75a35e72b591ca528e2575b28f", - "sha256:d125cb11e22817f7a2581bade4bf7b75247b401888890239ceb5d3e902ccaf38" + "sha256:37e4b6b7f77f4cc476ea82eb76a502a289bb750eee96f7d07ec9bcec6592191a", + "sha256:417d0328fd3394ffb1c1f400d4277d45b0b86f48d2f088a02306474969344a47" ], "index": "pypi", - "version": "==1.37.37" + "markers": "python_version >= '3.9'", + "version": "==1.38.21" }, "botocore": { "hashes": [ - "sha256:3eadde6fed95c4cb469cc39d1c3558528b7fa76d23e7e16d4bddc77250431a64", - "sha256:eb730ff978f47c02f0c8ed07bccdc0db6d8fa098ed32ac31bee1da0e9be480d1" + "sha256:08d5e9c00e5cc9e0ae0e60570846011789dc7f1d4ea094b3f3e3f3ae1ff2063a", + "sha256:567b4d338114174d0b41857002a4b1e8efb68f1654ed9f3ec6c34ebdef5e9eaf" ], - "markers": "python_version >= '3.8'", - "version": "==1.37.37" + "markers": "python_version >= '3.9'", + "version": "==1.38.21" }, "certifi": { "hashes": [ - "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", - "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe" + "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", + "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" ], "markers": "python_version >= '3.6'", - "version": "==2025.1.31" + "version": "==2025.4.26" }, "cffi": { "hashes": [ @@ -126,106 +127,106 @@ "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", 
"sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b" ], - "markers": "platform_python_implementation != 'PyPy'", + "markers": "python_version >= '3.8'", "version": "==1.17.1" }, "charset-normalizer": { "hashes": [ - "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", - "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa", - "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a", - "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", - "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b", - "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", - "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", - "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", - "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", - "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", - "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", - "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", - "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", - "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", - "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", - "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", - "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", - "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", - "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", - "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", - "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e", - "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a", - 
"sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4", - "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca", - "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", - "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", - "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", - "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", - "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", - "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", - "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", - "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", - "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", - "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", - "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", - "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd", - "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c", - "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", - "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", - "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", - "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", - "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824", - "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", - "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf", - "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487", - "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d", - "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd", - "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", 
- "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534", - "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", - "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", - "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", - "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd", - "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", - "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9", - "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", - "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", - "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d", - "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", - "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", - "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", - "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", - "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", - "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", - "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8", - "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", - "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", - "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", - "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", - "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", - "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", - "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", - "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", - 
"sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", - "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", - "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", - "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", - "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e", - "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6", - "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", - "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", - "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e", - "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", - "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", - "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c", - "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", - "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", - "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089", - "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", - "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e", - "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", - "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616" + "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", + "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45", + "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", + "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", + "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", + "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", + "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d", 
+ "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", + "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184", + "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", + "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b", + "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64", + "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", + "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", + "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", + "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344", + "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58", + "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", + "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", + "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", + "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", + "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", + "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", + "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", + "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", + "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1", + "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01", + "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", + "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58", + "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", + "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", + "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2", + 
"sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a", + "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", + "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", + "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5", + "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb", + "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f", + "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", + "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", + "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", + "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", + "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7", + "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", + "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455", + "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", + "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4", + "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", + "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", + "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", + "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", + "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", + "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", + "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", + "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", + "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", + "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", + "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa", 
+ "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", + "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", + "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", + "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", + "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", + "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", + "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02", + "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", + "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", + "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", + "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", + "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", + "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", + "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", + "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681", + "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", + "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", + "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a", + "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", + "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", + "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", + "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", + "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027", + "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", + "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", + 
"sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", + "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", + "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", + "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", + "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da", + "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", + "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f", + "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", + "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f" ], "markers": "python_version >= '3.7'", - "version": "==3.4.1" + "version": "==3.4.2" }, "click": { "hashes": [ @@ -237,44 +238,46 @@ }, "cryptography": { "hashes": [ - "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390", - "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41", - "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", - "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5", - "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", - "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d", - "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", - "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", - "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", - "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", - "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", - "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", - "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", - "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", - 
"sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", - "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", - "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562", - "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", - "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", - "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", - "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d", - "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471", - "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", - "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa", - "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb", - "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", - "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", - "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", - "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", - "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", - "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", - "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615", - "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", - "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", - "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308" + "sha256:057723b79752a142efbc609e90b0dff27b0361ccbee3bd48312d70f5cdf53b78", + "sha256:05c2385b1f5c89a17df19900cfb1345115a77168f5ed44bdf6fd3de1ce5cc65b", + "sha256:08281de408e7eb71ba3cd5098709a356bfdf65eebd7ee7633c3610f0aa80d79b", + "sha256:10d68763892a7b19c22508ab57799c4423c7c8cd61d7eee4c5a6a55a46511949", + "sha256:1655d3a76e3dedb683c982a6c3a2cbfae2d08f47a48ec5a3d58db52b3d29ea6f", 
+ "sha256:18f8084b7ca3ce1b8d38bdfe33c48116edf9a08b4d056ef4a96dceaa36d8d965", + "sha256:2cb03a944a1a412724d15a7c051d50e63a868031f26b6a312f2016965b661942", + "sha256:4142e20c29224cec63e9e32eb1e6014fb285fe39b7be66b3564ca978a3a8afe9", + "sha256:463096533acd5097f8751115bc600b0b64620c4aafcac10c6d0041e6e68f88fe", + "sha256:48caa55c528617fa6db1a9c3bf2e37ccb31b73e098ac2b71408d1f2db551dde4", + "sha256:49af56491473231159c98c2c26f1a8f3799a60e5cf0e872d00745b858ddac9d2", + "sha256:4cc31c66411e14dd70e2f384a9204a859dc25b05e1f303df0f5326691061b839", + "sha256:501de1296b2041dccf2115e3c7d4947430585601b251b140970ce255c5cfb985", + "sha256:59c0c8f043dd376bbd9d4f636223836aed50431af4c5a467ed9bf61520294627", + "sha256:614bca7c6ed0d8ad1dce683a6289afae1f880675b4090878a0136c3da16bc693", + "sha256:61a8b1bbddd9332917485b2453d1de49f142e6334ce1d97b7916d5a85d179c84", + "sha256:7429936146063bd1b2cfc54f0e04016b90ee9b1c908a7bed0800049cbace70eb", + "sha256:7c73968fbb7698a4c5d6160859db560d3aac160edde89c751edd5a8bc6560c88", + "sha256:80303ee6a02ef38c4253160446cbeb5c400c07e01d4ddbd4ff722a89b736d95a", + "sha256:965611880c3fa8e504b7458484c0697e00ae6e937279cd6734fdaa2bc954dc49", + "sha256:9a900036b42f7324df7c7ad9569eb92ba0b613cf699160dd9c2154b24fd02f8e", + "sha256:9cfd1399064b13043082c660ddd97a0358e41c8b0dc7b77c1243e013d305c344", + "sha256:a8ec324711596fbf21837d3a5db543937dd84597d364769b46e0102250023f77", + "sha256:a9727a21957d3327cf6b7eb5ffc9e4b663909a25fea158e3fcbc49d4cdd7881b", + "sha256:b19f4b28dd2ef2e6d600307fee656c00825a2980c4356a7080bd758d633c3a6f", + "sha256:b2de529027579e43b6dc1f805f467b102fb7d13c1e54c334f1403ee2b37d0059", + "sha256:c0c000c1a09f069632d8a9eb3b610ac029fcc682f1d69b758e625d6ee713f4ed", + "sha256:cdafb86eb673c3211accffbffdb3cdffa3aaafacd14819e0898d23696d18e4d3", + "sha256:d2a90ce2f0f5b695e4785ac07c19a58244092f3c85d57db6d8eb1a2b26d2aad6", + "sha256:d784d57b958ffd07e9e226d17272f9af0c41572557604ca7554214def32c26bf", + 
"sha256:d891942592789fa0ab71b502550bbadb12f540d7413d7d7c4cef4b02af0f5bc6", + "sha256:dc7693573f16535428183de8fd27f0ca1ca37a51baa0b41dc5ed7b3d68fe80e2", + "sha256:ddb8d01aa900b741d6b7cc585a97aff787175f160ab975e21f880e89d810781a", + "sha256:e328357b6bbf79928363dbf13f4635b7aac0306afb7e5ad24d21d0c5761c3253", + "sha256:e86c8d54cd19a13e9081898b3c24351683fd39d726ecf8e774aaa9d8d96f5f3a", + "sha256:e9e4bdcd70216b08801e267c0b563316b787f957a46e215249921f99288456f9", + "sha256:f169469d04a23282de9d0be349499cb6683b6ff1b68901210faacac9b0c24b7d" ], "markers": "python_version >= '3.7' and python_full_version not in '3.9.0, 3.9.1'", - "version": "==44.0.2" + "version": "==45.0.2" }, "et-xmlfile": { "hashes": [ @@ -286,11 +289,11 @@ }, "exceptiongroup": { "hashes": [ - "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", - "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" + "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", + "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88" ], - "markers": "python_version < '3.11'", - "version": "==1.2.2" + "markers": "python_version >= '3.7'", + "version": "==1.3.0" }, "fastapi": { "hashes": [ @@ -298,68 +301,69 @@ "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==0.115.12" }, "greenlet": { "hashes": [ - "sha256:0010e928e1901d36625f21d008618273f9dda26b516dbdecf873937d39c9dff0", - "sha256:04e781447a4722e30b4861af728cb878d73a3df79509dc19ea498090cea5d204", - "sha256:0e14541f9024a280adb9645143d6a0a51fda6f7c5695fd96cb4d542bb563442f", - "sha256:144283ad88ed77f3ebd74710dd419b55dd15d18704b0ae05935766a93f5671c5", - "sha256:17fd241c0d50bacb7ce8ff77a30f94a2d0ca69434ba2e0187cf95a5414aeb7e1", - "sha256:18adc14ab154ca6e53eecc9dc50ff17aeb7ba70b7e14779b26e16d71efa90038", - "sha256:199453d64b02d0c9d139e36d29681efd0e407ed8e2c0bf89d88878d6a787c28f", - 
"sha256:1cf89e2d92bae0d7e2d6093ce0bed26feeaf59a5d588e3984e35fcd46fc41090", - "sha256:1d2d43bd711a43db8d9b9187500e6432ddb4fafe112d082ffabca8660a9e01a7", - "sha256:1dcb1108449b55ff6bc0edac9616468f71db261a4571f27c47ccf3530a7f8b97", - "sha256:211a9721f540e454a02e62db7956263e9a28a6cf776d4b9a7213844e36426333", - "sha256:23f56a0103deb5570c8d6a0bb4ddf8a7a28931973ad7ed7a883460a67e599b32", - "sha256:2688b3bd3198cc4bad7a79648a95fee088c24a0f6abd05d3639e6c3040ded015", - "sha256:2919b126eeb63ca5fa971501cd20cd6cdb5522369a8e39548bbc73a3e10b8b41", - "sha256:29449a2b82ed7ce11f8668c31ef20d31e9d88cd8329eb933098fab5a8608a93a", - "sha256:2b986f1a6467710e7ffeeeac1777da0318c95bbfcc467acbd0bd35abc775f558", - "sha256:33ea7e7269d6f7275ce31f593d6dcfedd97539c01f63fbdc8d84e493e20b1b2c", - "sha256:397b6bbda06f8fe895893d96218cd6f6d855a6701dc45012ebe12262423cec8b", - "sha256:39801e633a978c3f829f21022501e7b0c3872683d7495c1850558d1a6fb95ed0", - "sha256:4174fa6fa214e8924cedf332b6f2395ba2b9879f250dacd3c361b2fca86f58af", - "sha256:430cba962c85e339767235a93450a6aaffed6f9c567e73874ea2075f5aae51e1", - "sha256:47aeadd1e8fbdef8fdceb8fb4edc0cbb398a57568d56fd68f2bc00d0d809e6b6", - "sha256:58ef3d637c54e2f079064ca936556c4af3989144e4154d80cfd4e2a59fc3769c", - "sha256:598da3bd464c2cc411b723e3d4afc27b13c219ac077ba897bac88443ae45f5ec", - "sha256:5be69cd50994b8465c3ad1467f9e63001f76e53a89440ad4440d1b6d52591280", - "sha256:5e57ff52315bfc0c5493917f328b8ba3ae0c0515d94524453c4d24e7638cbb53", - "sha256:6005f7a86de836a1dc4b8d824a2339cdd5a1ca7cb1af55ea92575401f9952f4c", - "sha256:6017a4d430fad5229e397ad464db504ae70cb7b903757c4688cee6c25d6ce8d8", - "sha256:60e77242e38e99ecaede853755bbd8165e0b20a2f1f3abcaa6f0dceb826a7411", - "sha256:6fad8a9ca98b37951a053d7d2d2553569b151cd8c4ede744806b94d50d7f8f73", - "sha256:7154b13ef87a8b62fc05419f12d75532d7783586ad016c57b5de8a1c6feeb517", - "sha256:78b721dfadc60e3639141c0e1f19d23953c5b4b98bfcaf04ce40f79e4f01751c", - "sha256:7b162de2fb61b4c7f4b5d749408bf3280cae65db9b5a6aaf7f922ac829faa67c", 
- "sha256:7b17a26abc6a1890bf77d5d6b71c0999705386b00060d15c10b8182679ff2790", - "sha256:7d08b88ee8d506ca1f5b2a58744e934d33c6a1686dd83b81e7999dfc704a912f", - "sha256:7f163d04f777e7bd229a50b937ecc1ae2a5b25296e6001445e5433e4f51f5191", - "sha256:7fee6f518868e8206c617f4084a83ad4d7a3750b541bf04e692dfa02e52e805d", - "sha256:82a68a25a08f51fc8b66b113d1d9863ee123cdb0e8f1439aed9fc795cd6f85cf", - "sha256:844acfd479ee380f3810415e682c9ee941725fb90b45e139bb7fd6f85c6c9a30", - "sha256:8a8940a8d301828acd8b9f3f85db23069a692ff2933358861b19936e29946b95", - "sha256:8b3538711e7c0efd5f7a8fc1096c4db9598d6ed99dc87286b31e4ce9f8a8da67", - "sha256:8fd2583024ff6cd5d4f842d446d001de4c4fe1264fdb5f28ddea28f6488866df", - "sha256:a0bc5776ac2831c022e029839bf1b9d3052332dcf5f431bb88c8503e27398e31", - "sha256:b2392cc41eeed4055978c6b52549ccd9effd263bb780ffd639c0e1e7e2055ab0", - "sha256:b7a7b7f2bad3ca72eb2fa14643f1c4ca11d115614047299d89bc24a3b11ddd09", - "sha256:b86a3ccc865ae601f446af042707b749eebc297928ea7bd0c5f60c56525850be", - "sha256:b99de16560097b9984409ded0032f101f9555e1ab029440fc6a8b5e76dbba7ac", - "sha256:cd37273dc7ca1d5da149b58c8b3ce0711181672ba1b09969663905a765affe21", - "sha256:ce531d7c424ef327a391de7a9777a6c93a38e1f89e18efa903a1c4ba11f85905", - "sha256:d3f32d7c70b1c26844fd0e4e56a1da852b493e4e1c30df7b07274a1e5a9b599e", - "sha256:d97bc1be4bad83b70d8b8627ada6724091af41139616696e59b7088f358583b9", - "sha256:e61d426969b68b2170a9f853cc36d5318030494576e9ec0bfe2dc2e2afa15a68", - "sha256:e8622b33d8694ec373ad55050c3d4e49818132b44852158442e1931bb02af336", - "sha256:e8ac9a2c20fbff3d0b853e9ef705cdedb70d9276af977d1ec1cde86a87a4c821", - "sha256:ee59db626760f1ca8da697a086454210d36a19f7abecc9922a2374c04b47735b" + "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057", + "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207", + "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3", + 
"sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4", + "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b", + "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc", + "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825", + "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370", + "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708", + "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457", + "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763", + "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf", + "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe", + "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e", + "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d", + "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59", + "sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa", + "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e", + "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51", + "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3", + "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e", + "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61", + "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5", + "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74", + "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907", + "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275", + "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f", + "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59", + "sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c", 
+ "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf", + "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b", + "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819", + "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65", + "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e", + "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240", + "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5", + "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659", + "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", + "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec", + "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8", + "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418", + "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce", + "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6", + "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7", + "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6", + "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f", + "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13", + "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b", + "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068", + "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325", + "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330", + "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834", + "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b", + "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5", + 
"sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421" ], - "markers": "python_version < '3.14' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", - "version": "==3.2.0" + "markers": "python_version >= '3.9'", + "version": "==3.2.2" }, "gunicorn": { "hashes": [ @@ -367,15 +371,16 @@ "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==23.0.0" }, "h11": { "hashes": [ - "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", - "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" + "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", + "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86" ], - "markers": "python_version >= '3.7'", - "version": "==0.14.0" + "markers": "python_version >= '3.8'", + "version": "==0.16.0" }, "httptools": { "hashes": [ @@ -440,6 +445,7 @@ "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==3.1.6" }, "jmespath": { @@ -557,6 +563,7 @@ "sha256:f617ebe1fdceadc2b87497c4d56f3c863155179488feb63f83a62cf95df37b69" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==0.0.13" }, "numpy": { @@ -608,6 +615,7 @@ "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==2.0.2" }, "openpyxl": { @@ -616,6 +624,7 @@ "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==3.1.5" }, "packaging": { @@ -672,6 +681,7 @@ "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319" ], 
"index": "pypi", + "markers": "python_version >= '3.9'", "version": "==2.2.3" }, "pycparser": { @@ -684,124 +694,128 @@ }, "pydantic": { "hashes": [ - "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", - "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f" + "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", + "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb" ], "index": "pypi", - "version": "==2.11.3" + "markers": "python_version >= '3.9'", + "version": "==2.11.4" }, "pydantic-core": { "hashes": [ - "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672", - "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1", - "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add", - "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068", - "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b", - "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505", - "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", - "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5", - "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e", - "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544", - "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", - "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a", - "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", - "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1", - "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", - "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83", - "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", - "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde", - 
"sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26", - "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896", - "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", - "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", - "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48", - "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a", - "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761", - "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7", - "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5", - "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5", - "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", - "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", - "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", - "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db", - "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33", - "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850", - "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", - "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2", - "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb", - "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02", - "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c", - "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77", - "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504", - "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", - "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24", - "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", 
- "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331", - "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396", - "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", - "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea", - "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b", - "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969", - "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b", - "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", - "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927", - "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc", - "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", - "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595", - "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", - "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", - "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe", - "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599", - "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e", - "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89", - "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6", - "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d", - "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", - "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5", - "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd", - "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", - "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", - 
"sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe", - "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", - "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c", - "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30", - "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e", - "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96", - "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f", - "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", - "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824", - "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde", - "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", - "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3", - "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568", - "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961", - "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", - "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", - "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5", - "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad", - "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", - "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", - "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383", - "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", - "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", - "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", - "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc", - "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5", 
- "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65", - "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", - "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89", - "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091" + "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", + "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", + "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", + "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", + "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", + "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", + "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", + "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", + "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", + "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", + "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", + "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", + "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", + "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", + "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", + "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", + "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", + "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", + "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", + "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", + "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", + 
"sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", + "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", + "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", + "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", + "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", + "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", + "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", + "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", + "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", + "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", + "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", + "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", + "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", + "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", + "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", + "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", + "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", + "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", + "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", + "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", + "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", + "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", + "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", + "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", + "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", + "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", 
+ "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", + "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", + "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", + "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", + "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", + "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", + "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", + "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", + "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", + "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", + "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", + "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", + "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", + "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", + "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", + "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", + "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", + "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", + "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", + "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", + "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", + "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", + "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", + "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", + "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", + 
"sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", + "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", + "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", + "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", + "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", + "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", + "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", + "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", + "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", + "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", + "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", + "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", + "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", + "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", + "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", + "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", + "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", + "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", + "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", + "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", + "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", + "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", + "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", + "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", + "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", + "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", 
+ "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d" ], "markers": "python_version >= '3.9'", - "version": "==2.33.1" + "version": "==2.33.2" }, "pyjwt": { - "extras": [], + "extras": [ + "crypto" + ], "hashes": [ "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==2.10.1" }, "pymysql": { @@ -810,6 +824,7 @@ "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==1.1.1" }, "pynamodb": { @@ -818,6 +833,7 @@ "sha256:a85ea1ee42d6cb53128ec910507406e7901532f605d28e2680016e25bcbd6828" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==6.0.2" }, "python-dateutil": { @@ -825,7 +841,7 @@ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.9.0.post0" }, "python-dotenv": { @@ -842,6 +858,7 @@ "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==0.0.20" }, "pytz": { @@ -916,22 +933,23 @@ "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==2.32.3" }, "s3transfer": { "hashes": [ - "sha256:757af0f2ac150d3c75bc4177a32355c3862a98d20447b69a0161812992fe0bd4", - "sha256:8c8aad92784779ab8688a61aefff3e28e9ebdce43142808eaa3f0b0f402f68b7" + "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", + "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" ], - "markers": 
"python_version >= '3.8'", - "version": "==0.11.5" + "markers": "python_version >= '3.9'", + "version": "==0.12.0" }, "six": { "hashes": [ "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.17.0" }, "sniffio": { @@ -944,66 +962,67 @@ }, "sqlalchemy": { "hashes": [ - "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", - "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", - "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", - "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", - "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", - "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", - "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", - "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", - "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", - "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", - "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", - "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", - "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", - "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", - "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", - "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", - "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", - "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", - 
"sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", - "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", - "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", - "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", - "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", - "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", - "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", - "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", - "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", - "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", - "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", - "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", - "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", - "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", - "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", - "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", - "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", - "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", - "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", - "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", - "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", - "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", - "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", - "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", - "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", - "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", 
- "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", - "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", - "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", - "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", - "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", - "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", - "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", - "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", - "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", - "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", - "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", - "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", - "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" + "sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5", + "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", + "sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b", + "sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b", + "sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348", + "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda", + "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5", + "sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2", + "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29", + "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", + "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", + "sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826", + 
"sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", + "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", + "sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45", + "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", + "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", + "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", + "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", + "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", + "sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71", + "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11", + "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", + "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", + "sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8", + "sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd", + "sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814", + "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08", + "sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea", + "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30", + "sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda", + "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", + "sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923", + "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", + "sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036", + "sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3", + "sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f", + "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", 
+ "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", + "sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2", + "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", + "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", + "sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769", + "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", + "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", + "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b", + "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747", + "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", + "sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440", + "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", + "sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2", + "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", + "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", + "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", + "sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd", + "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", + "sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6" ], "index": "pypi", - "version": "==2.0.40" + "markers": "python_version >= '3.7'", + "version": "==2.0.41" }, "starlette": { "hashes": [ @@ -1023,11 +1042,11 @@ }, "typing-inspection": { "hashes": [ - "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", - "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122" + "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", + "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28" ], "markers": "python_version 
>= '3.9'", - "version": "==0.4.0" + "version": "==0.4.1" }, "tzdata": { "hashes": [ @@ -1046,12 +1065,15 @@ "version": "==1.26.20" }, "uvicorn": { - "extras": [], + "extras": [ + "standard" + ], "hashes": [ "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==0.34.2" }, "uvloop": { @@ -1255,6 +1277,7 @@ "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88" ], "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", "version": "==2.0.1" } }, @@ -1265,6 +1288,7 @@ "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==2.3.2" }, "flake8": { @@ -1273,6 +1297,7 @@ "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==7.2.0" }, "mccabe": { @@ -1334,7 +1359,7 @@ "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7" ], - "markers": "python_version < '3.11'", + "markers": "python_version >= '3.8'", "version": "==2.2.1" } } diff --git a/ecs/jskult-webapp/README.md b/ecs/jskult-webapp/README.md index c65cbf83..dbce1011 100644 --- a/ecs/jskult-webapp/README.md +++ b/ecs/jskult-webapp/README.md @@ -47,7 +47,7 @@ ## 実行 - VSCode上で「F5」キーを押下すると、Webアプリケーションのサーバーが起動する -- 「」にアクセスし、ログイン画面が表示されていれば成功 +- 「」にアクセスし、ログイン画面が表示されていれば成功 ## フォルダ構成 From dd974f1d88efedab8f784dfe3fee23a5ba7e3237 Mon Sep 17 00:00:00 2001 From: yono Date: Thu, 22 May 2025 17:31:08 +0900 Subject: [PATCH 03/30] =?UTF-8?q?fix:=E3=83=9E=E3=82=B9=E3=82=BF=E3=83=A1?= =?UTF-8?q?=E3=83=B3=E3=83=86=E5=BB=83=E6=AD=A2=E3=81=AB=E4=BC=B4=E3=81=86?= =?UTF-8?q?=E3=83=90=E3=82=B0=E5=AF=BE=E5=BF=9C?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/templates/_header.html | 2 +- ecs/jskult-webapp/src/templates/menu.html | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-webapp/src/templates/_header.html b/ecs/jskult-webapp/src/templates/_header.html index 95ac2464..62f6e2ab 100644 --- a/ecs/jskult-webapp/src/templates/_header.html +++ b/ecs/jskult-webapp/src/templates/_header.html @@ -15,5 +15,5 @@ - + \ No newline at end of file diff --git a/ecs/jskult-webapp/src/templates/menu.html b/ecs/jskult-webapp/src/templates/menu.html index 26c53ca1..ab3b6d56 100644 --- a/ecs/jskult-webapp/src/templates/menu.html +++ b/ecs/jskult-webapp/src/templates/menu.html @@ -23,7 +23,7 @@
生物由来データ参照は
日次バッチ処理中のため利用出来ません
{% endif %} {% endif %} - + {% endif %} + #}

Logout From de1764d76464a009a2c513461d575dbcb05a7764 Mon Sep 17 00:00:00 2001 From: yono Date: Thu, 22 May 2025 22:35:10 +0900 Subject: [PATCH 04/30] =?UTF-8?q?style:=20=E5=89=8A=E9=99=A4=E4=BA=88?= =?UTF-8?q?=E5=AE=9A=E7=AE=87=E6=89=80=E3=81=ABTODO=E3=82=92=E4=BB=98?= =?UTF-8?q?=E4=B8=8E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/master_mainte.py | 2 +- ecs/jskult-webapp/src/model/db/master_mente_count.py | 2 +- ecs/jskult-webapp/src/model/internal/master_mainte_csv.py | 2 +- .../model/internal/master_mainte_emp_chg_inst_function.py | 2 +- .../src/model/request/master_mainte_csvdl.py | 2 +- .../src/model/request/master_mainte_csvup.py | 2 +- .../src/model/view/inst_emp_csv_download_view_model.py | 2 +- .../src/model/view/inst_emp_csv_upload_view_model.py | 2 +- .../src/model/view/master_mainte_menu_view_model.py | 2 +- .../src/model/view/table_override_view_model.py | 2 +- .../src/repositories/bu_master_cd_repository.py | 2 +- .../src/repositories/emp_chg_inst_repository.py | 2 +- .../src/repositories/emp_master_repository.py | 2 +- .../src/repositories/generic_kbn_mst_repository.py | 2 +- ecs/jskult-webapp/src/repositories/mst_inst_repository.py | 2 +- ecs/jskult-webapp/src/services/batch_status_service.py | 2 +- ecs/jskult-webapp/src/services/master_mainte_service.py | 2 +- ecs/jskult-webapp/src/static/css/masterMainte.css | 2 +- .../src/static/function/businessLogicScript.js | 4 ++-- ecs/jskult-webapp/src/system_var/constants.py | 8 ++++---- ecs/jskult-webapp/src/system_var/environment.py | 2 +- ecs/jskult-webapp/src/templates/instEmpCsvDL.html | 2 +- ecs/jskult-webapp/src/templates/instEmpCsvUL.html | 2 +- ecs/jskult-webapp/src/templates/masterMainteMenu.html | 2 +- ecs/jskult-webapp/src/templates/menu.html | 2 +- ecs/jskult-webapp/src/templates/tableOverride.html | 2 +- 26 files changed, 30 insertions(+), 30 deletions(-) diff --git 
a/ecs/jskult-webapp/src/controller/master_mainte.py b/ecs/jskult-webapp/src/controller/master_mainte.py index 89313271..48002fac 100644 --- a/ecs/jskult-webapp/src/controller/master_mainte.py +++ b/ecs/jskult-webapp/src/controller/master_mainte.py @@ -1,4 +1,4 @@ -# # ファイル削除予定 +# TODO: ファイル削除予定 # from io import BytesIO, TextIOWrapper # from typing import Optional diff --git a/ecs/jskult-webapp/src/model/db/master_mente_count.py b/ecs/jskult-webapp/src/model/db/master_mente_count.py index a426f43b..55a348b2 100644 --- a/ecs/jskult-webapp/src/model/db/master_mente_count.py +++ b/ecs/jskult-webapp/src/model/db/master_mente_count.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from typing import Optional # from src.model.db.base_db_model import BaseDBModel diff --git a/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py b/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py index 93cc3bca..9110af12 100644 --- a/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py +++ b/ecs/jskult-webapp/src/model/internal/master_mainte_csv.py @@ -1,4 +1,4 @@ -# # ファイル削除予定 +# TODO:ファイル削除予定 # import csv # import json # from abc import ABCMeta, abstractmethod diff --git a/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py b/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py index 546d4a80..45c09e36 100644 --- a/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py +++ b/ecs/jskult-webapp/src/model/internal/master_mainte_emp_chg_inst_function.py @@ -1,4 +1,4 @@ -# # ファイル削除予定 +# TODO:ファイル削除予定 # from abc import ABCMeta, abstractmethod # from src.repositories.emp_chg_inst_repository import EmpChgInstRepository # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py b/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py index 450c7bfb..59826b1e 100644 --- a/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py +++ 
b/ecs/jskult-webapp/src/model/request/master_mainte_csvdl.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO:ファイル削除予定 # from typing import Optional # from fastapi import Form diff --git a/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py b/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py index 26de9648..970746a1 100644 --- a/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py +++ b/ecs/jskult-webapp/src/model/request/master_mainte_csvup.py @@ -1,4 +1,4 @@ -# # ファイル削除予定 +# TODO: ファイル削除予定 # from typing import Optional, Annotated # from fastapi import Form diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py index 4bbc8ec5..0e2d572e 100644 --- a/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py @@ -1,4 +1,4 @@ -#ファイル削除予定 +# TODO: ファイル削除予定 # from pydantic import BaseModel # from src.util.string_util import is_not_empty diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py index 4ceadd31..58731ee3 100644 --- a/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py @@ -1,4 +1,4 @@ -#ファイル削除予定 +# TODO: ファイル削除予定 # from pydantic import BaseModel # from src.system_var import constants diff --git a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py index 68da3996..d7a55342 100644 --- a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py +++ b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from pydantic import BaseModel diff --git a/ecs/jskult-webapp/src/model/view/table_override_view_model.py 
b/ecs/jskult-webapp/src/model/view/table_override_view_model.py index 1ef3d6ac..a8c835f5 100644 --- a/ecs/jskult-webapp/src/model/view/table_override_view_model.py +++ b/ecs/jskult-webapp/src/model/view/table_override_view_model.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from pydantic import BaseModel diff --git a/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py b/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py index 3580d2f7..69fab1f0 100644 --- a/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py +++ b/ecs/jskult-webapp/src/repositories/bu_master_cd_repository.py @@ -1,4 +1,4 @@ -# # file削除予定 +# TODO: file削除予定 # from src.repositories.base_repository import BaseRepository # from src.model.db.master_mente_count import MasterMenteCountModel # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py b/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py index 64097cb1..cdffb464 100644 --- a/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/emp_chg_inst_repository.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from src.db import sql_condition as condition # from src.db.sql_condition import SQLCondition # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/repositories/emp_master_repository.py b/ecs/jskult-webapp/src/repositories/emp_master_repository.py index c3f567c1..1504fce1 100644 --- a/ecs/jskult-webapp/src/repositories/emp_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/emp_master_repository.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from src.repositories.base_repository import BaseRepository # from src.model.db.master_mente_count import MasterMenteCountModel # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py 
b/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py index 7e4dc0dc..2d1e8e4e 100644 --- a/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py +++ b/ecs/jskult-webapp/src/repositories/generic_kbn_mst_repository.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from src.repositories.base_repository import BaseRepository # from src.model.db.master_mente_count import MasterMenteCountModel # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/repositories/mst_inst_repository.py b/ecs/jskult-webapp/src/repositories/mst_inst_repository.py index 44f6e426..2e07212d 100644 --- a/ecs/jskult-webapp/src/repositories/mst_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/mst_inst_repository.py @@ -1,4 +1,4 @@ -# ファイル削除予定 +# TODO: ファイル削除予定 # from src.repositories.base_repository import BaseRepository # from src.model.db.master_mente_count import MasterMenteCountModel # from src.logging.get_logger import get_logger diff --git a/ecs/jskult-webapp/src/services/batch_status_service.py b/ecs/jskult-webapp/src/services/batch_status_service.py index 0d7fa372..c3ccd450 100644 --- a/ecs/jskult-webapp/src/services/batch_status_service.py +++ b/ecs/jskult-webapp/src/services/batch_status_service.py @@ -40,7 +40,7 @@ class BatchStatusService(BaseService): self.__assert_record_exists() return self.hdke_table_record.bch_actf == constants.BATCH_STATUS_PROCESSING - # 削除予定 + # TODO: 削除予定 # def is_dump_processing(self): # """dump処理処理中かどうかを判定する""" diff --git a/ecs/jskult-webapp/src/services/master_mainte_service.py b/ecs/jskult-webapp/src/services/master_mainte_service.py index 513a4008..ec2506df 100644 --- a/ecs/jskult-webapp/src/services/master_mainte_service.py +++ b/ecs/jskult-webapp/src/services/master_mainte_service.py @@ -1,4 +1,4 @@ -# # ファイル削除予定 +# TODO: ファイル削除予定 # import os # import json # import html diff --git a/ecs/jskult-webapp/src/static/css/masterMainte.css b/ecs/jskult-webapp/src/static/css/masterMainte.css index 
ae8eb63e..46f4c13e 100644 --- a/ecs/jskult-webapp/src/static/css/masterMainte.css +++ b/ecs/jskult-webapp/src/static/css/masterMainte.css @@ -1,4 +1,4 @@ -/* ファイル削除予定 */ +/* TODO: ファイル削除予定 */ /* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ /* @see https://bootstrap-guide.com/content/reboot#page-defaults */ table { diff --git a/ecs/jskult-webapp/src/static/function/businessLogicScript.js b/ecs/jskult-webapp/src/static/function/businessLogicScript.js index 15783b66..07d0ce22 100644 --- a/ecs/jskult-webapp/src/static/function/businessLogicScript.js +++ b/ecs/jskult-webapp/src/static/function/businessLogicScript.js @@ -289,7 +289,7 @@ function checkNumberOnlyForm($this) $this.value=str; } -// TODO 削除予定 +// TODO: 削除予定 // メニューへボタンの関数 // 機能概要:マスターメンテメニュー画面に遷移する // function backToMainteMenu(loadingElemId = '_loading'){ @@ -305,7 +305,7 @@ function checkNumberOnlyForm($this) // return result; // } -// TODO 削除予定 function formInsertBtDisabled(){ +// TODO: 削除予定 function formInsertBtDisabled(){ // var validFlg = false; // if(document.getElementById("csvFile").value === ""){ // validFlg = true; diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index 8a321898..6a26b065 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -7,7 +7,7 @@ PERMISSION_DISABLED = 0 # 日付テーブル.バッチ処理ステータス:未処理 BATCH_STATUS_PROCESSING = '1' -# 削除予定 +# TODO: 削除予定 # 日付テーブル.dump取得状態区分:未処理 # DUMP_STATUS_UNPROCESSED = '0' @@ -77,7 +77,7 @@ LOGOUT_REASON_MESSAGE_MAP = { LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。
再度Loginするか、
管理者に問い合わせてください。', LOGOUT_REASON_LOGIN_FAILED_LIMIT_EXCEEDED: 'ログイン失敗回数の上限を超えましたので
アカウントをロックしました。
管理者に連絡してください' } -#削除予定 +# TODO: 削除予定 # 新規施設担当者登録CSV(マスターメンテ) # NEW_INST_EMP_CSV_LOGICAL_NAMES = [ # '施設コード', @@ -112,7 +112,7 @@ LOGOUT_REASON_MESSAGE_MAP = { # 適用終了日の列No # CSV_NEW_END_DATE = 9 -#削除予定 +# TODO: 削除予定 # 施設担当者変更登録CSV(マスターメンテ) # CHANGE_INST_CSV_LOGICAL_NAMES = [ # 'ビジネスユニットコード', @@ -162,7 +162,7 @@ LOGOUT_REASON_MESSAGE_MAP = { # # コメントの列No # CSV_CHANGE_COMMENT = 14 -# 削除予定 +# TODO: 削除予定 # CSVアップロードテーブル名(マスターメンテ) # CSV_REAL_TABLE_NAME = '本番テーブル' # CSV_CHANGE_TABLE_NAME = 'ダミーテーブル' diff --git a/ecs/jskult-webapp/src/system_var/environment.py b/ecs/jskult-webapp/src/system_var/environment.py index 23034fdf..3fda53bf 100644 --- a/ecs/jskult-webapp/src/system_var/environment.py +++ b/ecs/jskult-webapp/src/system_var/environment.py @@ -11,7 +11,7 @@ COGNITO_CLIENT_SECRET = os.environ['COGNITO_CLIENT_SECRET'] AWS_REGION = os.environ['AWS_REGION'] SESSION_TABLE_NAME = os.environ['SESSION_TABLE_NAME'] BIO_ACCESS_LOG_BUCKET = os.environ['BIO_ACCESS_LOG_BUCKET'] -# 削除予定 MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET'] +# TODO: 削除予定 MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET'] DB_HOST = os.environ['DB_HOST'] DB_PORT = int(os.environ['DB_PORT']) diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html index 6dac963e..c8b01d9b 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html @@ -1,4 +1,4 @@ - + diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html index 6525e6f0..dabb564e 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html @@ -1,4 +1,4 @@ - + diff --git a/ecs/jskult-webapp/src/templates/masterMainteMenu.html b/ecs/jskult-webapp/src/templates/masterMainteMenu.html index 610e6ad4..924b0fdf 100644 --- a/ecs/jskult-webapp/src/templates/masterMainteMenu.html +++ 
b/ecs/jskult-webapp/src/templates/masterMainteMenu.html @@ -1,4 +1,4 @@ - + diff --git a/ecs/jskult-webapp/src/templates/menu.html b/ecs/jskult-webapp/src/templates/menu.html index ab3b6d56..4449b7ce 100644 --- a/ecs/jskult-webapp/src/templates/menu.html +++ b/ecs/jskult-webapp/src/templates/menu.html @@ -23,7 +23,7 @@
生物由来データ参照は
日次バッチ処理中のため利用出来ません
{% endif %} {% endif %} - {#  TODO 削除予定 {% if menu.is_available_master_maintenance_menu() %} + {# TODO: 削除予定 {% if menu.is_available_master_maintenance_menu() %} {% if menu.is_batch_processing() %}
マスターメンテメニューは
日次バッチ処理中のため利用出来ません
{% elif menu.is_backup_processing() %} diff --git a/ecs/jskult-webapp/src/templates/tableOverride.html b/ecs/jskult-webapp/src/templates/tableOverride.html index 2303efc9..9e2b6328 100644 --- a/ecs/jskult-webapp/src/templates/tableOverride.html +++ b/ecs/jskult-webapp/src/templates/tableOverride.html @@ -1,4 +1,4 @@ - + From ebaa71842d5a0e7d935a45b9c4c40016e2dc7036 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 10:20:41 +0900 Subject: [PATCH 05/30] first commit --- .../jskult-batch-daily/.dockerignore | 12 + .../jskult-batch-daily/.env.example | 26 ++ .../jskult-batch-daily/.gitignore | 10 + .../jskult-batch-daily/.vscode/launch.json | 16 + .../.vscode/recommended_settings.json | 31 ++ .../jskult-batch-daily/Dockerfile | 20 + .../jskult-batch-daily/Pipfile | 29 ++ .../jskult-batch-daily/Pipfile.lock | 397 ++++++++++++++++++ .../jskult-batch-daily/README.md | 292 +++++++++++++ .../jskult-batch-daily/entrypoint.py | 10 + .../jskult-batch-daily/pytest.ini | 3 + .../jskult-batch-daily/src/__init__.py | 0 .../jskult-batch-daily/src/aws/__init__.py | 0 .../jskult-batch-daily/src/aws/s3.py | 185 ++++++++ .../src/batch/common/__init__.py | 0 .../jskult-batch-daily/src/db/__init__.py | 0 .../jskult-batch-daily/src/db/database.py | 195 +++++++++ .../jskult-batch-daily/src/error/__init__.py | 0 .../src/error/exceptions.py | 10 + .../src/jobctrl_dcfInstMergeIo.py | 4 + .../src/logging/get_logger.py | 37 ++ .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 17 + .../src/system_var/environment.py | 38 ++ 24 files changed, 1332 insertions(+) create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json create mode 100644 
ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore 
new file mode 100644 index 00000000..8b9da402 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore @@ -0,0 +1,12 @@ +tests/* +.coverage +.env +.env.example +.report/* +.vscode/* +.pytest_cache/* +*/__pychache__/* +Dockerfile +pytest.ini +README.md +*.sql diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example new file mode 100644 index 00000000..500f843d --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example @@ -0,0 +1,26 @@ +DB_HOST=************ +DB_PORT=************ +DB_USERNAME=************ +DB_PASSWORD=************ +DB_SCHEMA=src05 +LOG_LEVEL=INFO +ULTMARC_DATA_BUCKET=**************** +ULTMARC_DATA_FOLDER=recv +JSKULT_BACKUP_BUCKET=**************** +ULTMARC_BACKUP_FOLDER=ultmarc +VJSK_BACKUP_FOLDER=vjsk +JSKULT_CONFIG_BUCKET=********************** +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +VJSK_DATA_SEND_FOLDER=send +VJSK_DATA_RECEIVE_FOLDER=recv +VJSK_DATA_BUCKET=************* +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt +JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert +JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore new file mode 100644 index 00000000..bd0b37f8 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore @@ -0,0 +1,10 @@ +.vscode/settings.json +.env + +# python +__pycache__ + +# python test +.pytest_cache +.coverage +.report/ \ No newline at end of file diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json new file mode 100644 index 00000000..9dbaa9c6 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // IntelliSense を使用して利用可能な属性を学べます。 + // 既存の属性の説明をホバーして表示します。 + // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(DEBUG)jskult batch daily", + "type": "python", + "request": "launch", + "program": "entrypoint.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json new file mode 100644 index 00000000..2fde8732 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json @@ -0,0 +1,31 @@ +{ + "[python]": { + "editor.defaultFormatter": null, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + // 自身の環境に合わせて変えてください + "python.defaultInterpreterPath": "", + "python.linting.lintOnSave": true, + "python.linting.enabled": true, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": [ + "--max-line-length=200", + "--ignore=F541" + ], + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Path": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length", "200", + "--ignore=F541" + ], + "python.testing.pytestArgs": [ + "tests/batch/" + ], + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile new file 
mode 100644 index 00000000..fc0fde90 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim-bookworm + +ENV TZ="Asia/Tokyo" +# pythonの標準出力をバッファリングしないフラグ +ENV PYTHONUNBUFFERED=1 +# pythonのバイトコードを生成しないフラグ +ENV PYTHONDONTWRITEBYTECODE=1 + +WORKDIR /usr/src/app +COPY Pipfile Pipfile.lock ./ +RUN \ + apt update -y && \ + pip install pipenv --no-cache-dir && \ + pipenv install --system --deploy && \ + pip uninstall -y pipenv virtualenv-clone virtualenv + +COPY src ./src +COPY entrypoint.py entrypoint.py + +CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile new file mode 100644 index 00000000..a40e6c17 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile @@ -0,0 +1,29 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[scripts] +"test:ultmarc" = "pytest tests/batch/ultmarc/" +"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" +"test:vjsk" = "pytest tests/batch/vjsk/" +"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" + +[packages] +boto3 = "*" +PyMySQL = "*" +sqlalchemy = "*" +tenacity = "*" + +[dev-packages] +autopep8 = "*" +flake8 = "*" +pytest = "*" +pytest-cov = "*" +boto3 = "*" + +[requires] +python_version = "3.9" + +[pipenv] +allow_prereleases = true diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock new file mode 100644 index 00000000..60fdb517 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock @@ -0,0 +1,397 @@ +{ + "_meta": { + "hash": { + "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" + }, + "pipfile-spec": 6, + "requires": { + 
"python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:6633bce2b73284acce1453ca85834c7c5a59e0dbcce1170be461cc079bdcdfcf", + "sha256:668400d13889d2d2fcd66ce785cc0b0fc040681f58a9c7f67daa9149a52b6c63" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==1.38.13" + }, + "botocore": { + "hashes": [ + "sha256:22feee15753cd3f9f7179d041604078a1024701497d27b22be7c6707e8d13ccb", + "sha256:de29fee43a1f02787fb5b3756ec09917d5661ed95b2b2d64797ab04196f69e14" + ], + "markers": "python_version >= '3.9'", + "version": "==1.38.13" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "pymysql": { + "hashes": [ + "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", + "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==1.1.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", + "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" + ], + "markers": "python_version >= '3.9'", + "version": "==0.12.0" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= 
'2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", + "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", + "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", + "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", + "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", + "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", + "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", + "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", + "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", + "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", + "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", + "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", + "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", + "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", + "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", + "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", + "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", + "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", + "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", + "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", + "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", + "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", + "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", + "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", + 
"sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", + "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", + "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", + "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", + "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", + "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", + "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", + "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", + "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", + "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", + "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", + "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", + "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", + "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", + "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", + "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", + "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", + "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", + "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", + "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", + "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", + "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", + "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", + "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", + "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", + "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", 
+ "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", + "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", + "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", + "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", + "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", + "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", + "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==2.0.40" + }, + "tenacity": { + "hashes": [ + "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", + "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==9.1.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", + "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" + ], + "markers": "python_version >= '3.8'", + "version": "==4.13.2" + }, + "urllib3": { + "hashes": [ + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.20" + } + }, + "develop": { + "autopep8": { + "hashes": [ + "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda", + "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==2.3.1" + }, + "boto3": { + "hashes": [ + "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680", + "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a" + ], + "index": "pypi", + "markers": 
"python_version >= '3.8'", + "version": "==1.35.57" + }, + "botocore": { + "hashes": [ + "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34", + "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327" + ], + "markers": "python_version >= '3.8'", + "version": "==1.35.57" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376", + "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", + "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111", + "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172", + "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491", + "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", + "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", + "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", + "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", + "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c", + "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", + "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", + "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", + "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0", + "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", + "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", + "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", + "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", + "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", + "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e", + "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", + 
"sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", + "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", + "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea", + "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", + "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", + "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07", + "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", + "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa", + "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901", + "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", + "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", + "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0", + "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", + "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", + "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", + "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51", + "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", + "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3", + "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", + "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076", + "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", + "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", + "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", + "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", + "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", + "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", 
+ "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09", + "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", + "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", + "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f", + "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72", + "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a", + "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", + "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b", + "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", + "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", + "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", + "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", + "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", + "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", + "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858" + ], + "markers": "python_version >= '3.9'", + "version": "==7.6.4" + }, + "exceptiongroup": { + "hashes": [ + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" + ], + "markers": "python_version < '3.11'", + "version": "==1.2.2" + }, + "flake8": { + "hashes": [ + "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", + "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1'", + "version": "==7.1.1" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + 
"version": "==2.0.0" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "packaging": { + "hashes": [ + "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", + "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" + ], + "markers": "python_version >= '3.8'", + "version": "==24.2" + }, + "pluggy": { + "hashes": [ + "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" + ], + "markers": "python_version >= '3.8'", + "version": "==1.5.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", + "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" + ], + "markers": "python_version >= '3.8'", + "version": "==2.12.1" + }, + "pyflakes": { + "hashes": [ + "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", + "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" + ], + "markers": "python_version >= '3.8'", + "version": "==3.2.0" + }, + "pytest": { + "hashes": [ + "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", + "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==8.3.3" + }, + "pytest-cov": { + "hashes": [ + "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", + 
"sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==6.0.0" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d", + "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c" + ], + "markers": "python_version >= '3.8'", + "version": "==0.10.3" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "tomli": { + "hashes": [ + "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", + "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.2" + }, + "urllib3": { + "hashes": [ + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + ], + "markers": "python_version < '3.10'", + "version": "==1.26.20" + } + } +} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md new file mode 100644 index 00000000..144cf9b8 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md @@ -0,0 +1,292 @@ +# 実消化&アルトマーク 日次バッチ + +## 概要 + +実消化&アルトマークの日次バッチ処理。 + +## 環境情報 + +- Python 3.9 +- MySQL 8.23 +- VSCode + +## 環境構築 + +- Python の構築 + + - Merck_NewDWH 開発 
2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 + - 「Pipenv の導入」までを行っておくこと + - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する + - `pipenv install --dev --python ` + - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく + +- MySQL の環境構築 + - Windows の場合、以下のリンクからダウンロードする + - + - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 + - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると + - `docker-compose up -d` + - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと + - データを投入する + - 立ち上げたデータベースに「src05」スキーマを作成する + - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する + - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` +- 環境変数の設定 + - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する + - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください +- VSCode の設定 + - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する + - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する + +## 実行 + +- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 +- 「entrypoint.py」が、バッチ処理のエントリーポイント。 +- 実際の処理は、「src/jobctrl_daily.py」で行っている。 + +## 単体テスト(アルトマーク取込処理) + +アルトマーク取込処理は、単体テストコードを使用してテスト自動化を行う + +### テスト準備 + +- VSCodeで以下の拡張機能をインストールする + - Python + - Python Test Explorer for Visual Studio Code + - Test Explorer UI +- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く +- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する +- 現在のワークスペースを選び、「pytest」を選択する +- 「tests」フォルダを選択する +- バックグランドで、pytest モジュールのインストールが始まれば成功 + +### テスト用のサブコマンド一覧 + +- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる + - `Pipfile`内の「scripts」セクションに宣言されている + +| コマンド | 概要 | +| ---------------- | -------------------------------------------------------------------------------------------- | +| 
test:ultmarc | tests/batch/ultmarc フォルダ配下のユニットテストを実行する | +| test:ultmarc:cov | tests/batch/ultmarc フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | + +### テスト共通関数の仕様 + +- tests/testing_utility.py内の共通関数の仕様について記載する + +#### create_ultmarc_test_data_from_csv + +- 引数 + - file_path: str +- 戻り値 + - src.batch.ultmarc.datfile.DatFileのインスタンス +- 処理概要 + - CSVファイルから、アルトマークのインプットデータを作成する + - データフォーマットは以下 + - 文字コード: UTF-8 + - 改行コード:LF + - ヘッダ: なし + - 値囲い: ダブルクォート + - アルトマークデータと文字コードを合わせるため、指定されたファイルを一時ディレクトリに、文字コード「cp932」で書き出してからテストデータとして読み込む + - テストデータそのものはUTF-8の文字コードで作成すること + +### create_db_data_from_csv + +- 引数 + - file_path: str +- 戻り値 + - テーブルのレコードに相当する辞書のリスト +- 処理概要 + - CSVファイルから、アルトマークテーブルに相当するテストデータを作成する + - テストの初期データ、期待値データを作成するのに利用する + - データフォーマットは以下 + - 文字コード: UTF-8 + - 改行コード:LF + - ヘッダ: なし + - 値囲い: ダブルクォート + - ファイル内の、以下の形式のデータを自動的に変換する + - `NULL` + - `None`に変換される + - `yyyy-mm-dd`もしくは、`yyyy/mm/dd`の文字 + - Date型に変換される + - `yyyy-mm-dd hh:mm:ss`もしくは、`yyyy/mm/dd hh:mm:ss`の文字 + - DateTime型に変換される + +### create_insert_sql_with_parameter + +- 引数 + - table_name: str テーブル名 + - column_names: list[str] カラム名のリスト + - test_data: list[str]: 値のリスト +- 戻り値 + - INSERT文とバインドパラメータ辞書 +- 処理概要 + - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でINSERT文、バインドパラメータを作成する + +### create_delete_sql_with_parameter + +- 引数 + - table_name: str テーブル名 + - column_names: list[str] カラム名のリスト + - test_data: list[str]: 値のリスト +- 戻り値 + - DELETE文とバインドパラメータ辞書 +- 処理概要 + - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でDELETE文、バインドパラメータを作成する + +### create_ultmarc_table_mapper_sut + +- 引数 + - line: src.batch.ultmarc.datfile.DatFileLine アルトマークデータファイルの1行 + - db: src.db.Database データベース操作クラス +- 戻り値 + - マッパークラス +- 処理概要 + - src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory.UltmarcTableMapperFactoryを通じて、テスト対象のマッパークラスを生成して返す + +### assert_table_results + +- 引数 + - actual_rows: list[dict] テスト結果の辞書リスト + - expect_rows: list[dict] 期待値の辞書リスト + - ignore_col_name: list 比較を無視するDBのカラム名. Default None. 
+- 戻り値 + - なし +- 処理概要 + - テスト結果データと期待値データを突き合わせ、期待値どおりとなっているかを確認する + - ignore_col_nameに指定したカラムは、呼び出し元のテストコード内で個別に突き合わせする + + +## 単体テスト(実消化データ取込処理) + +実消化データは、単体テストコードを使用してテスト自動化を行う + +### テスト準備 + +※単体テスト(アルトマーク取込処理)と同じ + +### テスト用のサブコマンド一覧 + +- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる + - `Pipfile`内の「scripts」セクションに宣言されている + +| コマンド | 概要 | +| ---------------- | -------------------------------------------------------------------------------------------- | +| test:vjsk | tests/batch/vjsk フォルダ配下のユニットテストを実行する | +| test:vjsk:cov | tests/batch/vjsk フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | + +### テスト共通関数の仕様 + +- tests/testing_vjsk_utility.py内の共通関数の仕様について記載する + +#### create_vjsk_assertion_list + +- 概要 + - DB登録期待値リストを作成する +- Args: + - file_path (str): DB登録期待値ファイル(tsvファイル)のパス + - memo: ※DB登録期待値ファイルの前提 + - memo: 受領データファイルと同じ + - memo: BOM付きtsv形式 + - memo: 一行目はカラム名になっているヘッダ行 + - Returns: + - List(dict) DB登録期待値辞書リスト + + +## フォルダ構成 + +```text +. +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Dockerfile -- Dockerイメージを作成するためのファイル +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル +├── README.md -- 当ファイル +├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル +├── src -- ソースコードの保管場所 +│ ├── aws -- AWS関連処理 +│ │ └── s3.py -- S3クライアントとバケット処理 +│ ├── batch -- バッチ処理関連ソース置き場 +│ │ ├── batch_functions.py -- バッチ処理共通関数置き場 +│ │ ├── datachange -- 実績洗替関連ソース置き場 +│ │ │ └── emp_chg_inst_lau.py -- 施設担当者マスタ洗替 +│ │ └── jissekiaraigae.py -- 実績洗替処理のエントリーポイント +│ │ └── ultmarc -- アルトマーク関連処理 +│ │ ├── ultmarc_process.py -- アルトマーク関連処理のエントリーポイント +│ │ ├── datfile.py -- データファイル読込 +│ │ └── utmp_tables -- アルトマークテーブルへの登録関連 +│ │ ├── table_mapper -- テーブルへのデータマッピング処理 +│ │ │ ├── concrete -- テーブルマッパーのマッピング処理を行う具象クラス(全テーブル分) +│ │ │ │ ├── com_alma_mapper.py +│ │ │ │ ├── ... +│ │ │ │ └── null_mapper.py -- テスト用、空振りするマッパークラス +│ │ │ └── ultmarc_table_mapper.py -- テーブルへの登録処理を行う抽象クラス +│ │ ├── tables -- アルトマークデータのDTOクラス(全テーブル分) +│ │ │ ├── com_alma.py +│ │ │ ├── ... 
+│ │ │ └── ultmarc_table.py -- アルトマークテーブルの抽象クラス +│ │ └── ultmarc_table_mapper_factory.py -- テーブルマッパー生成クラス +│ ├── db +│ │ └── database.py -- データベース操作共通処理 +│ ├── error +│ │ └── exceptions.py -- カスタム例外 +│ ├── jobctrl_daily.py -- 日次バッチ処理のエントリーポイント。「entrypoint.py」 から呼ばれる。 +│ ├── logging +│ │ └── get_logger.py -- ログ出力の共通処理 +│ ├── system_var +│ │ └── environment.py -- 環境変数 +│ └── time +│ └── elapsed_time.py -- 実行時間計測用 +└── tests -- ユニットテストのルートディレクト + ├── batch + │ └── ultmarc -- アルトマーク関連のユニットテストを格納する + │ │ └── utmp_tables + │ │ └── table_mapper -- 以下、マッパークラス単位でフォルダを切る + │ │ └── com_alma + │ │ ├── test_com_alma_mapper.py -- テストコード本体 + │ │ ├── com_alma_insert.csv -- S3に配置される想定のテストCSVデータ。ケースごとに用意する。 + │ │ ... + │ │ ├── db_com_alma_before_update.csv -- テスト時に事前にDBに登録しておくデータ。CSVで用意する。 + │ │ ... + │ │ ├── expect_com_alma_insert.csv -- テストの期待値データ。CSVで用意する。 + │ │ ... + │ └─vjsk -- 実消化データ取込処理関連のユニットテストを格納する + │ │ + │ ├─vjsk_file_check -- 受領ファイルチェック処理関連のユニットテストを格納する + │ │ ├─conftest.py -- テスト内で共通利用できるフィクスチャの宣言 + │ │ └─test_vjsk_file_check.py -- テストクラス本体 + │ │ + │ └─vjsk_load -- 受領データ登録処理関連のユニットテストを格納する + │ │ conftest.py -- テスト内で共通利用できるフィクスチャの宣言 + │ │ test_vjsk_load.py -- テストクラス本体 + │ │ + │ └─testdata -- テストモジュールが使用するテストデータを格納する + │ │ bio_slip_data_202304280000.tsv -- 正常ケースの単体確認用 + │ │ ... -- *20230428* は新規4件の登録確認用 + │ │ whs_mst_202304290000.tsv -- *20230429* は更新2件+追加新規2件の登録確認用 + │ │ + │ ├─NoData -- 正常ケースの単体確認用 + │ │ bio_slip_data_nodatarecord.tsv -- ヘッダ行のみでデータが0件の動作確認用 + │ │ ... + │ │ whs_mst_nodatarecord.tsv + │ │ + │ ├─TestFormatErrorFile -- 異常ケースの単体確認用 + │ │ bio_slip_data_formaterror.tsv -- 末尾行のタブ数が想定と異なる(ファイル欠落がある)ときの動作確認用 + │ │ ... + │ │ whs_mst_formaterror.tsv + │ │ + │ ├─TestImportFileToDb -- 正常ケースの単体確認用 + │ │ bio_slip_data_202304270000.gz -- 対向元システムから送られてきた状態(gz圧縮)の受領データファイルの動作確認用 + │ │ ... + │ │ whs_mst_202304270000.gz + │ │ + │ └─UnzipError -- 異常ケースの単体確認用 + │ bio_slip_data_202304270000.gz -- gz圧縮ファイルが解凍できないときの動作確認用 + │ ... 
+ │ whs_mst_202304270000.gz + │ + ├── conftest.py -- テスト内で共通利用できるフィクスチャを宣言する(執筆時点ではDBのみ) + ├── testing_utility.py -- テストの共通関数 + └── testing_vjsk_utility.py -- テストの共通関数(実消化データ取込処理関連) +``` + diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py new file mode 100644 index 00000000..62891bf7 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py @@ -0,0 +1,10 @@ +"""実消化&アルトマーク DCF施設削除新規マスタ作成のエントリーポイント""" +from src import jobctrl_dcfInstMergeIo + +if __name__ == '__main__': + try: + exit(jobctrl_dcfInstMergeIo.exec()) + except Exception: + # エラーが起きても、正常系のコードで返す。 + # エラーが起きた事実はbatch_process内でログを出す。 + exit(0) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini new file mode 100644 index 00000000..5dbe2661 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +log_format = %(levelname)s %(asctime)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py new file mode 100644 index 00000000..6203868d --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py @@ -0,0 +1,185 @@ +import gzip +import os +import os.path as path +import shutil +import tempfile + +import boto3 + +from src.system_var import environment + 
+ +class S3Client: + __s3_client = boto3.client('s3') + _bucket_name: str + + def list_objects(self, bucket_name: str, folder_name: str): + response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + if response['KeyCount'] == 0: + return [] + contents = response['Contents'] + # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] + return objects + + def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: + copy_source = {'Bucket': src_bucket, 'Key': src_key} + self.__s3_client.copy(copy_source, dest_bucket, dest_key) + return + + def download_file(self, bucket_name: str, file_key: str, file): + self.__s3_client.download_fileobj( + Bucket=bucket_name, + Key=file_key, + Fileobj=file + ) + return + + def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): + self.__s3_client.upload_file( + local_file_path, + Bucket=bucket_name, + Key=file_key + ) + + def delete_file(self, bucket_name: str, file_key: str): + self.__s3_client.delete_object( + Bucket=bucket_name, + Key=file_key + ) + + +class S3Bucket(): + _s3_client = S3Client() + _bucket_name: str = None + + +class UltmarcBucket(S3Bucket): + _bucket_name = environment.ULTMARC_DATA_BUCKET + _folder = environment.ULTMARC_DATA_FOLDER + + def list_dat_file(self): + return self._s3_client.list_objects(self._bucket_name, self._folder) + + def download_dat_file(self, dat_filename: str): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, dat_filename, f) + f.seek(0) + return temporary_file_path + + def backup_dat_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + ultmarc_backup_bucket = UltmarcBackupBucket() + backup_key 
= f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) + # コピー元のファイルを削除 + self._s3_client.delete_file(self._bucket_name, dat_file_key) + + +class ConfigBucket(S3Bucket): + _bucket_name = environment.JSKULT_CONFIG_BUCKET + + def download_holiday_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) + holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + f.seek(0) + return temporary_file_path + + def download_wholesaler_stock_input_day_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) + f.seek(0) + return temporary_file_path + + def download_ultmarc_hex_convert_config(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) + hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) + f.seek(0) + return temporary_file_path + + +class JskUltBackupBucket(S3Bucket): + _bucket_name = 
environment.JSKULT_BACKUP_BUCKET + + +class UltmarcBackupBucket(JskUltBackupBucket): + _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER + + +class VjskReceiveBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER + + _s3_file_list = None + + def get_s3_file_list(self): + self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) + return self._s3_file_list + + def download_data_file(self, data_filename: str): + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, data_filename, f) + f.seek(0) + return temporary_file_path + + def unzip_data_file(self, filename: str): + temp_dir = os.path.dirname(filename) + decompress_filename = os.path.basename(filename).replace('.gz', '') + decompress_file_path = os.path.join(temp_dir, decompress_filename) + with gzip.open(filename, 'rb') as gz: + with open(decompress_file_path, 'wb') as decompressed_file: + shutil.copyfileobj(gz, decompressed_file) + + ret = [decompress_file_path] + return ret + + def backup_dat_file(self, target_files: list, datetime_key: str): + jskult_backup_bucket = VjskBackupBucket() + for target_file in target_files: + backup_from_file_path = target_file.get("filename") + backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") + backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' + self._s3_client.copy(self._bucket_name, backup_from_file_path, + jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.delete_file(self._bucket_name, backup_from_file_path) + + +class VjskSendBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _send_folder = 
environment.VJSK_DATA_SEND_FOLDER + + def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + # S3バケットにファイルを移動 + csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + return + + def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + vjsk_backup_bucket = VjskBackupBucket() + dat_key = f'{self._send_folder}/{dat_file_key}' + backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py new file mode 100644 index 00000000..5ddaba4e --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py @@ -0,0 +1,195 @@ +from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, + create_engine, text) +from sqlalchemy.engine.url import URL +from tenacity import retry, stop_after_attempt, wait_exponential + +from src.error.exceptions import DBException +from src.logging.get_logger import get_logger +from src.system_var import environment + +logger = get_logger(__name__) + + +class Database: + """データベース操作クラス""" + __connection: Connection = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None + 
__host: str = None + __port: str = None + __username: str = None + __password: str = None + __schema: str = None + __autocommit: bool = None + __connection_string: str = None + + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: + """このクラスの新たなインスタンスを初期化します + + Args: + username (str): DBユーザー名 + password (str): DBパスワード + host (str): DBホスト名 + port (int): DBポート + schema (str): DBスキーマ名 + autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. + """ + self.__username = username + self.__password = password + self.__host = host + self.__port = int(port) + self.__schema = schema + self.__autocommit = autocommit + + self.__connection_string = URL.create( + drivername='mysql+pymysql', + username=self.__username, + password=self.__password, + host=self.__host, + port=self.__port, + database=self.__schema, + query={"charset": "utf8mb4", "local_infile": "1"}, + ) + + self.__transactional_engine = create_engine( + self.__connection_string, + pool_timeout=5, + poolclass=QueuePool + ) + + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + + @classmethod + def get_instance(cls, autocommit=False): + """インスタンスを取得します + + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
+ Returns: + Database: DB操作クラスインスタンス + """ + return cls( + username=environment.DB_USERNAME, + password=environment.DB_PASSWORD, + host=environment.DB_HOST, + port=environment.DB_PORT, + schema=environment.DB_SCHEMA, + autocommit=autocommit + ) + + @retry( + wait=wait_exponential( + multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, + min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, + max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS + ), + stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT), + retry_error_cls=DBException + ) + def connect(self): + """ + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) + Raises: + DBException: 接続失敗 + """ + try: + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) + except Exception as e: + raise DBException(e) + + def execute_select(self, select_query: str, parameters=None) -> list[dict]: + """SELECTクエリを実行します。 + + Args: + select_query (str): SELECT文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. + + Raises: + DBException: DBエラー + + Returns: + list[dict]: カラム名: 値の辞書リスト + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(select_query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(select_query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + result_rows = result.mappings().all() + return result_rows + + def execute(self, query: str, parameters=None) -> CursorResult: + """SQLクエリを実行します。 + + Args: + query (str): SQL文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
+ + Raises: + DBException: DBエラー + + Returns: + CursorResult: 取得結果 + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + return result + + def begin(self): + """トランザクションを開始します。""" + if not self.__connection.in_transaction(): + self.__connection.begin() + + def commit(self): + """トランザクションをコミットします""" + if self.__connection.in_transaction(): + self.__connection.commit() + + def rollback(self): + """トランザクションをロールバックします""" + if self.__connection.in_transaction(): + self.__connection.rollback() + + def disconnect(self): + """DB接続を切断します。""" + if self.__connection is not None: + self.__connection.close() + self.__connection = None + + def to_jst(self): + self.execute('SET time_zone = "+9:00"') + + def __execute_with_transaction(self, query: str, parameters: dict): + # トランザクションを開始してクエリを実行する + with self.__connection.begin(): + try: + result = self.__connection.execute(text(query), parameters=parameters) + except Exception as e: + self.__connection.rollback() + raise e + # ここでコミットされる + return result diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py new file mode 100644 index 00000000..055c24f6 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py @@ -0,0 +1,10 @@ +class MeDaCaException(Exception): + pass + + +class 
DBException(MeDaCaException): + pass + + +class BatchOperationException(MeDaCaException): + pass diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py new file mode 100644 index 00000000..9c29840c --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py @@ -0,0 +1,4 @@ +"""実消化&アルトマーク DCF施設削除新規マスタ作成""" + +def exec(): + pass \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py new file mode 100644 index 00000000..f36f1199 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py @@ -0,0 +1,37 @@ +import logging + +from src.system_var.environment import LOG_LEVEL + +# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する +for name in ["boto3", "botocore", "s3transfer", "urllib3"]: + logging.getLogger(name).setLevel(logging.WARNING) + + +def get_logger(log_name: str) -> logging.Logger: + """一意のログ出力モジュールを取得します。 + + Args: + log_name (str): ロガー名 + + Returns: + _type_: _description_ + """ + logger = logging.getLogger(log_name) + level = logging.getLevelName(LOG_LEVEL) + if not isinstance(level, int): + level = logging.INFO + logger.setLevel(level) + + if not logger.hasHandlers(): + handler = logging.StreamHandler() + logger.addHandler(handler) + + formatter = logging.Formatter( + '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', + '%Y-%m-%d %H:%M:%S' + ) + + for handler in logger.handlers: + handler.setFormatter(formatter) + + return logger diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py new file mode 100644 index 00000000..8a0ccbb3 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py @@ -0,0 +1,17 @@ +# バッチ正常終了コード +BATCH_EXIT_CODE_SUCCESS = 0 + +# バッチ処理中フラグ:未処理 +BATCH_ACTF_BATCH_UNPROCESSED = '0' +# バッチ処理中フラグ:処理中 +BATCH_ACTF_BATCH_IN_PROCESSING = '1' +# dump取得状態区分:未処理 +DUMP_STATUS_KBN_UNPROCESSED = '0' +# dump取得状態区分:dump取得正常終了 +DUMP_STATUS_KBN_COMPLETE = '2' + +# カレンダーファイルのコメントシンボル +CALENDAR_COMMENT_SYMBOL = '#' + +# 月曜日(datetime.weekday()で月曜日を表す数字) +WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py new file mode 100644 index 00000000..0af7a118 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py @@ -0,0 +1,38 @@ +import os + +# Database +DB_HOST = os.environ['DB_HOST'] +DB_PORT = int(os.environ['DB_PORT']) +DB_USERNAME = os.environ['DB_USERNAME'] +DB_PASSWORD = os.environ['DB_PASSWORD'] +DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS +ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] +ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] +JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] +ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] +JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] +VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = 
os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] +JSKULT_CONFIG_CONVERT_FOLDER = os.environ['JSKULT_CONFIG_CONVERT_FOLDER'] +JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME = os.environ['JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME'] +VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER'] + +# 初期値がある環境変数 +LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') +DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) + +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] From f4a7638ae1a36948001c0cc69460947c2cb53b7d Mon Sep 17 00:00:00 2001 From: yono Date: Mon, 26 May 2025 18:28:38 +0900 Subject: [PATCH 06/30] =?UTF-8?q?feat:=E5=AE=9F=E6=B6=88=E5=8C=96=E9=81=8E?= =?UTF-8?q?=E5=8E=BB=E3=83=87=E3=83=BC=E3=82=BF=E3=82=A2=E3=83=BC=E3=82=AB?= =?UTF-8?q?=E3=82=A4=E3=83=96=E5=87=A6=E7=90=86=E3=81=AE=E6=96=B0=E8=A6=8F?= =?UTF-8?q?=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/launch.json | 2 +- .../.dockerignore | 12 + .../.env.example | 15 + ecs/jskult-batch-archive-jsk-data/.gitignore | 10 + .../.vscode/launch.json | 16 + .../.vscode/recommended_settings.json | 31 ++ ecs/jskult-batch-archive-jsk-data/Dockerfile | 20 + ecs/jskult-batch-archive-jsk-data/Pipfile | 29 ++ .../Pipfile.lock | 447 ++++++++++++++++++ ecs/jskult-batch-archive-jsk-data/README.md | 85 ++++ .../entrypoint.py | 10 + 
ecs/jskult-batch-archive-jsk-data/pytest.ini | 3 + .../src/__init__.py | 0 .../src/aws/__init__.py | 0 .../src/aws/s3.py | 62 +++ .../src/batch/archive_jsk_data.py | 62 +++ .../src/batch/common/__init__.py | 0 .../src/batch/jskult_archive_manager.py | 103 ++++ .../src/db/__init__.py | 0 .../src/db/database.py | 195 ++++++++ .../src/error/__init__.py | 0 .../src/error/exceptions.py | 6 + .../src/logging/get_logger.py | 37 ++ .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 2 + .../src/system_var/environment.py | 18 + 26 files changed, 1164 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-archive-jsk-data/.dockerignore create mode 100644 ecs/jskult-batch-archive-jsk-data/.env.example create mode 100644 ecs/jskult-batch-archive-jsk-data/.gitignore create mode 100644 ecs/jskult-batch-archive-jsk-data/.vscode/launch.json create mode 100644 ecs/jskult-batch-archive-jsk-data/.vscode/recommended_settings.json create mode 100644 ecs/jskult-batch-archive-jsk-data/Dockerfile create mode 100644 ecs/jskult-batch-archive-jsk-data/Pipfile create mode 100644 ecs/jskult-batch-archive-jsk-data/Pipfile.lock create mode 100644 ecs/jskult-batch-archive-jsk-data/README.md create mode 100644 ecs/jskult-batch-archive-jsk-data/entrypoint.py create mode 100644 ecs/jskult-batch-archive-jsk-data/pytest.ini create mode 100644 ecs/jskult-batch-archive-jsk-data/src/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/aws/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/aws/s3.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/batch/common/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/db/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/db/database.py create mode 100644 
ecs/jskult-batch-archive-jsk-data/src/error/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/error/exceptions.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/logging/get_logger.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/system_var/__init__.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/system_var/constants.py create mode 100644 ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py diff --git a/.vscode/launch.json b/.vscode/launch.json index 753ba49f..cde61101 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,7 +9,7 @@ "type": "python", "request": "launch", // windowsだと\区切りかも - "program": "ecs\\dataimport\\dataimport\\controller.py", + "program": "ecs/jskult-batch-archive-jsk-data/entrypoint.py", "console": "integratedTerminal", "justMyCode": true, "envFile": "${workspaceFolder}/.env" diff --git a/ecs/jskult-batch-archive-jsk-data/.dockerignore b/ecs/jskult-batch-archive-jsk-data/.dockerignore new file mode 100644 index 00000000..8b9da402 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/.dockerignore @@ -0,0 +1,12 @@ +tests/* +.coverage +.env +.env.example +.report/* +.vscode/* +.pytest_cache/* +*/__pychache__/* +Dockerfile +pytest.ini +README.md +*.sql diff --git a/ecs/jskult-batch-archive-jsk-data/.env.example b/ecs/jskult-batch-archive-jsk-data/.env.example new file mode 100644 index 00000000..c217d53f --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/.env.example @@ -0,0 +1,15 @@ +DB_HOST=**************** +DB_PORT=**************** +DB_USERNAME=**************** +DB_PASSWORD=**************** +DB_SCHEMA=**************** + +LOG_LEVEL=INFO + +JSKULT_CONFIG_BUCKET=********************** +JSKULT_ARCHIVE_BUCKET=**************** + +DB_CONNECTION_MAX_RETRY_ATTEMPT=4 +DB_CONNECTION_RETRY_INTERVAL_INIT=5 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50 \ No newline at end of file diff --git a/ecs/jskult-batch-archive-jsk-data/.gitignore 
b/ecs/jskult-batch-archive-jsk-data/.gitignore new file mode 100644 index 00000000..bd0b37f8 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/.gitignore @@ -0,0 +1,10 @@ +.vscode/settings.json +.env + +# python +__pycache__ + +# python test +.pytest_cache +.coverage +.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-archive-jsk-data/.vscode/launch.json b/ecs/jskult-batch-archive-jsk-data/.vscode/launch.json new file mode 100644 index 00000000..5a9a4867 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // IntelliSense を使用して利用可能な属性を学べます。 + // 既存の属性の説明をホバーして表示します。 + // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(DEBUG)archive", + "type": "python", + "request": "launch", + "program": "entrypoint.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/ecs/jskult-batch-archive-jsk-data/.vscode/recommended_settings.json b/ecs/jskult-batch-archive-jsk-data/.vscode/recommended_settings.json new file mode 100644 index 00000000..2fde8732 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/.vscode/recommended_settings.json @@ -0,0 +1,31 @@ +{ + "[python]": { + "editor.defaultFormatter": null, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + // 自身の環境に合わせて変えてください + "python.defaultInterpreterPath": "", + "python.linting.lintOnSave": true, + "python.linting.enabled": true, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": [ + "--max-line-length=200", + "--ignore=F541" + ], + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Path": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length", "200", + "--ignore=F541" + ], + "python.testing.pytestArgs": [ + "tests/batch/" + ], + + "python.testing.unittestEnabled": false, + 
"python.testing.pytestEnabled": true +} diff --git a/ecs/jskult-batch-archive-jsk-data/Dockerfile b/ecs/jskult-batch-archive-jsk-data/Dockerfile new file mode 100644 index 00000000..fc0fde90 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim-bookworm + +ENV TZ="Asia/Tokyo" +# pythonの標準出力をバッファリングしないフラグ +ENV PYTHONUNBUFFERED=1 +# pythonのバイトコードを生成しないフラグ +ENV PYTHONDONTWRITEBYTECODE=1 + +WORKDIR /usr/src/app +COPY Pipfile Pipfile.lock ./ +RUN \ + apt update -y && \ + pip install pipenv --no-cache-dir && \ + pipenv install --system --deploy && \ + pip uninstall -y pipenv virtualenv-clone virtualenv + +COPY src ./src +COPY entrypoint.py entrypoint.py + +CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-archive-jsk-data/Pipfile b/ecs/jskult-batch-archive-jsk-data/Pipfile new file mode 100644 index 00000000..2b56d8c0 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/Pipfile @@ -0,0 +1,29 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[scripts] +"test:ultmarc" = "pytest tests/batch/ultmarc/" +"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" +"test:vjsk" = "pytest tests/batch/vjsk/" +"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" + +[packages] +boto3 = "*" +PyMySQL = "*" +sqlalchemy = "*" +tenacity = "*" + +[dev-packages] +autopep8 = "*" +flake8 = "*" +pytest = "*" +pytest-cov = "*" +boto3 = "*" + +[requires] +python_version = "3.12" + +[pipenv] +allow_prereleases = true diff --git a/ecs/jskult-batch-archive-jsk-data/Pipfile.lock b/ecs/jskult-batch-archive-jsk-data/Pipfile.lock new file mode 100644 index 00000000..7179f5b9 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/Pipfile.lock @@ -0,0 +1,447 @@ +{ + "_meta": { + "hash": { + "sha256": "aa2d1d97600fea225b7d249dae0d065190d00fdadbf85b20773e0c1d9862f5c1" + }, + "pipfile-spec": 6, + 
"requires": { + "python_version": "3.12" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0", + "sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==1.38.23" + }, + "botocore": { + "hashes": [ + "sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a", + "sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457" + ], + "markers": "python_version >= '3.9'", + "version": "==1.38.23" + }, + "greenlet": { + "hashes": [ + "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057", + "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207", + "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3", + "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4", + "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b", + "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc", + "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825", + "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370", + "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708", + "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457", + "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763", + "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf", + "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe", + "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e", + "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d", + "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59", + 
"sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa", + "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e", + "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51", + "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3", + "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e", + "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61", + "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5", + "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74", + "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907", + "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275", + "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f", + "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59", + "sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c", + "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf", + "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b", + "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819", + "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65", + "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e", + "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240", + "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5", + "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659", + "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", + "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec", + "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8", + "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418", + "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce", 
+ "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6", + "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7", + "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6", + "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f", + "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13", + "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b", + "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068", + "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325", + "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330", + "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834", + "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b", + "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5", + "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421" + ], + "markers": "python_version >= '3.9'", + "version": "==3.2.2" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "pymysql": { + "hashes": [ + "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", + "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==1.1.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + 
"sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be", + "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177" + ], + "markers": "python_version >= '3.9'", + "version": "==0.13.0" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5", + "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", + "sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b", + "sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b", + "sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348", + "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda", + "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5", + "sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2", + "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29", + "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", + "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", + "sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826", + "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", + "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", + "sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45", + "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", + "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", + "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", + 
"sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", + "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", + "sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71", + "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11", + "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", + "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", + "sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8", + "sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd", + "sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814", + "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08", + "sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea", + "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30", + "sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda", + "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", + "sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923", + "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", + "sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036", + "sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3", + "sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f", + "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", + "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", + "sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2", + "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", + "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", + "sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769", + "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", 
+ "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", + "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b", + "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747", + "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", + "sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440", + "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", + "sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2", + "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", + "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", + "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", + "sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd", + "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", + "sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==2.0.41" + }, + "tenacity": { + "hashes": [ + "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", + "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==9.1.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:6cd49c8b914bb3869a16ed9d1001e3d0ff1d84fae4838076fe3b361ab8b32b65", + "sha256:90196079d79b4658568e177f50c24c327b73a85e664c0af9f3937e2015b65956" + ], + "markers": "python_version >= '3.9'", + "version": "==4.14.0rc1" + }, + "urllib3": { + "hashes": [ + "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", + "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" + ], + "markers": "python_version >= '3.9'", + "version": "==2.4.0" + } + }, + "develop": { + "autopep8": { + "hashes": [ + 
"sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", + "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==2.3.2" + }, + "boto3": { + "hashes": [ + "sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0", + "sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==1.38.23" + }, + "botocore": { + "hashes": [ + "sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a", + "sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457" + ], + "markers": "python_version >= '3.9'", + "version": "==1.38.23" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7", + "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", + "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404", + "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11", + "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5", + "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d", + "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347", + "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", + "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3", + "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3", + "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", + "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e", + "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", + "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", + "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", + 
"sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a", + "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", + "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", + "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", + "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", + "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", + "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", + "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", + "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a", + "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be", + "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", + "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622", + "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6", + "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879", + "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a", + "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a", + "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a", + "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", + "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", + "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", + "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", + "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", + "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", + "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", + "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", + "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", 
+ "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53", + "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975", + "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7", + "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", + "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", + "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", + "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", + "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c", + "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca", + "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787", + "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9", + "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a", + "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", + "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", + "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", + "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", + "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", + "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7", + "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", + "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb", + "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", + "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", + "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d", + "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837", + "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54", + 
"sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3" + ], + "markers": "python_version >= '3.9'", + "version": "==7.8.2" + }, + "flake8": { + "hashes": [ + "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343", + "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==7.2.0" + }, + "iniconfig": { + "hashes": [ + "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", + "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" + ], + "markers": "python_version >= '3.8'", + "version": "==2.1.0" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "packaging": { + "hashes": [ + "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", + "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" + ], + "markers": "python_version >= '3.8'", + "version": "==25.0" + }, + "pluggy": { + "hashes": [ + "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", + "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" + ], + "markers": "python_version >= '3.9'", + "version": "==1.6.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9", + "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae" + ], + "markers": "python_version >= '3.9'", + "version": "==2.13.0" + }, + "pyflakes": { + "hashes": [ + 
"sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a", + "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b" + ], + "markers": "python_version >= '3.9'", + "version": "==3.3.2" + }, + "pytest": { + "hashes": [ + "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", + "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==8.3.5" + }, + "pytest-cov": { + "hashes": [ + "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", + "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==6.1.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be", + "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177" + ], + "markers": "python_version >= '3.9'", + "version": "==0.13.0" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "urllib3": { + "hashes": [ + "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", + "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" + ], + "markers": "python_version >= '3.9'", + "version": "==2.4.0" + } + } +} diff --git a/ecs/jskult-batch-archive-jsk-data/README.md 
b/ecs/jskult-batch-archive-jsk-data/README.md new file mode 100644 index 00000000..b9a9340c --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/README.md @@ -0,0 +1,85 @@ +# 実消化&アルトマーク 日次バッチ + +## 概要 + +実消化過去データアーカイブ処理。 + +## 環境情報 + +- Python 3.12 +- MySQL 8.23 +- VSCode + +## 環境構築 + +- Python の構築 + + - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 + - 「Pipenv の導入」までを行っておくこと + - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する + - `pipenv install --dev --python ` + - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく + +- MySQL の環境構築 + - Windows の場合、以下のリンクからダウンロードする + - + - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 + - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると + - `docker-compose up -d` + - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと + - データを投入する + - 立ち上げたデータベースに「src05」スキーマを作成する + - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する + - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` +- 環境変数の設定 + - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する + - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください +- VSCode の設定 + - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する + - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する + +## 実行 + +- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 +- 「entrypoint.py」が、バッチ処理のエントリーポイント。 +- 実際の処理は、「src/jobctrl_daily.py」で行っている。 + + +### テスト準備 + +- VSCodeで以下の拡張機能をインストールする + - Python + - Python Test Explorer for Visual Studio Code + - Test Explorer UI +- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く +- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する +- 現在のワークスペースを選び、「pytest」を選択する +- 「tests」フォルダを選択する +- バックグランドで、pytest 
モジュールのインストールが始まれば成功 + + +## フォルダ構成 + +```text +. +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Dockerfile -- Dockerイメージを作成するためのファイル +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル +├── README.md -- 当ファイル +├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル +└── src -- ソースコードの保管場所 + ├── aws -- AWS関連処理 + │ └── s3.py -- S3クライアントとバケット処理 + ├── batch -- バッチ処理関連ソース置き場 + │ ├── archive_jsk_data.py -- 実消化過去データアーカイブ処理 + │ └── jskult_archive_manager.py -- アーカイブ管理テーブル操作処理 + ├── db + │ └── database.py -- データベース操作共通処理 + ├── error + │ └── exceptions.py -- カスタム例外 + ├── logging + │ └── get_logger.py -- ログ出力の共通処理 + └── system_var + ├── constants.py -- 定数 + └── environment.py -- 環境変数 diff --git a/ecs/jskult-batch-archive-jsk-data/entrypoint.py b/ecs/jskult-batch-archive-jsk-data/entrypoint.py new file mode 100644 index 00000000..ad075788 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/entrypoint.py @@ -0,0 +1,10 @@ +"""実消化過去データアーカイブ処理ののエントリーポイント""" +from src.batch import archive_jsk_data + +if __name__ == '__main__': + # try: + exit(archive_jsk_data.exec()) + # except Exception: + # エラーが起きても、正常系のコードで返す。 + # エラーが起きた事実はbatch_process内でログを出す。 + exit(0) diff --git a/ecs/jskult-batch-archive-jsk-data/pytest.ini b/ecs/jskult-batch-archive-jsk-data/pytest.ini new file mode 100644 index 00000000..5dbe2661 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +log_format = %(levelname)s %(asctime)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S diff --git a/ecs/jskult-batch-archive-jsk-data/src/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/aws/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py b/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py new file mode 100644 index 
00000000..a6e0074a --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py @@ -0,0 +1,62 @@ +import gzip +import os +import os.path as path +import shutil +import tempfile +import boto3 +from src.system_var import environment + + +class S3Client: + __s3_client = boto3.client('s3') + _bucket_name: str + + def list_objects(self, bucket_name: str, folder_name: str): + response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + if response['KeyCount'] == 0: + return [] + contents = response['Contents'] + # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] + return objects + + def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: + copy_source = {'Bucket': src_bucket, 'Key': src_key} + self.__s3_client.copy(copy_source, dest_bucket, dest_key) + return + + def download_file(self, bucket_name: str, file_key: str, file): + self.__s3_client.download_fileobj( + Bucket=bucket_name, + Key=file_key, + Fileobj=file + ) + return + + def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): + self.__s3_client.upload_file( + local_file_path, + Bucket=bucket_name, + Key=file_key + ) + + def delete_file(self, bucket_name: str, file_key: str): + self.__s3_client.delete_object( + Bucket=bucket_name, + Key=file_key + ) + +class S3Bucket(): + _s3_client = S3Client() + _bucket_name: str = None + +class JskultArchiveBucket(S3Bucket): + _bucket_name = environment.JSKULT_ARCHIVE_BUCKET + + def upload_archive_zip_file(self, archive_zip: str, archive_zip_path: str, send_folder: str): + # S3バケットにファイルを移動 + archive_zip_name = f'{send_folder}/{archive_zip}' + s3_client = S3Client() + s3_client.upload_file(archive_zip_path, self._bucket_name, archive_zip_name) + return f"{self._bucket_name}/{archive_zip_name}" \ No newline at end of file diff --git 
a/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py new file mode 100644 index 00000000..d9693c98 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py @@ -0,0 +1,62 @@ +from src.logging.get_logger import get_logger +from src.batch.jskult_archive_manager import JskultArchiveManager +from src.aws.s3 import JskultArchiveBucket +import os.path as path +from datetime import timedelta +import tempfile +import csv +import zipfile + + +logger = get_logger("実消化_過去データアーカイブ処理") + +def exec(): + try: + logger.info("処理開始:実消化_過去データアーカイブ処理") + jskult_archive_manager = JskultArchiveManager() + # アーカイブ管理テーブルから対象テーブル、条件項目、条件年月、実行間隔(月)、前回条件年月、保存先を取得 + jskult_archive_manage_data_list = jskult_archive_manager.get_archive_manage() + + # 取得したレコード分繰り返す + for jskult_archive_manage_data in jskult_archive_manage_data_list: + # 対象テーブルで条件項目が条件年月以前のデータを取得 + archive_data = jskult_archive_manager.get_archive_data(jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"]) + # 取得データが0件の場合、スキップする + if not archive_data: + logger.info(f"アーカイブ対象データがありませんでした。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") + continue + + # 一時フォルダ作成 + with tempfile.TemporaryDirectory() as temporary_dir: + # 取得したデータをCSVに出力 + day_after_prev_filter_date = jskult_archive_manage_data["prev_filter_date"] + timedelta(days=1) + file_name = f'{jskult_archive_manage_data["target_table"]}_{day_after_prev_filter_date.strftime('%Y%m%d')}_{jskult_archive_manage_data["filter_date"].strftime('%Y%m%d')}' + csv_file_path = path.join(temporary_dir, f"{file_name}.csv") + headers = archive_data[0].keys() + with open(csv_file_path, 'w', newline='') as file: + writer = csv.DictWriter(file, fieldnames=headers, quoting=csv.QUOTE_ALL) + writer.writeheader() + writer.writerows(archive_data) + 
logger.info(f"CSVファイル作成に成功しました。{file_name}.csv") + + # 作成したCSVをzip形式に圧縮 + zip_file_path = path.join(temporary_dir, f"{file_name}.zip") + with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zipf: + zipf.write(csv_file_path) + logger.info(f"zip形式への圧縮に成功しました。{file_name}.zip") + + # 圧縮したCSVを保存先へアップロード + archive_bucket = JskultArchiveBucket() + upload_file_path = archive_bucket.upload_archive_zip_file(f"{file_name}.zip", zip_file_path, jskult_archive_manage_data["archive_storage"]) + logger.info(f"{upload_file_path}へのアップロードに成功しました。") + + # アーカイブしたデータをDBから削除 + jskult_archive_manager.delete_archive_data(jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"]) + logger.info(f"アーカイブしたデータのDBから削除に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") + + # 次回に向けてアーカイブ管理テーブルを更新する + jskult_archive_manager.update_archive_manage(jskult_archive_manage_data["target_table"]) + logger.info(f"アーカイブ管理テーブルの更新に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']}") + logger.info("処理終了:実消化_過去データアーカイブ処理") + except Exception as e: + logger.info(f"異常終了:実消化_過去データアーカイブ処理 {e}") \ No newline at end of file diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/common/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/batch/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py new file mode 100644 index 00000000..033be013 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py @@ -0,0 +1,103 @@ +from src.db.database import Database +from src.logging.get_logger import get_logger +logger = get_logger("アーカイブ管理テーブル操作") + +class JskultArchiveManager: + _db : Database = None + def __init__(self): + self._db = Database.get_instance() + + def 
get_archive_manage(self): + try: + logger.info("処理開始:アーカイブ管理テーブル取得") + sql = """ + select + target_table + , filter_column + , filter_date + , run_interval_months + , prev_filter_date + , archive_storage + from + internal07.jskult_archive_manage; + """ + self._db.connect() + jskult_archive_manage_data = self._db.execute_select(sql) + logger.info("処理終了:アーカイブ管理テーブル取得") + return jskult_archive_manage_data + except Exception as e: + logger.info("異常終了:アーカイブ管理テーブル取得") + raise + finally: + self._db.disconnect() + + def get_archive_data(self,target_table:str, filter_column:str, filter_date:str): + try: + logger.info("処理開始:アーカイブデータ取得") + sql = f""" + select + * + from + src07.{target_table} + where + str_to_date({filter_column},'%Y%m%d') <= '{filter_date}'; + """ + self._db.connect() + target_table_data = self._db.execute_select(sql) + logger.info("処理開始:アーカイブデータ終了") + return target_table_data + except Exception as e: + logger.info("異常終了:アーカイブ管理テーブル取得") + raise + finally: + self._db.disconnect() + + def delete_archive_data(self, target_table:str, filter_column:str, filter_date:str): + try: + logger.info("処理開始:アーカイブ後データ削除") + sql = f""" + delete from + src07.{target_table} + where + str_to_date({filter_column},'%Y%m%d') <= '{filter_date}'; + """ + self._db.connect() + self._db.begin() + self._db.execute(sql) + self._db.commit() + logger.info("処理終了:アーカイブ後データ削除") + return + except: + self._db.rollback() + logger.info("異常終了:アーカイブ後データ削除") + raise + finally: + self._db.disconnect() + + def update_archive_manage(self, target_table:str): + try: + logger.info("処理開始:アーカイブ管理テーブル条件年月更新") + sql = f""" + update internal07.jskult_archive_manage + set + prev_filter_date = filter_date + , filter_date = LAST_DAY( + DATE_ADD(filter_date, INTERVAL run_interval_months MONTH) + ) + , upd_user = CURRENT_USER () + , upd_date = NOW() + where + target_table = '{target_table}'; + """ + self._db.connect() + self._db.begin() + self._db.execute(sql) + self._db.commit() + 
logger.info("処理終了:アーカイブ管理テーブル条件年月更新") + return + except: + self._db.rollback() + logger.info("異常終了:アーカイブ管理テーブル条件年月更新") + raise + finally: + self._db.disconnect() diff --git a/ecs/jskult-batch-archive-jsk-data/src/db/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/db/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/db/database.py b/ecs/jskult-batch-archive-jsk-data/src/db/database.py new file mode 100644 index 00000000..5ddaba4e --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/db/database.py @@ -0,0 +1,195 @@ +from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, + create_engine, text) +from sqlalchemy.engine.url import URL +from tenacity import retry, stop_after_attempt, wait_exponential + +from src.error.exceptions import DBException +from src.logging.get_logger import get_logger +from src.system_var import environment + +logger = get_logger(__name__) + + +class Database: + """データベース操作クラス""" + __connection: Connection = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None + __host: str = None + __port: str = None + __username: str = None + __password: str = None + __schema: str = None + __autocommit: bool = None + __connection_string: str = None + + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: + """このクラスの新たなインスタンスを初期化します + + Args: + username (str): DBユーザー名 + password (str): DBパスワード + host (str): DBホスト名 + port (int): DBポート + schema (str): DBスキーマ名 + autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
+ """ + self.__username = username + self.__password = password + self.__host = host + self.__port = int(port) + self.__schema = schema + self.__autocommit = autocommit + + self.__connection_string = URL.create( + drivername='mysql+pymysql', + username=self.__username, + password=self.__password, + host=self.__host, + port=self.__port, + database=self.__schema, + query={"charset": "utf8mb4", "local_infile": "1"}, + ) + + self.__transactional_engine = create_engine( + self.__connection_string, + pool_timeout=5, + poolclass=QueuePool + ) + + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + + @classmethod + def get_instance(cls, autocommit=False): + """インスタンスを取得します + + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. + Returns: + Database: DB操作クラスインスタンス + """ + return cls( + username=environment.DB_USERNAME, + password=environment.DB_PASSWORD, + host=environment.DB_HOST, + port=environment.DB_PORT, + schema=environment.DB_SCHEMA, + autocommit=autocommit + ) + + @retry( + wait=wait_exponential( + multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, + min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, + max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS + ), + stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT), + retry_error_cls=DBException + ) + def connect(self): + """ + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) + Raises: + DBException: 接続失敗 + """ + try: + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) + except Exception as e: + raise DBException(e) + + def execute_select(self, select_query: str, parameters=None) -> list[dict]: + """SELECTクエリを実行します。 + + Args: + select_query (str): SELECT文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
+ + Raises: + DBException: DBエラー + + Returns: + list[dict]: カラム名: 値の辞書リスト + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(select_query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(select_query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + result_rows = result.mappings().all() + return result_rows + + def execute(self, query: str, parameters=None) -> CursorResult: + """SQLクエリを実行します。 + + Args: + query (str): SQL文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. + + Raises: + DBException: DBエラー + + Returns: + CursorResult: 取得結果 + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + return result + + def begin(self): + """トランザクションを開始します。""" + if not self.__connection.in_transaction(): + self.__connection.begin() + + def commit(self): + """トランザクションをコミットします""" + if self.__connection.in_transaction(): + self.__connection.commit() + + def rollback(self): + """トランザクションをロールバックします""" + if self.__connection.in_transaction(): + self.__connection.rollback() + + def disconnect(self): + """DB接続を切断します。""" + if self.__connection is not None: + self.__connection.close() + self.__connection = None + + def to_jst(self): + self.execute('SET time_zone = "+9:00"') + + def __execute_with_transaction(self, query: str, parameters: dict): + # トランザクションを開始してクエリを実行する + with self.__connection.begin(): + try: + result = 
self.__connection.execute(text(query), parameters=parameters) + except Exception as e: + self.__connection.rollback() + raise e + # ここでコミットされる + return result diff --git a/ecs/jskult-batch-archive-jsk-data/src/error/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/error/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/error/exceptions.py b/ecs/jskult-batch-archive-jsk-data/src/error/exceptions.py new file mode 100644 index 00000000..2db060ff --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/error/exceptions.py @@ -0,0 +1,6 @@ +class MeDaCaException(Exception): + pass + + +class DBException(MeDaCaException): + pass diff --git a/ecs/jskult-batch-archive-jsk-data/src/logging/get_logger.py b/ecs/jskult-batch-archive-jsk-data/src/logging/get_logger.py new file mode 100644 index 00000000..f36f1199 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/logging/get_logger.py @@ -0,0 +1,37 @@ +import logging + +from src.system_var.environment import LOG_LEVEL + +# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する +for name in ["boto3", "botocore", "s3transfer", "urllib3"]: + logging.getLogger(name).setLevel(logging.WARNING) + + +def get_logger(log_name: str) -> logging.Logger: + """一意のログ出力モジュールを取得します。 + + Args: + log_name (str): ロガー名 + + Returns: + _type_: _description_ + """ + logger = logging.getLogger(log_name) + level = logging.getLevelName(LOG_LEVEL) + if not isinstance(level, int): + level = logging.INFO + logger.setLevel(level) + + if not logger.hasHandlers(): + handler = logging.StreamHandler() + logger.addHandler(handler) + + formatter = logging.Formatter( + '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', + '%Y-%m-%d %H:%M:%S' + ) + + for handler in logger.handlers: + handler.setFormatter(formatter) + + return logger diff --git a/ecs/jskult-batch-archive-jsk-data/src/system_var/__init__.py b/ecs/jskult-batch-archive-jsk-data/src/system_var/__init__.py new file mode 100644 index 
00000000..e69de29b diff --git a/ecs/jskult-batch-archive-jsk-data/src/system_var/constants.py b/ecs/jskult-batch-archive-jsk-data/src/system_var/constants.py new file mode 100644 index 00000000..8a555af3 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/system_var/constants.py @@ -0,0 +1,2 @@ +# バッチ正常終了コード +BATCH_EXIT_CODE_SUCCESS = 0 diff --git a/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py b/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py new file mode 100644 index 00000000..249aa4f0 --- /dev/null +++ b/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py @@ -0,0 +1,18 @@ +import os + +# Database +DB_HOST = os.environ['DB_HOST'] +DB_PORT = int(os.environ['DB_PORT']) +DB_USERNAME = os.environ['DB_USERNAME'] +DB_PASSWORD = os.environ['DB_PASSWORD'] +DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS +JSKULT_ARCHIVE_BUCKET = os.environ['JSKULT_ARCHIVE_BUCKET'] + +# 初期値がある環境変数 +LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') +DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) From e9f6a90c01ad67badff51dcb1a4962464f1ca147 Mon Sep 17 00:00:00 2001 From: yono Date: Mon, 26 May 2025 19:00:27 +0900 Subject: [PATCH 07/30] feat: numpy = "==2.2.*" --- ecs/jskult-webapp/Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/Pipfile b/ecs/jskult-webapp/Pipfile index 7143dcef..d74b9920 100644 --- a/ecs/jskult-webapp/Pipfile +++ b/ecs/jskult-webapp/Pipfile @@ -26,7 +26,7 @@ openpyxl = "*" xlrd = "*" sqlalchemy = "==2.*" mojimoji = "*" -numpy = "==2.0.*" +numpy = "==2.2.*" [dev-packages] autopep8 = "*" From 705612032af459bca3d563bce68b929cd6defea0 Mon Sep 17 
00:00:00 2001 From: yono Date: Mon, 26 May 2025 19:20:31 +0900 Subject: [PATCH 08/30] =?UTF-8?q?feat:=20numpy=20=3D=20"=3D=3D2.2.*"?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=E3=82=B3=E3=83=9F=E3=83=83=E3=83=88=E3=81=97?= =?UTF-8?q?=E5=BF=98=E3=82=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/Pipfile.lock | 276 ++++++++++++++------------------- 1 file changed, 120 insertions(+), 156 deletions(-) diff --git a/ecs/jskult-webapp/Pipfile.lock b/ecs/jskult-webapp/Pipfile.lock index 3857abac..e6120ba5 100644 --- a/ecs/jskult-webapp/Pipfile.lock +++ b/ecs/jskult-webapp/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d029e060f273e27553163c9e6d72f59c96bab17641440e68aba9a9b10cbd87dd" + "sha256": "5ce8ef42345c4fd4dad84cb149002b3c9e0eda0d57b189b10284495378c8f499" }, "pipfile-spec": 6, "requires": { @@ -34,20 +34,20 @@ }, "boto3": { "hashes": [ - "sha256:37e4b6b7f77f4cc476ea82eb76a502a289bb750eee96f7d07ec9bcec6592191a", - "sha256:417d0328fd3394ffb1c1f400d4277d45b0b86f48d2f088a02306474969344a47" + "sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0", + "sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==1.38.21" + "version": "==1.38.23" }, "botocore": { "hashes": [ - "sha256:08d5e9c00e5cc9e0ae0e60570846011789dc7f1d4ea094b3f3e3f3ae1ff2063a", - "sha256:567b4d338114174d0b41857002a4b1e8efb68f1654ed9f3ec6c34ebdef5e9eaf" + "sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a", + "sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457" ], "markers": "python_version >= '3.9'", - "version": "==1.38.21" + "version": "==1.38.23" }, "certifi": { "hashes": [ @@ -230,54 +230,54 @@ }, "click": { "hashes": [ - "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", - "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a" + 
"sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", + "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b" ], - "markers": "python_version >= '3.7'", - "version": "==8.1.8" + "markers": "python_version >= '3.10'", + "version": "==8.2.1" }, "cryptography": { "hashes": [ - "sha256:057723b79752a142efbc609e90b0dff27b0361ccbee3bd48312d70f5cdf53b78", - "sha256:05c2385b1f5c89a17df19900cfb1345115a77168f5ed44bdf6fd3de1ce5cc65b", - "sha256:08281de408e7eb71ba3cd5098709a356bfdf65eebd7ee7633c3610f0aa80d79b", - "sha256:10d68763892a7b19c22508ab57799c4423c7c8cd61d7eee4c5a6a55a46511949", - "sha256:1655d3a76e3dedb683c982a6c3a2cbfae2d08f47a48ec5a3d58db52b3d29ea6f", - "sha256:18f8084b7ca3ce1b8d38bdfe33c48116edf9a08b4d056ef4a96dceaa36d8d965", - "sha256:2cb03a944a1a412724d15a7c051d50e63a868031f26b6a312f2016965b661942", - "sha256:4142e20c29224cec63e9e32eb1e6014fb285fe39b7be66b3564ca978a3a8afe9", - "sha256:463096533acd5097f8751115bc600b0b64620c4aafcac10c6d0041e6e68f88fe", - "sha256:48caa55c528617fa6db1a9c3bf2e37ccb31b73e098ac2b71408d1f2db551dde4", - "sha256:49af56491473231159c98c2c26f1a8f3799a60e5cf0e872d00745b858ddac9d2", - "sha256:4cc31c66411e14dd70e2f384a9204a859dc25b05e1f303df0f5326691061b839", - "sha256:501de1296b2041dccf2115e3c7d4947430585601b251b140970ce255c5cfb985", - "sha256:59c0c8f043dd376bbd9d4f636223836aed50431af4c5a467ed9bf61520294627", - "sha256:614bca7c6ed0d8ad1dce683a6289afae1f880675b4090878a0136c3da16bc693", - "sha256:61a8b1bbddd9332917485b2453d1de49f142e6334ce1d97b7916d5a85d179c84", - "sha256:7429936146063bd1b2cfc54f0e04016b90ee9b1c908a7bed0800049cbace70eb", - "sha256:7c73968fbb7698a4c5d6160859db560d3aac160edde89c751edd5a8bc6560c88", - "sha256:80303ee6a02ef38c4253160446cbeb5c400c07e01d4ddbd4ff722a89b736d95a", - "sha256:965611880c3fa8e504b7458484c0697e00ae6e937279cd6734fdaa2bc954dc49", - "sha256:9a900036b42f7324df7c7ad9569eb92ba0b613cf699160dd9c2154b24fd02f8e", - 
"sha256:9cfd1399064b13043082c660ddd97a0358e41c8b0dc7b77c1243e013d305c344", - "sha256:a8ec324711596fbf21837d3a5db543937dd84597d364769b46e0102250023f77", - "sha256:a9727a21957d3327cf6b7eb5ffc9e4b663909a25fea158e3fcbc49d4cdd7881b", - "sha256:b19f4b28dd2ef2e6d600307fee656c00825a2980c4356a7080bd758d633c3a6f", - "sha256:b2de529027579e43b6dc1f805f467b102fb7d13c1e54c334f1403ee2b37d0059", - "sha256:c0c000c1a09f069632d8a9eb3b610ac029fcc682f1d69b758e625d6ee713f4ed", - "sha256:cdafb86eb673c3211accffbffdb3cdffa3aaafacd14819e0898d23696d18e4d3", - "sha256:d2a90ce2f0f5b695e4785ac07c19a58244092f3c85d57db6d8eb1a2b26d2aad6", - "sha256:d784d57b958ffd07e9e226d17272f9af0c41572557604ca7554214def32c26bf", - "sha256:d891942592789fa0ab71b502550bbadb12f540d7413d7d7c4cef4b02af0f5bc6", - "sha256:dc7693573f16535428183de8fd27f0ca1ca37a51baa0b41dc5ed7b3d68fe80e2", - "sha256:ddb8d01aa900b741d6b7cc585a97aff787175f160ab975e21f880e89d810781a", - "sha256:e328357b6bbf79928363dbf13f4635b7aac0306afb7e5ad24d21d0c5761c3253", - "sha256:e86c8d54cd19a13e9081898b3c24351683fd39d726ecf8e774aaa9d8d96f5f3a", - "sha256:e9e4bdcd70216b08801e267c0b563316b787f957a46e215249921f99288456f9", - "sha256:f169469d04a23282de9d0be349499cb6683b6ff1b68901210faacac9b0c24b7d" + "sha256:00094838ecc7c6594171e8c8a9166124c1197b074cfca23645cee573910d76bc", + "sha256:050ce5209d5072472971e6efbfc8ec5a8f9a841de5a4db0ebd9c2e392cb81972", + "sha256:232954730c362638544758a8160c4ee1b832dc011d2c41a306ad8f7cccc5bb0b", + "sha256:25286aacb947286620a31f78f2ed1a32cded7be5d8b729ba3fb2c988457639e4", + "sha256:2f8f8f0b73b885ddd7f3d8c2b2234a7d3ba49002b0223f58cfde1bedd9563c56", + "sha256:38deed72285c7ed699864f964a3f4cf11ab3fb38e8d39cfcd96710cd2b5bb716", + "sha256:3ad69eeb92a9de9421e1f6685e85a10fbcfb75c833b42cc9bc2ba9fb00da4710", + "sha256:5555365a50efe1f486eed6ac7062c33b97ccef409f5970a0b6f205a7cfab59c8", + "sha256:555e5e2d3a53b4fabeca32835878b2818b3f23966a4efb0d566689777c5a12c8", + "sha256:57a6500d459e8035e813bd8b51b671977fb149a8c95ed814989da682314d0782", 
+ "sha256:5833bb4355cb377ebd880457663a972cd044e7f49585aee39245c0d592904578", + "sha256:71320fbefd05454ef2d457c481ba9a5b0e540f3753354fff6f780927c25d19b0", + "sha256:7573d9eebaeceeb55285205dbbb8753ac1e962af3d9640791d12b36864065e71", + "sha256:92d5f428c1a0439b2040435a1d6bc1b26ebf0af88b093c3628913dd464d13fa1", + "sha256:97787952246a77d77934d41b62fb1b6f3581d83f71b44796a4158d93b8f5c490", + "sha256:9bb5bf55dcb69f7067d80354d0a348368da907345a2c448b0babc4215ccd3497", + "sha256:9cc80ce69032ffa528b5e16d217fa4d8d4bb7d6ba8659c1b4d74a1b0f4235fca", + "sha256:9e4253ed8f5948a3589b3caee7ad9a5bf218ffd16869c516535325fece163dcc", + "sha256:9eda14f049d7f09c2e8fb411dda17dd6b16a3c76a1de5e249188a32aeb92de19", + "sha256:a2b56de3417fd5f48773ad8e91abaa700b678dc7fe1e0c757e1ae340779acf7b", + "sha256:af3f92b1dc25621f5fad065288a44ac790c5798e986a34d393ab27d2b27fcff9", + "sha256:c5edcb90da1843df85292ef3a313513766a78fbbb83f584a5a58fb001a5a9d57", + "sha256:c824c9281cb628015bfc3c59335163d4ca0540d49de4582d6c2637312907e4b1", + "sha256:c92519d242703b675ccefd0f0562eb45e74d438e001f8ab52d628e885751fb06", + "sha256:ca932e11218bcc9ef812aa497cdf669484870ecbcf2d99b765d6c27a86000942", + "sha256:cb6ab89421bc90e0422aca911c69044c2912fc3debb19bb3c1bfe28ee3dff6ab", + "sha256:cfd84777b4b6684955ce86156cfb5e08d75e80dc2585e10d69e47f014f0a5342", + "sha256:d377dde61c5d67eb4311eace661c3efda46c62113ff56bf05e2d679e02aebb5b", + "sha256:d54ae41e6bd70ea23707843021c778f151ca258081586f0cfa31d936ae43d1b2", + "sha256:dc10ec1e9f21f33420cc05214989544727e776286c1c16697178978327b95c9c", + "sha256:ec21313dd335c51d7877baf2972569f40a4291b76a0ce51391523ae358d05899", + "sha256:ec64ee375b5aaa354b2b273c921144a660a511f9df8785e6d1c942967106438e", + "sha256:ed43d396f42028c1f47b5fec012e9e12631266e3825e95c00e3cf94d472dac49", + "sha256:edd6d51869beb7f0d472e902ef231a9b7689508e83880ea16ca3311a00bf5ce7", + "sha256:f22af3c78abfbc7cbcdf2c55d23c3e022e1a462ee2481011d518c7fb9c9f3d65", + 
"sha256:fae1e637f527750811588e4582988932c222f8251f7b7ea93739acb624e1487f", + "sha256:fed5aaca1750e46db870874c9c273cd5182a9e9deb16f06f7bdffdb5c2bde4b9" ], "markers": "python_version >= '3.7' and python_full_version not in '3.9.0, 3.9.1'", - "version": "==45.0.2" + "version": "==45.0.3" }, "et-xmlfile": { "hashes": [ @@ -287,14 +287,6 @@ "markers": "python_version >= '3.8'", "version": "==2.0.0" }, - "exceptiongroup": { - "hashes": [ - "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", - "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88" - ], - "markers": "python_version >= '3.7'", - "version": "==1.3.0" - }, "fastapi": { "hashes": [ "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", @@ -568,55 +560,65 @@ }, "numpy": { "hashes": [ - "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a", - "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195", - "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951", - "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1", - "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c", - "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc", - "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b", - "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd", - "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4", - "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd", - "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318", - "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448", - "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece", - "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d", - "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5", - 
"sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8", - "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57", - "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78", - "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66", - "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a", - "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e", - "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c", - "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa", - "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d", - "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c", - "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729", - "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97", - "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c", - "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9", - "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669", - "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4", - "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73", - "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385", - "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8", - "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c", - "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b", - "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692", - "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15", - "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131", - "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a", - "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326", 
- "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b", - "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded", - "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04", - "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd" + "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", + "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", + "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", + "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", + "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", + "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", + "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", + "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", + "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", + "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", + "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", + "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", + "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", + "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", + "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", + "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", + "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", + "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", + "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", + "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", + "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", + 
"sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", + "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", + "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", + "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", + "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", + "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", + "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", + "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", + "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", + "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", + "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", + "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", + "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", + "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", + "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", + "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", + "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", + "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", + "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", + "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", + "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", + "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", + "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", + "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", + "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", + "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", 
+ "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", + "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", + "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", + "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", + "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", + "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", + "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", + "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8" ], "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==2.0.2" + "markers": "python_version >= '3.10'", + "version": "==2.2.6" }, "openpyxl": { "hashes": [ @@ -694,12 +696,12 @@ }, "pydantic": { "hashes": [ - "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", - "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb" + "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", + "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==2.11.4" + "version": "==2.11.5" }, "pydantic-core": { "hashes": [ @@ -938,11 +940,11 @@ }, "s3transfer": { "hashes": [ - "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", - "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" + "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be", + "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177" ], "markers": "python_version >= '3.9'", - "version": "==0.12.0" + "version": "==0.13.0" }, "six": { "hashes": [ @@ -1034,11 +1036,11 @@ }, "typing-extensions": { "hashes": [ - "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", - "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" + 
"sha256:6cd49c8b914bb3869a16ed9d1001e3d0ff1d84fae4838076fe3b361ab8b32b65", + "sha256:90196079d79b4658568e177f50c24c327b73a85e664c0af9f3937e2015b65956" ], - "markers": "python_version >= '3.8'", - "version": "==4.13.2" + "markers": "python_version >= '3.9'", + "version": "==4.14.0rc1" }, "typing-inspection": { "hashes": [ @@ -1058,11 +1060,11 @@ }, "urllib3": { "hashes": [ - "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", - "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", + "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.20" + "markers": "python_version >= '3.9'", + "version": "==2.4.0" }, "uvicorn": { "extras": [ @@ -1323,44 +1325,6 @@ ], "markers": "python_version >= '3.9'", "version": "==3.3.2" - }, - "tomli": { - "hashes": [ - "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", - "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", - "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", - "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", - "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", - "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", - "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", - "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", - "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", - "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", - "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", - "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", - 
"sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", - "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", - "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", - "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", - "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", - "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", - "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", - "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", - "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", - "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", - "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", - "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", - "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", - "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", - "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", - "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", - "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", - "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", - "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", - "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7" - ], - "markers": "python_version >= '3.8'", - "version": "==2.2.1" } } } From b24d1b0eb9e7f39ec3e50258bbe06433dbf147d9 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 19:52:09 +0900 Subject: [PATCH 09/30] =?UTF-8?q?=E8=AA=A4=E3=81=A3=E3=81=A6=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0=E3=81=97=E3=81=9F=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=81=AE=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- .../jskult-batch-daily/.dockerignore | 12 - .../jskult-batch-daily/.env.example | 26 -- .../jskult-batch-daily/.gitignore | 10 - .../jskult-batch-daily/.vscode/launch.json | 16 - .../.vscode/recommended_settings.json | 31 -- .../jskult-batch-daily/Dockerfile | 20 - .../jskult-batch-daily/Pipfile | 29 -- .../jskult-batch-daily/Pipfile.lock | 397 ------------------ .../jskult-batch-daily/README.md | 292 ------------- .../jskult-batch-daily/entrypoint.py | 10 - .../jskult-batch-daily/pytest.ini | 3 - .../jskult-batch-daily/src/__init__.py | 0 .../jskult-batch-daily/src/aws/__init__.py | 0 .../jskult-batch-daily/src/aws/s3.py | 185 -------- .../src/batch/common/__init__.py | 0 .../jskult-batch-daily/src/db/__init__.py | 0 .../jskult-batch-daily/src/db/database.py | 195 --------- .../jskult-batch-daily/src/error/__init__.py | 0 .../src/error/exceptions.py | 10 - .../src/jobctrl_dcfInstMergeIo.py | 4 - .../src/logging/get_logger.py | 37 -- .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 17 - .../src/system_var/environment.py | 38 -- 24 files changed, 1332 deletions(-) delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md delete mode 100644 
ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore deleted file mode 100644 index 8b9da402..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore +++ /dev/null @@ -1,12 +0,0 @@ -tests/* -.coverage -.env -.env.example -.report/* -.vscode/* -.pytest_cache/* -*/__pychache__/* -Dockerfile -pytest.ini -README.md -*.sql diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example 
deleted file mode 100644 index 500f843d..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example +++ /dev/null @@ -1,26 +0,0 @@ -DB_HOST=************ -DB_PORT=************ -DB_USERNAME=************ -DB_PASSWORD=************ -DB_SCHEMA=src05 -LOG_LEVEL=INFO -ULTMARC_DATA_BUCKET=**************** -ULTMARC_DATA_FOLDER=recv -JSKULT_BACKUP_BUCKET=**************** -ULTMARC_BACKUP_FOLDER=ultmarc -VJSK_BACKUP_FOLDER=vjsk -JSKULT_CONFIG_BUCKET=********************** -JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt -VJSK_DATA_SEND_FOLDER=send -VJSK_DATA_RECEIVE_FOLDER=recv -VJSK_DATA_BUCKET=************* -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt -JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau -# 卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore deleted file mode 100644 index bd0b37f8..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -.vscode/settings.json -.env - -# python -__pycache__ - -# python test -.pytest_cache -.coverage -.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json deleted file mode 100644 index 9dbaa9c6..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - // IntelliSense を使用して利用可能な属性を学べます。 - // 既存の属性の説明をホバーして表示します。 - // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 - "version": 
"0.2.0", - "configurations": [ - { - "name": "(DEBUG)jskult batch daily", - "type": "python", - "request": "launch", - "program": "entrypoint.py", - "console": "integratedTerminal", - "justMyCode": true - } - ] -} \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json deleted file mode 100644 index 2fde8732..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "[python]": { - "editor.defaultFormatter": null, - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true - } - }, - // 自身の環境に合わせて変えてください - "python.defaultInterpreterPath": "", - "python.linting.lintOnSave": true, - "python.linting.enabled": true, - "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": true, - "python.linting.flake8Args": [ - "--max-line-length=200", - "--ignore=F541" - ], - "python.formatting.provider": "autopep8", - "python.formatting.autopep8Path": "autopep8", - "python.formatting.autopep8Args": [ - "--max-line-length", "200", - "--ignore=F541" - ], - "python.testing.pytestArgs": [ - "tests/batch/" - ], - - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true -} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile deleted file mode 100644 index fc0fde90..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.12-slim-bookworm - -ENV TZ="Asia/Tokyo" -# pythonの標準出力をバッファリングしないフラグ -ENV PYTHONUNBUFFERED=1 -# pythonのバイトコードを生成しないフラグ -ENV PYTHONDONTWRITEBYTECODE=1 - -WORKDIR /usr/src/app -COPY Pipfile Pipfile.lock ./ -RUN \ - apt update -y && \ - pip install pipenv --no-cache-dir && \ - pipenv install --system 
--deploy && \ - pip uninstall -y pipenv virtualenv-clone virtualenv - -COPY src ./src -COPY entrypoint.py entrypoint.py - -CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile deleted file mode 100644 index a40e6c17..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile +++ /dev/null @@ -1,29 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[scripts] -"test:ultmarc" = "pytest tests/batch/ultmarc/" -"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" -"test:vjsk" = "pytest tests/batch/vjsk/" -"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" - -[packages] -boto3 = "*" -PyMySQL = "*" -sqlalchemy = "*" -tenacity = "*" - -[dev-packages] -autopep8 = "*" -flake8 = "*" -pytest = "*" -pytest-cov = "*" -boto3 = "*" - -[requires] -python_version = "3.9" - -[pipenv] -allow_prereleases = true diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock deleted file mode 100644 index 60fdb517..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock +++ /dev/null @@ -1,397 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.9" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "boto3": { - "hashes": [ - "sha256:6633bce2b73284acce1453ca85834c7c5a59e0dbcce1170be461cc079bdcdfcf", - "sha256:668400d13889d2d2fcd66ce785cc0b0fc040681f58a9c7f67daa9149a52b6c63" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==1.38.13" - }, - "botocore": { - 
"hashes": [ - "sha256:22feee15753cd3f9f7179d041604078a1024701497d27b22be7c6707e8d13ccb", - "sha256:de29fee43a1f02787fb5b3756ec09917d5661ed95b2b2d64797ab04196f69e14" - ], - "markers": "python_version >= '3.9'", - "version": "==1.38.13" - }, - "jmespath": { - "hashes": [ - "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", - "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" - ], - "markers": "python_version >= '3.7'", - "version": "==1.0.1" - }, - "pymysql": { - "hashes": [ - "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", - "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" - ], - "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==1.1.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.9.0.post0" - }, - "s3transfer": { - "hashes": [ - "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", - "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" - ], - "markers": "python_version >= '3.9'", - "version": "==0.12.0" - }, - "six": { - "hashes": [ - "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", - "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.17.0" - }, - "sqlalchemy": { - "hashes": [ - "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", - "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", - "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", - "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", - 
"sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", - "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", - "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", - "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", - "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", - "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", - "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", - "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", - "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", - "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", - "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", - "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", - "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", - "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", - "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", - "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", - "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", - "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", - "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", - "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", - "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", - "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", - "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", - "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", - "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", - "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", 
- "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", - "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", - "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", - "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", - "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", - "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", - "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", - "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", - "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", - "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", - "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", - "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", - "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", - "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", - "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", - "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", - "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", - "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", - "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", - "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", - "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", - "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", - "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", - "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", - "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", - 
"sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", - "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" - ], - "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==2.0.40" - }, - "tenacity": { - "hashes": [ - "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", - "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==9.1.2" - }, - "typing-extensions": { - "hashes": [ - "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", - "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" - ], - "markers": "python_version >= '3.8'", - "version": "==4.13.2" - }, - "urllib3": { - "hashes": [ - "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", - "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.20" - } - }, - "develop": { - "autopep8": { - "hashes": [ - "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda", - "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.3.1" - }, - "boto3": { - "hashes": [ - "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680", - "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.35.57" - }, - "botocore": { - "hashes": [ - "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34", - "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327" - ], - "markers": "python_version >= '3.8'", - "version": "==1.35.57" - }, - "coverage": { - "extras": [ - "toml" - ], - "hashes": [ - 
"sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376", - "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", - "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111", - "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172", - "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491", - "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", - "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", - "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", - "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", - "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c", - "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", - "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", - "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", - "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0", - "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", - "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", - "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", - "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", - "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", - "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e", - "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", - "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", - "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", - "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea", - "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", - "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", 
- "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07", - "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", - "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa", - "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901", - "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", - "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", - "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0", - "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", - "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", - "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", - "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51", - "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", - "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3", - "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", - "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076", - "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", - "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", - "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", - "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", - "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", - "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", - "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09", - "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", - "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", - "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f", - 
"sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72", - "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a", - "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", - "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b", - "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", - "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", - "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", - "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", - "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", - "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", - "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858" - ], - "markers": "python_version >= '3.9'", - "version": "==7.6.4" - }, - "exceptiongroup": { - "hashes": [ - "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", - "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" - ], - "markers": "python_version < '3.11'", - "version": "==1.2.2" - }, - "flake8": { - "hashes": [ - "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", - "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" - ], - "index": "pypi", - "markers": "python_full_version >= '3.8.1'", - "version": "==7.1.1" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "jmespath": { - "hashes": [ - "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", - "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" - ], - "markers": "python_version >= '3.7'", - "version": "==1.0.1" - }, - "mccabe": { - "hashes": [ 
- "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", - "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" - ], - "markers": "python_version >= '3.6'", - "version": "==0.7.0" - }, - "packaging": { - "hashes": [ - "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", - "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" - ], - "markers": "python_version >= '3.8'", - "version": "==24.2" - }, - "pluggy": { - "hashes": [ - "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", - "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" - ], - "markers": "python_version >= '3.8'", - "version": "==1.5.0" - }, - "pycodestyle": { - "hashes": [ - "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", - "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" - ], - "markers": "python_version >= '3.8'", - "version": "==2.12.1" - }, - "pyflakes": { - "hashes": [ - "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", - "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" - ], - "markers": "python_version >= '3.8'", - "version": "==3.2.0" - }, - "pytest": { - "hashes": [ - "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", - "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==8.3.3" - }, - "pytest-cov": { - "hashes": [ - "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", - "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==6.0.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - ], - 
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.0.post0" - }, - "s3transfer": { - "hashes": [ - "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d", - "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c" - ], - "markers": "python_version >= '3.8'", - "version": "==0.10.3" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "tomli": { - "hashes": [ - "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", - "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.2" - }, - "urllib3": { - "hashes": [ - "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", - "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" - ], - "markers": "python_version < '3.10'", - "version": "==1.26.20" - } - } -} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md deleted file mode 100644 index 144cf9b8..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md +++ /dev/null @@ -1,292 +0,0 @@ -# 実消化&アルトマーク 日次バッチ - -## 概要 - -実消化&アルトマークの日次バッチ処理。 - -## 環境情報 - -- Python 3.9 -- MySQL 8.23 -- VSCode - -## 環境構築 - -- Python の構築 - - - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 - - 「Pipenv の導入」までを行っておくこと - - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する - - `pipenv install --dev --python ` - - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく - -- MySQL の環境構築 - - Windows の場合、以下のリンクからダウンロードする - - - - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 - - 
「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると - - `docker-compose up -d` - - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと - - データを投入する - - 立ち上げたデータベースに「src05」スキーマを作成する - - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する - - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` -- 環境変数の設定 - - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する - - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください -- VSCode の設定 - - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する - - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する - -## 実行 - -- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 -- 「entrypoint.py」が、バッチ処理のエントリーポイント。 -- 実際の処理は、「src/jobctrl_daily.py」で行っている。 - -## 単体テスト(アルトマーク取込処理) - -アルトマーク取込処理は、単体テストコードを使用してテスト自動化を行う - -### テスト準備 - -- VSCodeで以下の拡張機能をインストールする - - Python - - Python Test Explorer for Visual Studio Code - - Test Explorer UI -- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く -- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する -- 現在のワークスペースを選び、「pytest」を選択する -- 「tests」フォルダを選択する -- バックグランドで、pytest モジュールのインストールが始まれば成功 - -### テスト用のサブコマンド一覧 - -- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる - - `Pipfile`内の「scripts」セクションに宣言されている - -| コマンド | 概要 | -| ---------------- | -------------------------------------------------------------------------------------------- | -| test:ultmarc | tests/batch/ultmarc フォルダ配下のユニットテストを実行する | -| test:ultmarc:cov | tests/batch/ultmarc フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | - -### テスト共通関数の仕様 - -- tests/testing_utility.py内の共通関数の仕様について記載する - -#### create_ultmarc_test_data_from_csv - -- 引数 - - file_path: str -- 戻り値 - - src.batch.ultmarc.datfile.DatFileのインスタンス -- 処理概要 - - 
CSVファイルから、アルトマークのインプットデータを作成する - - データフォーマットは以下 - - 文字コード: UTF-8 - - 改行コード:LF - - ヘッダ: なし - - 値囲い: ダブルクォート - - アルトマークデータと文字コードを合わせるため、指定されたファイルを一時ディレクトリに、文字コード「cp932」で書き出してからテストデータとして読み込む - - テストデータそのものはUTF-8の文字コードで作成すること - -### create_db_data_from_csv - -- 引数 - - file_path: str -- 戻り値 - - テーブルのレコードに相当する辞書のリスト -- 処理概要 - - CSVファイルから、アルトマークテーブルに相当するテストデータを作成する - - テストの初期データ、期待値データを作成するのに利用する - - データフォーマットは以下 - - 文字コード: UTF-8 - - 改行コード:LF - - ヘッダ: なし - - 値囲い: ダブルクォート - - ファイル内の、以下の形式のデータを自動的に変換する - - `NULL` - - `None`に変換される - - `yyyy-mm-dd`もしくは、`yyyy/mm/dd`の文字 - - Date型に変換される - - `yyyy-mm-dd hh:mm:ss`もしくは、`yyyy/mm/dd hh:mm:ss`の文字 - - DateTime型に変換される - -### create_insert_sql_with_parameter - -- 引数 - - table_name: str テーブル名 - - column_names: list[str] カラム名のリスト - - test_data: list[str]: 値のリスト -- 戻り値 - - INSERT文とバインドパラメータ辞書 -- 処理概要 - - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でINSERT文、バインドパラメータを作成する - -### create_delete_sql_with_parameter - -- 引数 - - table_name: str テーブル名 - - column_names: list[str] カラム名のリスト - - test_data: list[str]: 値のリスト -- 戻り値 - - DELETE文とバインドパラメータ辞書 -- 処理概要 - - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でDELETE文、バインドパラメータを作成する - -### create_ultmarc_table_mapper_sut - -- 引数 - - line: src.batch.ultmarc.datfile.DatFileLine アルトマークデータファイルの1行 - - db: src.db.Database データベース操作クラス -- 戻り値 - - マッパークラス -- 処理概要 - - src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory.UltmarcTableMapperFactoryを通じて、テスト対象のマッパークラスを生成して返す - -### assert_table_results - -- 引数 - - actual_rows: list[dict] テスト結果の辞書リスト - - expect_rows: list[dict] 期待値の辞書リスト - - ignore_col_name: list 比較を無視するDBのカラム名. Default None. 
-- 戻り値 - - なし -- 処理概要 - - テスト結果データと期待値データを突き合わせ、期待値どおりとなっているかを確認する - - ignore_col_nameに指定したカラムは、呼び出し元のテストコード内で個別に突き合わせする - - -## 単体テスト(実消化データ取込処理) - -実消化データは、単体テストコードを使用してテスト自動化を行う - -### テスト準備 - -※単体テスト(アルトマーク取込処理)と同じ - -### テスト用のサブコマンド一覧 - -- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる - - `Pipfile`内の「scripts」セクションに宣言されている - -| コマンド | 概要 | -| ---------------- | -------------------------------------------------------------------------------------------- | -| test:vjsk | tests/batch/vjsk フォルダ配下のユニットテストを実行する | -| test:vjsk:cov | tests/batch/vjsk フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | - -### テスト共通関数の仕様 - -- tests/testing_vjsk_utility.py内の共通関数の仕様について記載する - -#### create_vjsk_assertion_list - -- 概要 - - DB登録期待値リストを作成する -- Args: - - file_path (str): DB登録期待値ファイル(tsvファイル)のパス - - memo: ※DB登録期待値ファイルの前提 - - memo: 受領データファイルと同じ - - memo: BOM付きtsv形式 - - memo: 一行目はカラム名になっているヘッダ行 - - Returns: - - List(dict) DB登録期待値辞書リスト - - -## フォルダ構成 - -```text -. -├── Pipfile -- Pythonモジュールの依存関係を管理するファイル -├── Dockerfile -- Dockerイメージを作成するためのファイル -├── Pipfile -- Pythonモジュールの依存関係を管理するファイル -├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル -├── README.md -- 当ファイル -├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル -├── src -- ソースコードの保管場所 -│ ├── aws -- AWS関連処理 -│ │ └── s3.py -- S3クライアントとバケット処理 -│ ├── batch -- バッチ処理関連ソース置き場 -│ │ ├── batch_functions.py -- バッチ処理共通関数置き場 -│ │ ├── datachange -- 実績洗替関連ソース置き場 -│ │ │ └── emp_chg_inst_lau.py -- 施設担当者マスタ洗替 -│ │ └── jissekiaraigae.py -- 実績洗替処理のエントリーポイント -│ │ └── ultmarc -- アルトマーク関連処理 -│ │ ├── ultmarc_process.py -- アルトマーク関連処理のエントリーポイント -│ │ ├── datfile.py -- データファイル読込 -│ │ └── utmp_tables -- アルトマークテーブルへの登録関連 -│ │ ├── table_mapper -- テーブルへのデータマッピング処理 -│ │ │ ├── concrete -- テーブルマッパーのマッピング処理を行う具象クラス(全テーブル分) -│ │ │ │ ├── com_alma_mapper.py -│ │ │ │ ├── ... -│ │ │ │ └── null_mapper.py -- テスト用、空振りするマッパークラス -│ │ │ └── ultmarc_table_mapper.py -- テーブルへの登録処理を行う抽象クラス -│ │ ├── tables -- アルトマークデータのDTOクラス(全テーブル分) -│ │ │ ├── com_alma.py -│ │ │ ├── ... 
-│ │ │ └── ultmarc_table.py -- アルトマークテーブルの抽象クラス -│ │ └── ultmarc_table_mapper_factory.py -- テーブルマッパー生成クラス -│ ├── db -│ │ └── database.py -- データベース操作共通処理 -│ ├── error -│ │ └── exceptions.py -- カスタム例外 -│ ├── jobctrl_daily.py -- 日次バッチ処理のエントリーポイント。「entrypoint.py」 から呼ばれる。 -│ ├── logging -│ │ └── get_logger.py -- ログ出力の共通処理 -│ ├── system_var -│ │ └── environment.py -- 環境変数 -│ └── time -│ └── elapsed_time.py -- 実行時間計測用 -└── tests -- ユニットテストのルートディレクト - ├── batch - │ └── ultmarc -- アルトマーク関連のユニットテストを格納する - │ │ └── utmp_tables - │ │ └── table_mapper -- 以下、マッパークラス単位でフォルダを切る - │ │ └── com_alma - │ │ ├── test_com_alma_mapper.py -- テストコード本体 - │ │ ├── com_alma_insert.csv -- S3に配置される想定のテストCSVデータ。ケースごとに用意する。 - │ │ ... - │ │ ├── db_com_alma_before_update.csv -- テスト時に事前にDBに登録しておくデータ。CSVで用意する。 - │ │ ... - │ │ ├── expect_com_alma_insert.csv -- テストの期待値データ。CSVで用意する。 - │ │ ... - │ └─vjsk -- 実消化データ取込処理関連のユニットテストを格納する - │ │ - │ ├─vjsk_file_check -- 受領ファイルチェック処理関連のユニットテストを格納する - │ │ ├─conftest.py -- テスト内で共通利用できるフィクスチャの宣言 - │ │ └─test_vjsk_file_check.py -- テストクラス本体 - │ │ - │ └─vjsk_load -- 受領データ登録処理関連のユニットテストを格納する - │ │ conftest.py -- テスト内で共通利用できるフィクスチャの宣言 - │ │ test_vjsk_load.py -- テストクラス本体 - │ │ - │ └─testdata -- テストモジュールが使用するテストデータを格納する - │ │ bio_slip_data_202304280000.tsv -- 正常ケースの単体確認用 - │ │ ... -- *20230428* は新規4件の登録確認用 - │ │ whs_mst_202304290000.tsv -- *20230429* は更新2件+追加新規2件の登録確認用 - │ │ - │ ├─NoData -- 正常ケースの単体確認用 - │ │ bio_slip_data_nodatarecord.tsv -- ヘッダ行のみでデータが0件の動作確認用 - │ │ ... - │ │ whs_mst_nodatarecord.tsv - │ │ - │ ├─TestFormatErrorFile -- 異常ケースの単体確認用 - │ │ bio_slip_data_formaterror.tsv -- 末尾行のタブ数が想定と異なる(ファイル欠落がある)ときの動作確認用 - │ │ ... - │ │ whs_mst_formaterror.tsv - │ │ - │ ├─TestImportFileToDb -- 正常ケースの単体確認用 - │ │ bio_slip_data_202304270000.gz -- 対向元システムから送られてきた状態(gz圧縮)の受領データファイルの動作確認用 - │ │ ... - │ │ whs_mst_202304270000.gz - │ │ - │ └─UnzipError -- 異常ケースの単体確認用 - │ bio_slip_data_202304270000.gz -- gz圧縮ファイルが解凍できないときの動作確認用 - │ ... 
- │ whs_mst_202304270000.gz - │ - ├── conftest.py -- テスト内で共通利用できるフィクスチャを宣言する(執筆時点ではDBのみ) - ├── testing_utility.py -- テストの共通関数 - └── testing_vjsk_utility.py -- テストの共通関数(実消化データ取込処理関連) -``` - diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py deleted file mode 100644 index 62891bf7..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py +++ /dev/null @@ -1,10 +0,0 @@ -"""実消化&アルトマーク DCF施設削除新規マスタ作成のエントリーポイント""" -from src import jobctrl_jobctrl_dcfInstMergeIo - -if __name__ == '__main__': - try: - exit(jobctrl_jobctrl_dcfInstMergeIo.exec()) - except Exception: - # エラーが起きても、正常系のコードで返す。 - # エラーが起きた事実はbatch_process内でログを出す。 - exit(0) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini deleted file mode 100644 index 5dbe2661..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -log_format = %(levelname)s %(asctime)s %(message)s -log_date_format = %Y-%m-%d %H:%M:%S diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py deleted file mode 100644 index 6203868d..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py +++ /dev/null @@ -1,185 +0,0 @@ -import gzip -import os -import os.path as path -import shutil -import tempfile - -import boto3 - -from src.system_var 
import environment - - -class S3Client: - __s3_client = boto3.client('s3') - _bucket_name: str - - def list_objects(self, bucket_name: str, folder_name: str): - response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) - if response['KeyCount'] == 0: - return [] - contents = response['Contents'] - # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする - objects = [{'filename': content['Key'], 'size': content['Size']} - for content in contents if not content['Key'].endswith('/')] - return objects - - def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: - copy_source = {'Bucket': src_bucket, 'Key': src_key} - self.__s3_client.copy(copy_source, dest_bucket, dest_key) - return - - def download_file(self, bucket_name: str, file_key: str, file): - self.__s3_client.download_fileobj( - Bucket=bucket_name, - Key=file_key, - Fileobj=file - ) - return - - def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): - self.__s3_client.upload_file( - local_file_path, - Bucket=bucket_name, - Key=file_key - ) - - def delete_file(self, bucket_name: str, file_key: str): - self.__s3_client.delete_object( - Bucket=bucket_name, - Key=file_key - ) - - -class S3Bucket(): - _s3_client = S3Client() - _bucket_name: str = None - - -class UltmarcBucket(S3Bucket): - _bucket_name = environment.ULTMARC_DATA_BUCKET - _folder = environment.ULTMARC_DATA_FOLDER - - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def download_dat_file(self, dat_filename: str): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, dat_filename, f) - f.seek(0) - return temporary_file_path - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - ultmarc_backup_bucket = 
UltmarcBackupBucket() - backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) - # コピー元のファイルを削除 - self._s3_client.delete_file(self._bucket_name, dat_file_key) - - -class ConfigBucket(S3Bucket): - _bucket_name = environment.JSKULT_CONFIG_BUCKET - - def download_holiday_list(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) - holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) - f.seek(0) - return temporary_file_path - - def download_wholesaler_stock_input_day_list(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) - f.seek(0) - return temporary_file_path - - def download_ultmarc_hex_convert_config(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) - hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) - f.seek(0) - return temporary_file_path - - -class 
JskUltBackupBucket(S3Bucket): - _bucket_name = environment.JSKULT_BACKUP_BUCKET - - -class UltmarcBackupBucket(JskUltBackupBucket): - _folder = environment.ULTMARC_BACKUP_FOLDER - - -class VjskBackupBucket(JskUltBackupBucket): - _folder = environment.VJSK_BACKUP_FOLDER - - -class VjskReceiveBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER - - _s3_file_list = None - - def get_s3_file_list(self): - self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) - return self._s3_file_list - - def download_data_file(self, data_filename: str): - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, data_filename, f) - f.seek(0) - return temporary_file_path - - def unzip_data_file(self, filename: str): - temp_dir = os.path.dirname(filename) - decompress_filename = os.path.basename(filename).replace('.gz', '') - decompress_file_path = os.path.join(temp_dir, decompress_filename) - with gzip.open(filename, 'rb') as gz: - with open(decompress_file_path, 'wb') as decompressed_file: - shutil.copyfileobj(gz, decompressed_file) - - ret = [decompress_file_path] - return ret - - def backup_dat_file(self, target_files: list, datetime_key: str): - jskult_backup_bucket = VjskBackupBucket() - for target_file in target_files: - backup_from_file_path = target_file.get("filename") - backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") - backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' - self._s3_client.copy(self._bucket_name, backup_from_file_path, - jskult_backup_bucket._bucket_name, backup_key) - self._s3_client.delete_file(self._bucket_name, backup_from_file_path) - - -class VjskSendBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - 
_send_folder = environment.VJSK_DATA_SEND_FOLDER - - def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): - # S3バケットにファイルを移動 - csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' - s3_client = S3Client() - s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) - return - - def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - vjsk_backup_bucket = VjskBackupBucket() - dat_key = f'{self._send_folder}/{dat_file_key}' - backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py deleted file mode 100644 index 5ddaba4e..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py +++ /dev/null @@ -1,195 +0,0 @@ -from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, - create_engine, text) -from sqlalchemy.engine.url import URL -from tenacity import retry, stop_after_attempt, wait_exponential - -from src.error.exceptions import DBException -from src.logging.get_logger import get_logger -from src.system_var import environment - -logger = get_logger(__name__) - - -class Database: - """データベース操作クラス""" - __connection: Connection = None - __transactional_engine: Engine = None - 
__autocommit_engine: Engine = None - __host: str = None - __port: str = None - __username: str = None - __password: str = None - __schema: str = None - __autocommit: bool = None - __connection_string: str = None - - def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: - """このクラスの新たなインスタンスを初期化します - - Args: - username (str): DBユーザー名 - password (str): DBパスワード - host (str): DBホスト名 - port (int): DBポート - schema (str): DBスキーマ名 - autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. - """ - self.__username = username - self.__password = password - self.__host = host - self.__port = int(port) - self.__schema = schema - self.__autocommit = autocommit - - self.__connection_string = URL.create( - drivername='mysql+pymysql', - username=self.__username, - password=self.__password, - host=self.__host, - port=self.__port, - database=self.__schema, - query={"charset": "utf8mb4", "local_infile": "1"}, - ) - - self.__transactional_engine = create_engine( - self.__connection_string, - pool_timeout=5, - poolclass=QueuePool - ) - - self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') - - @classmethod - def get_instance(cls, autocommit=False): - """インスタンスを取得します - - Args: - autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
- Returns: - Database: DB操作クラスインスタンス - """ - return cls( - username=environment.DB_USERNAME, - password=environment.DB_PASSWORD, - host=environment.DB_HOST, - port=environment.DB_PORT, - schema=environment.DB_SCHEMA, - autocommit=autocommit - ) - - @retry( - wait=wait_exponential( - multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, - min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, - max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS - ), - stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT), - retry_error_cls=DBException - ) - def connect(self): - """ - DBに接続します。接続に失敗した場合、リトライします。\n - インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) - Raises: - DBException: 接続失敗 - """ - try: - self.__connection = ( - self.__autocommit_engine.connect() if self.__autocommit is True - else self.__transactional_engine.connect()) - except Exception as e: - raise DBException(e) - - def execute_select(self, select_query: str, parameters=None) -> list[dict]: - """SELECTクエリを実行します。 - - Args: - select_query (str): SELECT文 - parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. - - Raises: - DBException: DBエラー - - Returns: - list[dict]: カラム名: 値の辞書リスト - """ - if self.__connection is None: - raise DBException('DBに接続していません') - - result = None - try: - # トランザクションが開始している場合は、トランザクションを引き継ぐ - if self.__connection.in_transaction(): - result = self.__connection.execute(text(select_query), parameters) - else: - # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 - result = self.__execute_with_transaction(select_query, parameters) - except Exception as e: - raise DBException(f'SQL Error: {e}') - - result_rows = result.mappings().all() - return result_rows - - def execute(self, query: str, parameters=None) -> CursorResult: - """SQLクエリを実行します。 - - Args: - query (str): SQL文 - parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
- - Raises: - DBException: DBエラー - - Returns: - CursorResult: 取得結果 - """ - if self.__connection is None: - raise DBException('DBに接続していません') - - result = None - try: - # トランザクションが開始している場合は、トランザクションを引き継ぐ - if self.__connection.in_transaction(): - result = self.__connection.execute(text(query), parameters) - else: - # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 - result = self.__execute_with_transaction(query, parameters) - except Exception as e: - raise DBException(f'SQL Error: {e}') - - return result - - def begin(self): - """トランザクションを開始します。""" - if not self.__connection.in_transaction(): - self.__connection.begin() - - def commit(self): - """トランザクションをコミットします""" - if self.__connection.in_transaction(): - self.__connection.commit() - - def rollback(self): - """トランザクションをロールバックします""" - if self.__connection.in_transaction(): - self.__connection.rollback() - - def disconnect(self): - """DB接続を切断します。""" - if self.__connection is not None: - self.__connection.close() - self.__connection = None - - def to_jst(self): - self.execute('SET time_zone = "+9:00"') - - def __execute_with_transaction(self, query: str, parameters: dict): - # トランザクションを開始してクエリを実行する - with self.__connection.begin(): - try: - result = self.__connection.execute(text(query), parameters=parameters) - except Exception as e: - self.__connection.rollback() - raise e - # ここでコミットされる - return result diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py deleted file mode 100644 index 055c24f6..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py +++ /dev/null @@ -1,10 +0,0 @@ -class MeDaCaException(Exception): - pass - - -class 
DBException(MeDaCaException): - pass - - -class BatchOperationException(MeDaCaException): - pass diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py deleted file mode 100644 index 9c29840c..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py +++ /dev/null @@ -1,4 +0,0 @@ -"""実消化&アルトマーク DCF施設削除新規マスタ作成""" - -def exec(): - pass \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py deleted file mode 100644 index f36f1199..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging - -from src.system_var.environment import LOG_LEVEL - -# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する -for name in ["boto3", "botocore", "s3transfer", "urllib3"]: - logging.getLogger(name).setLevel(logging.WARNING) - - -def get_logger(log_name: str) -> logging.Logger: - """一意のログ出力モジュールを取得します。 - - Args: - log_name (str): ロガー名 - - Returns: - _type_: _description_ - """ - logger = logging.getLogger(log_name) - level = logging.getLevelName(LOG_LEVEL) - if not isinstance(level, int): - level = logging.INFO - logger.setLevel(level) - - if not logger.hasHandlers(): - handler = logging.StreamHandler() - logger.addHandler(handler) - - formatter = logging.Formatter( - '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', - '%Y-%m-%d %H:%M:%S' - ) - - for handler in logger.handlers: - handler.setFormatter(formatter) - - return logger diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py deleted file mode 100644 index 8a0ccbb3..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py +++ /dev/null @@ -1,17 +0,0 @@ -# バッチ正常終了コード -BATCH_EXIT_CODE_SUCCESS = 0 - -# バッチ処理中フラグ:未処理 -BATCH_ACTF_BATCH_UNPROCESSED = '0' -# バッチ処理中フラグ:処理中 -BATCH_ACTF_BATCH_IN_PROCESSING = '1' -# dump取得状態区分:未処理 -DUMP_STATUS_KBN_UNPROCESSED = '0' -# dump取得状態区分:dump取得正常終了 -DUMP_STATUS_KBN_COMPLETE = '2' - -# カレンダーファイルのコメントシンボル -CALENDAR_COMMENT_SYMBOL = '#' - -# 月曜日(datetime.weekday()で月曜日を表す数字) -WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py deleted file mode 100644 index 0af7a118..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py +++ /dev/null @@ -1,38 +0,0 @@ -import os - -# Database -DB_HOST = os.environ['DB_HOST'] -DB_PORT = int(os.environ['DB_PORT']) -DB_USERNAME = os.environ['DB_USERNAME'] -DB_PASSWORD = os.environ['DB_PASSWORD'] -DB_SCHEMA = os.environ['DB_SCHEMA'] - -# AWS -ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] -ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] -JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] -ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] -VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] -JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] -JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] -VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] -VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = 
os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] -JSKULT_CONFIG_CONVERT_FOLDER = os.environ['JSKULT_CONFIG_CONVERT_FOLDER'] -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME = os.environ['JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME'] -VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER'] - -# 初期値がある環境変数 -LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') -DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) -DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) - -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] -# 卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] From cd1e663b4a133daf948114efbd385ba4ad37d656 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 19:54:23 +0900 Subject: [PATCH 10/30] =?UTF-8?q?DCF=E6=96=BD=E8=A8=AD=E5=89=8A=E9=99=A4?= =?UTF-8?q?=E6=96=B0=E8=A6=8F=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AE=E4=BD=9C?= =?UTF-8?q?=E6=88=90=E3=81=A8DCF=E6=96=BD=E8=A8=AD=E7=B5=B1=E5=90=88?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF(DCF=5FINST=5FMERGE)=E3=81=AE?= =?UTF-8?q?=E5=8F=96=E3=82=8A=E8=BE=BC=E3=81=BF=E3=80=81=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=81=AE=E5=87=BA=E5=8A=9B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge_io.py | 191 +++++++++++++++++- 1 file changed, 188 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index 710380c9..fcb104bc 100644 --- 
a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,10 +1,195 @@ -from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +import os +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager +from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.logging.get_logger import get_logger + +logger = get_logger('DCF削除新規マスタ作成') + +LOG_LEVEL = os.environ["LOG_LEVEL"] +PROCESS_NAME = os.environ["PROCESS_NAME"] +POST_PROCESS = os.environ["POST_PROCESS"] +MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +JSK_DATA_SEND_FOLDER = os.environ["JSK_DATA_SEND_FOLDER"] +JSK_BACKUP_FOLDER = os.environ["JSK_BACKUP_FOLDER"] +TRANSFER_RESULT_FOLDER = os.environ["TRANSFER_RESULT_FOLDER"] +DCF_INST_MERGE_SEND_FILE_NAME = os.environ["DCF_INST_MERGE_SEND_FILE_NAME"] +DB_CONNECTION_MAX_RETRY_ATTEMPT = os.environ["DB_CONNECTION_MAX_RETRY_ATTEMPT"] +DB_CONNECTION_RETRY_INTERVAL_INIT = os.environ["DB_CONNECTION_RETRY_INTERVAL_INIT"] +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS"] +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS"] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): super().__init__() + def execute(self): - # TODO: ここでDCF削除新規マスタ作成/データ出力処理を実行する - pass + jskultHdkeTblManager = JskultHdkeTblManager() + + if not jskultHdkeTblManager.can_run_process(): + return + + jskultBatchStatusManager = JskultBatchStatusManager( + PROCESS_NAME, + POST_PROCESS, + MAX_RUN_COUNT_FLG, + RECEIVE_FILE_COUNT + ) + + if not jskultBatchStatusManager.can_run_post_process(): + + # 処理ステータスを「処理待」に設定 + jskultBatchStatusManager.set_process_status("retry") + return + + # 
アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 + if jskultBatchStatusManager.is_done_ultmarc_import(): + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + if is_add_dcf_inst_merge: + _output_add_dcf_inst_merge_log(duplication_inst_records) + + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() + + # TODO DCF施設削除新規マスタをS3に出力 + + + + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: + sql ="""\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + vtrd.INST_CD + FROM + VW_TRN_RESULT_DATA AS vtrd + WHERE + vtrd.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + ; + + """ + duplication_inst_records = self._db.execute_select(sql) + + # DCF施設統合マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" 
+ values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + return (True, duplication_inst_records) + + + def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): + sys_update_date = duplication_inst_records[0]['sys_update_date'] + set_year_month = '{set_year}年{set_month}月'.format( + set_year=sys_update_date[0:4], + set_month=sys_update_date[-2:] + ) + + add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' + add_dct_inst_merge_list = [] + for row in duplication_inst_records: + add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) + + # 顧客報告用にログ出力 + logger.info( + f"""DCF削除新規マスタが追加されました。 +********************************************************** +適用月度 {set_year_month} +********************************************************** +{add_dct_inst_merge_list} +********************************************************** +合計 {len(duplication_inst_records)}件""" + ) + return \ No newline at end of file From 45268db27507ecbc0ada7e42e4845815566b55f2 Mon Sep 17 00:00:00 2001 From: yono Date: Tue, 27 May 2025 10:02:59 +0900 Subject: [PATCH 11/30] =?UTF-8?q?feat:=20integrity=E3=83=8F=E3=83=83?= =?UTF-8?q?=E3=82=B7=E3=83=A5=E5=80=A4=E3=81=AE=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/templates/_header.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/templates/_header.html b/ecs/jskult-webapp/src/templates/_header.html index 62f6e2ab..f5a71ad9 100644 --- 
a/ecs/jskult-webapp/src/templates/_header.html +++ b/ecs/jskult-webapp/src/templates/_header.html @@ -15,5 +15,5 @@ - + \ No newline at end of file From c246572a7f6e7540e5a35870df76f268268dc773 Mon Sep 17 00:00:00 2001 From: yono Date: Tue, 27 May 2025 11:34:45 +0900 Subject: [PATCH 12/30] =?UTF-8?q?fix:integrity=E3=81=AE=E8=A8=AD=E5=AE=9A?= =?UTF-8?q?=E5=80=A4=E3=83=9F=E3=82=B9=E3=81=AE=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/templates/_header.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/templates/_header.html b/ecs/jskult-webapp/src/templates/_header.html index f5a71ad9..c4ff212f 100644 --- a/ecs/jskult-webapp/src/templates/_header.html +++ b/ecs/jskult-webapp/src/templates/_header.html @@ -15,5 +15,5 @@ - + \ No newline at end of file From e50b827d9f0d36d3770edbde3665044810aab50e Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Tue, 27 May 2025 16:38:47 +0900 Subject: [PATCH 13/30] =?UTF-8?q?=E5=8B=95=E4=BD=9C=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=89=8D=E3=82=B3=E3=83=9F=E3=83=83=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/src/aws/s3.py | 95 +----- .../src/batch/common/batch_context.py | 48 +++ .../src/batch/dcf_inst_merge_io.py | 285 ++++++++++-------- 3 files changed, 226 insertions(+), 202 deletions(-) create mode 100644 ecs/jskult-batch/src/batch/common/batch_context.py diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index 6203868d..f70cc049 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -54,33 +54,8 @@ class S3Bucket(): _s3_client = S3Client() _bucket_name: str = None - -class UltmarcBucket(S3Bucket): - _bucket_name = environment.ULTMARC_DATA_BUCKET - _folder = environment.ULTMARC_DATA_FOLDER - - def list_dat_file(self): - return 
self._s3_client.list_objects(self._bucket_name, self._folder) - - def download_dat_file(self, dat_filename: str): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, dat_filename, f) - f.seek(0) - return temporary_file_path - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - ultmarc_backup_bucket = UltmarcBackupBucket() - backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) - # コピー元のファイルを削除 - self._s3_client.delete_file(self._bucket_name, dat_file_key) - - class ConfigBucket(S3Bucket): + # TODO 日付更新処理で内容の修正を行う _bucket_name = environment.JSKULT_CONFIG_BUCKET def download_holiday_list(self): @@ -118,68 +93,24 @@ class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET -class UltmarcBackupBucket(JskUltBackupBucket): - _folder = environment.ULTMARC_BACKUP_FOLDER - -class VjskBackupBucket(JskUltBackupBucket): +# TODO 設定値をecsタスク定義書から確認 +class JskBackupBucket(JskUltBackupBucket): _folder = environment.VJSK_BACKUP_FOLDER +class JskSendBucket(S3Bucket): + _bucket_name = environment.JSKULT_DATA_BUCKET + _send_folder = environment.JSKULT_DATA_SEND_FOLDER -class VjskReceiveBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER - - _s3_file_list = None - - def get_s3_file_list(self): - self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) - return self._s3_file_list - - def download_data_file(self, data_filename: str): - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') - 
with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, data_filename, f) - f.seek(0) - return temporary_file_path - - def unzip_data_file(self, filename: str): - temp_dir = os.path.dirname(filename) - decompress_filename = os.path.basename(filename).replace('.gz', '') - decompress_file_path = os.path.join(temp_dir, decompress_filename) - with gzip.open(filename, 'rb') as gz: - with open(decompress_file_path, 'wb') as decompressed_file: - shutil.copyfileobj(gz, decompressed_file) - - ret = [decompress_file_path] - return ret - - def backup_dat_file(self, target_files: list, datetime_key: str): - jskult_backup_bucket = VjskBackupBucket() - for target_file in target_files: - backup_from_file_path = target_file.get("filename") - backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") - backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' - self._s3_client.copy(self._bucket_name, backup_from_file_path, - jskult_backup_bucket._bucket_name, backup_key) - self._s3_client.delete_file(self._bucket_name, backup_from_file_path) - - -class VjskSendBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _send_folder = environment.VJSK_DATA_SEND_FOLDER - - def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + def upload_dcf_inst_merge_csv_file(self, jskult_create_csv: str, csv_file_path: str): # S3バケットにファイルを移動 - csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' - s3_client = S3Client() - s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + csv_file_name = f'{self._send_folder}/{jskult_create_csv}' + self._s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) return - def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): + def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str): # バックアップバケットにコピー - vjsk_backup_bucket = VjskBackupBucket() + jskult_backup_bucket 
= JskUltBackupBucket() dat_key = f'{self._send_folder}/{dat_file_key}' - backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) + backup_key = f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, jskult_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch/src/batch/common/batch_context.py b/ecs/jskult-batch/src/batch/common/batch_context.py new file mode 100644 index 00000000..b3fc4967 --- /dev/null +++ b/ecs/jskult-batch/src/batch/common/batch_context.py @@ -0,0 +1,48 @@ +class BatchContext: + __instance = None + __syor_date: str # 処理日(yyyy/mm/dd形式) + __is_not_business_day: bool # 日次バッチ起動日フラグ + __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ + __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ + + def __init__(self) -> None: + self.__is_not_business_day = False + self.__is_ultmarc_imported = False + + @classmethod + def get_instance(cls): + if cls.__instance is None: + cls.__instance = cls() + return cls.__instance + + @property + def syor_date(self): + return self.__syor_date + + @syor_date.setter + def syor_date(self, syor_date_str: str): + self.__syor_date = syor_date_str + + @property + def is_not_business_day(self): + return self.__is_not_business_day + + @is_not_business_day.setter + def is_not_business_day(self, flag: bool): + self.__is_not_business_day = flag + + @property + def is_ultmarc_imported(self): + return self.__is_ultmarc_imported + + @is_ultmarc_imported.setter + def is_ultmarc_imported(self, flag: bool): + self.__is_ultmarc_imported = flag + + @property + def is_vjsk_stock_import_day(self): + return self.__is_vjsk_stock_import_day + + @is_vjsk_stock_import_day.setter + def is_vjsk_stock_import_day(self, flag: bool): + 
self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index fcb104bc..5a0199f0 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,6 +1,12 @@ import os +import csv +import os.path as path +import tempfile +from src.aws.s3 import JskSendBucket + from src.db.database import Database from src.error.exceptions import BatchOperationException +from src.batch.common.batch_context import BatchContext from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager @@ -9,19 +15,11 @@ from src.logging.get_logger import get_logger logger = get_logger('DCF削除新規マスタ作成') -LOG_LEVEL = os.environ["LOG_LEVEL"] PROCESS_NAME = os.environ["PROCESS_NAME"] POST_PROCESS = os.environ["POST_PROCESS"] MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -JSK_DATA_SEND_FOLDER = os.environ["JSK_DATA_SEND_FOLDER"] -JSK_BACKUP_FOLDER = os.environ["JSK_BACKUP_FOLDER"] -TRANSFER_RESULT_FOLDER = os.environ["TRANSFER_RESULT_FOLDER"] -DCF_INST_MERGE_SEND_FILE_NAME = os.environ["DCF_INST_MERGE_SEND_FILE_NAME"] -DB_CONNECTION_MAX_RETRY_ATTEMPT = os.environ["DB_CONNECTION_MAX_RETRY_ATTEMPT"] -DB_CONNECTION_RETRY_INTERVAL_INIT = os.environ["DB_CONNECTION_RETRY_INTERVAL_INIT"] -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS"] -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS"] +CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): @@ -49,125 +47,135 @@ class DcfInstMergeIO(JskultBatchEntrypoint): # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 if jskultBatchStatusManager.is_done_ultmarc_import(): + + (is_add_dcf_inst_merge, duplication_inst_records) = 
_insert_dcf_inst_merge_from_com_inst(self) + if is_add_dcf_inst_merge: + + # COM_施設からDCF削除新規マスタに登録 + _output_add_dcf_inst_merge_log(duplication_inst_records) + + # CSV出力 + file_path = _make_csv_data(CSV_FILE_NAME) + + # CSVをS3にアップロード + _upload_dcf_inst_merge_csv_file(CSV_FILE_NAME, file_path) + + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: + # com_instからdcf_inst_mergeにinsert try: self._db = Database.get_instance() self._db.connect() self._db.begin() self._db.to_jst() - (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) - if is_add_dcf_inst_merge: - _output_add_dcf_inst_merge_log(duplication_inst_records) + sql ="""\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + src05.COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + src07.DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + src07.MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + src07.ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + trd.INST_CD + FROM + src07.TRN_RESULT_DATA AS trd + WHERE + trd.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + ; + + """ + duplication_inst_records = self._db.execute_select(sql) + + # DCF削除新規マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 
MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" + values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + + self._db.execute(insert_sql, params) + + return (True, duplication_inst_records) except Exception as e: self._db.rollback() raise BatchOperationException(e) finally: self._db.disconnect() - # TODO DCF施設削除新規マスタをS3に出力 - - - - def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: - sql ="""\ - SELECT - ci.DCF_DSF_INST_CD, - ci.FORM_INST_NAME_KANJI, - ci.DELETE_SCHE_REASON_CD, - ci.DUP_OPP_CD, - ci.SYS_UPDATE_DATE - FROM - COM_INST AS ci - WHERE - ci.DUP_OPP_CD IS NOT NULL - AND - ci.DELETE_SCHE_REASON_CD = 'D' - AND - ci.DELETE_DATA IS NULL - AND - ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() - AND - NOT EXISTS ( - SELECT - dim.DCF_INST_CD - FROM - DCF_INST_MERGE AS dim - WHERE - dim.DCF_INST_CD = ci.DCF_DSF_INST_CD - ) - AND - (ci.DCF_DSF_INST_CD EXISTS( - SELECT - mia.INST_CD - FROM - MST_INST_ASSN as mia - WHERE - mia.INST_CD = ci.DCF_DSF_INST_CD - ) - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - ap.PRSB_INST_CD - FROM - ATC_PHARM AS ap - WHERE - ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - vtrd.INST_CD - FROM - VW_TRN_RESULT_DATA AS vtrd - WHERE - vtrd.INST_CD = ci.DCF_DSF_INST_CD - ) - ) - ; - - """ - duplication_inst_records = self._db.execute_select(sql) - - # DCF施設統合マスタ取り込み - values_clauses = [] - params = {} - for clauses_no, row in enumerate(duplication_inst_records, start=1): - dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" - dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" - values_clause = 
f"""(:{dcf_inst_cd_arr}, - :{dup_opp_cd_arr}, - DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), - NULL, - NULL, - NULL, - "Y", - batchuser, - SYSDATE(), - batchuser, - SYSDATE() - )""" - values_clauses.append(values_clause) - params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] - params[dup_opp_cd_arr] = row['DUP_OPP_CD'] - insert_sql = f""" - INSERT INTO - src07.dcf_inst_merge ( - DCF_INST_CD, - DUP_OPP_CD, - START_MONTH, - INVALID_FLG, - REMARKS, - DCF_INST_CD_NEW, - ENABLED_FLG, - CREATER, - CREATE_DATE, - UPDATER, - UPDATE_DATE - ) - VALUES - {','.join(values_clauses)} - """ - return (True, duplication_inst_records) - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): sys_update_date = duplication_inst_records[0]['sys_update_date'] @@ -181,15 +189,52 @@ class DcfInstMergeIO(JskultBatchEntrypoint): for row in duplication_inst_records: add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) - # 顧客報告用にログ出力 logger.info( - f"""DCF削除新規マスタが追加されました。 + f"""DCF施設統合マスタが追加されました。 ********************************************************** 適用月度 {set_year_month} ********************************************************** {add_dct_inst_merge_list} ********************************************************** 合計 {len(duplication_inst_records)}件""" - ) - return \ No newline at end of file + ) + return + + + def _make_csv_data(record_inst: list, csv_file_name: str): + # CSVファイルを作成する + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, csv_file_name) + + head_str = ['DCF_INST_CD','DUP_OPP_CD','START_MONTH', + 'INVALID_FLG','REMARKS','DCF_INST_CD_NEW','ENABLED_FLG', + 'CREATER','CREATE_DATE','UPDATER','UPDATE_DATE'] + + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") + + # Shift-JIS、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', 
lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = ['' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + + return csv_file_path + + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): + # S3バケットにファイルを移動 + jsk_send_bucket = JskSendBucket() + # バッチ共通設定を取得 + batch_context = BatchContext.get_instance() + + jsk_send_bucket.upload_dcf_inst_merge_csv_file(csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file(csv_file_name, batch_context.syor_date) + return \ No newline at end of file From 0086486841576d3f50a301814fc2aff4e09d7614 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Tue, 27 May 2025 20:14:03 +0900 Subject: [PATCH 14/30] =?UTF-8?q?=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C=20en?= =?UTF-8?q?trypoint=E5=AE=9F=E8=A1=8C=E6=B8=88=20DB=E6=8E=A5=E7=B6=9A?= =?UTF-8?q?=E3=81=AE=E9=83=A8=E5=88=86=E3=81=AF=E6=9C=AA=E7=A2=BA=E8=AA=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.vscode/launch.json | 3 +- ecs/jskult-batch/src/aws/s3.py | 37 ++-- .../src/batch/dcf_inst_merge_io.py | 176 +++++++++++------- .../src/batch/jskult_batch_entrypoint.py | 2 +- ecs/jskult-batch/src/error/exceptions.py | 4 + .../src/system_var/environment.py | 23 ++- ecs/jskult-batch/test.py | 0 7 files changed, 159 insertions(+), 86 deletions(-) create mode 100644 ecs/jskult-batch/test.py diff --git a/ecs/jskult-batch/.vscode/launch.json b/ecs/jskult-batch/.vscode/launch.json index bcd1c6dd..8bbb94a0 100644 --- a/ecs/jskult-batch/.vscode/launch.json +++ b/ecs/jskult-batch/.vscode/launch.json @@ -10,7 +10,8 @@ "request": "launch", "program": "entrypoint.py", "console": "integratedTerminal", - "justMyCode": true + "justMyCode": true, + "envFile": "${workspaceFolder}/.env" } ] } \ No newline at 
end of file diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index f70cc049..66032e1c 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -14,7 +14,8 @@ class S3Client: _bucket_name: str def list_objects(self, bucket_name: str, folder_name: str): - response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + response = self.__s3_client.list_objects_v2( + Bucket=bucket_name, Prefix=folder_name) if response['KeyCount'] == 0: return [] contents = response['Contents'] @@ -54,6 +55,7 @@ class S3Bucket(): _s3_client = S3Client() _bucket_name: str = None + class ConfigBucket(S3Bucket): # TODO 日付更新処理で内容の修正を行う _bucket_name = environment.JSKULT_CONFIG_BUCKET @@ -61,30 +63,36 @@ class ConfigBucket(S3Bucket): def download_holiday_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + self._s3_client.download_file( + self._bucket_name, holiday_list_key, f) f.seek(0) return temporary_file_path def download_wholesaler_stock_input_day_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - 
self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) + self._s3_client.download_file( + self._bucket_name, wholesaler_stock_input_day_list_key, f) f.seek(0) return temporary_file_path def download_ultmarc_hex_convert_config(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) + self._s3_client.download_file( + self._bucket_name, hex_convert_config_key, f) f.seek(0) return temporary_file_path @@ -93,19 +101,19 @@ class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET - -# TODO 設定値をecsタスク定義書から確認 class JskBackupBucket(JskUltBackupBucket): - _folder = environment.VJSK_BACKUP_FOLDER + _folder = environment.JSKULT_BACKUP_BUCKET + class JskSendBucket(S3Bucket): - _bucket_name = environment.JSKULT_DATA_BUCKET - _send_folder = environment.JSKULT_DATA_SEND_FOLDER + _bucket_name = environment.JSK_IO_BUCKET + _send_folder = environment.JSK_DATA_SEND_FOLDER def upload_dcf_inst_merge_csv_file(self, jskult_create_csv: str, csv_file_path: str): # S3バケットにファイルを移動 csv_file_name = f'{self._send_folder}/{jskult_create_csv}' - self._s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + self._s3_client.upload_file( + csv_file_path, self._bucket_name, csv_file_name) return def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str): @@ -113,4 +121,5 @@ class JskSendBucket(S3Bucket): jskult_backup_bucket = JskUltBackupBucket() dat_key = f'{self._send_folder}/{dat_file_key}' backup_key = 
f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.copy(self._bucket_name, dat_key, + jskult_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index 5a0199f0..e3e7ed12 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,64 +1,110 @@ -import os import csv import os.path as path import tempfile + from src.aws.s3 import JskSendBucket - -from src.db.database import Database -from src.error.exceptions import BatchOperationException from src.batch.common.batch_context import BatchContext - from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +from src.db.database import Database +from src.error.exceptions import BatchOperationException, MaxRunCountReachedException +from src.manager.jskult_batch_run_manager import JskultBatchRunManager from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.system_var import environment from src.logging.get_logger import get_logger logger = get_logger('DCF削除新規マスタ作成') -PROCESS_NAME = os.environ["PROCESS_NAME"] -POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] -RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): super().__init__() - def execute(self): - jskultHdkeTblManager = JskultHdkeTblManager() - - if not jskultHdkeTblManager.can_run_process(): - return - + jskultBatchRunManager = JskultBatchRunManager( + environment.BATCH_EXECUTION_ID) jskultBatchStatusManager = JskultBatchStatusManager( - PROCESS_NAME, - POST_PROCESS, 
- MAX_RUN_COUNT_FLG, - RECEIVE_FILE_COUNT + environment.PROCESS_NAME, + environment.POST_PROCESS, + environment.MAX_RUN_COUNT_FLG, + environment.RECEIVE_FILE_COUNT ) - if not jskultBatchStatusManager.can_run_post_process(): + try: + jskultHdkeTblManager = JskultHdkeTblManager() - # 処理ステータスを「処理待」に設定 - jskultBatchStatusManager.set_process_status("retry") - return - - # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 - if jskultBatchStatusManager.is_done_ultmarc_import(): - - (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + if not jskultHdkeTblManager.can_run_process(): + logger.error( + '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') + return + + jskultBatchStatusManager.set_process_status("start") + try: + if not jskultBatchStatusManager.can_run_post_process(): + # リトライ判断された場合 + # 処理ステータスを「処理待」に設定 + jskultBatchStatusManager.set_process_status("waiting") + + # バッチ実行管理テーブルに「retry」で登録 + jskultBatchRunManager.batch_retry() + + return + except MaxRunCountReachedException as e: + logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。') + + jskultBatchStatusManager.set_process_status("doing") + + # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 + if jskultBatchStatusManager.is_done_ultmarc_import(): + + # + (is_add_dcf_inst_merge, + duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - + # COM_施設からDCF削除新規マスタに登録 _output_add_dcf_inst_merge_log(duplication_inst_records) - - # CSV出力 - file_path = _make_csv_data(CSV_FILE_NAME) + dcf_inst_merge_all_records = _select_dcf_inst_merge_all() + # CSV出力 + file_path = _make_csv_data( + dcf_inst_merge_all_records, environment.CSV_FILE_NAME) - # CSVをS3にアップロード - _upload_dcf_inst_merge_csv_file(CSV_FILE_NAME, file_path) + # CSVをS3にアップロード + _upload_dcf_inst_merge_csv_file( + file_path, environment.CSV_FILE_NAME) + + # 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 + logger.info("DCF削除新規マスタ作成処理を正常終了します。") + + jskultBatchRunManager.batch_success() + 
jskultBatchStatusManager.set_process_status("done") + + except: + # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 + logger.error("エラーが発生したため、DCF削除新規マスタ作成処理を終了します。") + jskultBatchRunManager.batch_failed() + jskultBatchStatusManager.set_process_status("failed") + + def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + sql = """\ + SELECT + * + FROM + src07.dcf_inst_merge + """ + dcf_inst_merge_all_records = self._db.execute_select(sql) + return dcf_inst_merge_all_records + + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: # com_instからdcf_inst_mergeにinsert @@ -68,7 +114,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): self._db.begin() self._db.to_jst() - sql ="""\ + sql = """\ SELECT ci.DCF_DSF_INST_CD, ci.FORM_INST_NAME_KANJI, @@ -176,18 +222,18 @@ class DcfInstMergeIO(JskultBatchEntrypoint): finally: self._db.disconnect() - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): sys_update_date = duplication_inst_records[0]['sys_update_date'] set_year_month = '{set_year}年{set_month}月'.format( set_year=sys_update_date[0:4], set_month=sys_update_date[-2:] - ) + ) add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' add_dct_inst_merge_list = [] for row in duplication_inst_records: - add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list.append( + add_dct_inst_merge.format(**row)) add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) # 顧客報告用にログ出力 logger.info( @@ -201,40 +247,42 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ) return + def _make_csv_data(csv_file_name: str, record_inst: list): + # CSVファイルを作成する + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, 
csv_file_name) - def _make_csv_data(record_inst: list, csv_file_name: str): - # CSVファイルを作成する - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, csv_file_name) + head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', + 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', + 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] - head_str = ['DCF_INST_CD','DUP_OPP_CD','START_MONTH', - 'INVALID_FLG','REMARKS','DCF_INST_CD_NEW','ENABLED_FLG', - 'CREATER','CREATE_DATE','UPDATER','UPDATE_DATE'] + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") - with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: - # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) - csv_file.write(f"{','.join(head_str)}\n") + # Shift-JIS、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) - # Shift-JIS、CRLF、価囲いありで書き込む - writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', - quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, - strict=True - ) + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = [ + '' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) - # データ部分書き込み(施設) - for record_inst_data in record_inst: - record_inst_value = list(record_inst_data.values()) - csv_data = ['' if n is None else n for n in record_inst_value] - writer.writerow(csv_data) + return csv_file_path - return csv_file_path - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): # S3バケットにファイルを移動 jsk_send_bucket = JskSendBucket() # バッチ共通設定を取得 batch_context = BatchContext.get_instance() - jsk_send_bucket.upload_dcf_inst_merge_csv_file(csv_file_name, csv_file_path) - jsk_send_bucket.backup_dcf_inst_merge_csv_file(csv_file_name, 
batch_context.syor_date) - return \ No newline at end of file + jsk_send_bucket.upload_dcf_inst_merge_csv_file( + csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file( + csv_file_name, batch_context.syor_date) + return diff --git a/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py b/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py index 291a8d1f..47f34952 100644 --- a/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py +++ b/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py @@ -3,6 +3,6 @@ import abc class JskultBatchEntrypoint(metaclass=abc.ABCMeta): - @abc.abstractmethod() + @abc.abstractmethod def execute(self): pass diff --git a/ecs/jskult-batch/src/error/exceptions.py b/ecs/jskult-batch/src/error/exceptions.py index 055c24f6..aa5f9be6 100644 --- a/ecs/jskult-batch/src/error/exceptions.py +++ b/ecs/jskult-batch/src/error/exceptions.py @@ -8,3 +8,7 @@ class DBException(MeDaCaException): class BatchOperationException(MeDaCaException): pass + + +class MaxRunCountReachedException(MeDaCaException): + pass diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index e70d8bb4..2d5d5f41 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -6,13 +6,24 @@ DB_PORT = int(os.environ['DB_PORT']) DB_USERNAME = os.environ['DB_USERNAME'] DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] - -# 処理名 +JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] +POST_PROCESS = os.environ["POST_PROCESS"] +MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] +JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] +JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] +JSK_DATA_SEND_FOLDER = 
os.environ['JSK_DATA_SEND_FOLDER'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') -DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) -DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) +DB_CONNECTION_MAX_RETRY_ATTEMPT = int( + os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int( + os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) diff --git a/ecs/jskult-batch/test.py b/ecs/jskult-batch/test.py new file mode 100644 index 00000000..e69de29b From 198f35d6a70f23dff88d2689458c96107416db5c Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Tue, 27 May 2025 22:22:24 +0900 Subject: [PATCH 15/30] =?UTF-8?q?format:=20=E3=83=95=E3=82=A9=E3=83=BC?= =?UTF-8?q?=E3=83=9E=E3=83=83=E3=83=88=E9=81=A9=E7=94=A8=E3=80=82=E6=9C=AA?= =?UTF-8?q?=E4=BD=BF=E7=94=A8=E3=81=AEimport=E3=82=92=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/aws/s3.py | 17 ++++--- .../src/batch/archive_jsk_data.py | 48 ++++++++++++------- .../src/batch/jskult_archive_manager.py | 33 +++++++------ .../src/db/database.py | 17 ++++--- .../src/system_var/environment.py | 12 +++-- 5 files changed, 74 insertions(+), 53 deletions(-) diff --git a/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py b/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py index a6e0074a..3f40d8ef 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py +++ b/ecs/jskult-batch-archive-jsk-data/src/aws/s3.py @@ -1,8 
+1,3 @@ -import gzip -import os -import os.path as path -import shutil -import tempfile import boto3 from src.system_var import environment @@ -12,7 +7,8 @@ class S3Client: _bucket_name: str def list_objects(self, bucket_name: str, folder_name: str): - response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + response = self.__s3_client.list_objects_v2( + Bucket=bucket_name, Prefix=folder_name) if response['KeyCount'] == 0: return [] contents = response['Contents'] @@ -47,16 +43,19 @@ class S3Client: Key=file_key ) + class S3Bucket(): _s3_client = S3Client() _bucket_name: str = None -class JskultArchiveBucket(S3Bucket): + +class JskultArchiveBucket(S3Bucket): _bucket_name = environment.JSKULT_ARCHIVE_BUCKET def upload_archive_zip_file(self, archive_zip: str, archive_zip_path: str, send_folder: str): # S3バケットにファイルを移動 archive_zip_name = f'{send_folder}/{archive_zip}' s3_client = S3Client() - s3_client.upload_file(archive_zip_path, self._bucket_name, archive_zip_name) - return f"{self._bucket_name}/{archive_zip_name}" \ No newline at end of file + s3_client.upload_file( + archive_zip_path, self._bucket_name, archive_zip_name) + return f"{self._bucket_name}/{archive_zip_name}" diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py index d9693c98..9be0432b 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py @@ -1,15 +1,16 @@ -from src.logging.get_logger import get_logger -from src.batch.jskult_archive_manager import JskultArchiveManager -from src.aws.s3 import JskultArchiveBucket -import os.path as path -from datetime import timedelta -import tempfile import csv +import os.path as path +import tempfile import zipfile +from datetime import timedelta +from src.aws.s3 import JskultArchiveBucket +from src.batch.jskult_archive_manager import JskultArchiveManager +from 
src.logging.get_logger import get_logger logger = get_logger("実消化_過去データアーカイブ処理") + def exec(): try: logger.info("処理開始:実消化_過去データアーカイブ処理") @@ -20,25 +21,29 @@ def exec(): # 取得したレコード分繰り返す for jskult_archive_manage_data in jskult_archive_manage_data_list: # 対象テーブルで条件項目が条件年月以前のデータを取得 - archive_data = jskult_archive_manager.get_archive_data(jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"]) + archive_data = jskult_archive_manager.get_archive_data( + jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"]) # 取得データが0件の場合、スキップする if not archive_data: - logger.info(f"アーカイブ対象データがありませんでした。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") + logger.info( + f"アーカイブ対象データがありませんでした。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") continue # 一時フォルダ作成 - with tempfile.TemporaryDirectory() as temporary_dir: + with tempfile.TemporaryDirectory() as temporary_dir: # 取得したデータをCSVに出力 - day_after_prev_filter_date = jskult_archive_manage_data["prev_filter_date"] + timedelta(days=1) + day_after_prev_filter_date = jskult_archive_manage_data["prev_filter_date"] + timedelta( + days=1) file_name = f'{jskult_archive_manage_data["target_table"]}_{day_after_prev_filter_date.strftime('%Y%m%d')}_{jskult_archive_manage_data["filter_date"].strftime('%Y%m%d')}' csv_file_path = path.join(temporary_dir, f"{file_name}.csv") headers = archive_data[0].keys() with open(csv_file_path, 'w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=headers, quoting=csv.QUOTE_ALL) + writer = csv.DictWriter( + file, fieldnames=headers, quoting=csv.QUOTE_ALL) writer.writeheader() writer.writerows(archive_data) logger.info(f"CSVファイル作成に成功しました。{file_name}.csv") - + # 作成したCSVをzip形式に圧縮 zip_file_path = path.join(temporary_dir, f"{file_name}.zip") with 
zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zipf: @@ -47,16 +52,23 @@ def exec(): # 圧縮したCSVを保存先へアップロード archive_bucket = JskultArchiveBucket() - upload_file_path = archive_bucket.upload_archive_zip_file(f"{file_name}.zip", zip_file_path, jskult_archive_manage_data["archive_storage"]) + upload_file_path = archive_bucket.upload_archive_zip_file( + f"{file_name}.zip", zip_file_path, jskult_archive_manage_data["archive_storage"]) logger.info(f"{upload_file_path}へのアップロードに成功しました。") # アーカイブしたデータをDBから削除 - jskult_archive_manager.delete_archive_data(jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"]) - logger.info(f"アーカイブしたデータのDBから削除に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") + jskult_archive_manager.delete_archive_data( + jskult_archive_manage_data["target_table"], + jskult_archive_manage_data["filter_column"], + jskult_archive_manage_data["filter_date"]) + logger.info( + f"アーカイブしたデータのDBから削除に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}") # 次回に向けてアーカイブ管理テーブルを更新する - jskult_archive_manager.update_archive_manage(jskult_archive_manage_data["target_table"]) - logger.info(f"アーカイブ管理テーブルの更新に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']}") + jskult_archive_manager.update_archive_manage( + jskult_archive_manage_data["target_table"]) + logger.info( + f"アーカイブ管理テーブルの更新に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']}") logger.info("処理終了:実消化_過去データアーカイブ処理") except Exception as e: - logger.info(f"異常終了:実消化_過去データアーカイブ処理 {e}") \ No newline at end of file + logger.info(f"異常終了:実消化_過去データアーカイブ処理 {e}") diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py index 033be013..8abe9234 100644 --- 
a/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py @@ -1,9 +1,12 @@ from src.db.database import Database from src.logging.get_logger import get_logger + logger = get_logger("アーカイブ管理テーブル操作") + class JskultArchiveManager: - _db : Database = None + _db: Database = None + def __init__(self): self._db = Database.get_instance() @@ -17,7 +20,7 @@ class JskultArchiveManager: , filter_date , run_interval_months , prev_filter_date - , archive_storage + , archive_storage from internal07.jskult_archive_manage; """ @@ -27,11 +30,11 @@ class JskultArchiveManager: return jskult_archive_manage_data except Exception as e: logger.info("異常終了:アーカイブ管理テーブル取得") - raise + raise e finally: self._db.disconnect() - def get_archive_data(self,target_table:str, filter_column:str, filter_date:str): + def get_archive_data(self, target_table: str, filter_column: str, filter_date: str): try: logger.info("処理開始:アーカイブデータ取得") sql = f""" @@ -48,11 +51,11 @@ class JskultArchiveManager: return target_table_data except Exception as e: logger.info("異常終了:アーカイブ管理テーブル取得") - raise + raise e finally: self._db.disconnect() - def delete_archive_data(self, target_table:str, filter_column:str, filter_date:str): + def delete_archive_data(self, target_table: str, filter_column: str, filter_date: str): try: logger.info("処理開始:アーカイブ後データ削除") sql = f""" @@ -67,25 +70,25 @@ class JskultArchiveManager: self._db.commit() logger.info("処理終了:アーカイブ後データ削除") return - except: + except Exception as e: self._db.rollback() logger.info("異常終了:アーカイブ後データ削除") - raise + raise e finally: self._db.disconnect() - def update_archive_manage(self, target_table:str): + def update_archive_manage(self, target_table: str): try: logger.info("処理開始:アーカイブ管理テーブル条件年月更新") sql = f""" - update internal07.jskult_archive_manage + update internal07.jskult_archive_manage set prev_filter_date = filter_date - , filter_date = LAST_DAY( + , filter_date = LAST_DAY( 
DATE_ADD(filter_date, INTERVAL run_interval_months MONTH) - ) + ) , upd_user = CURRENT_USER () - , upd_date = NOW() + , upd_date = NOW() where target_table = '{target_table}'; """ @@ -95,9 +98,9 @@ class JskultArchiveManager: self._db.commit() logger.info("処理終了:アーカイブ管理テーブル条件年月更新") return - except: + except Exception as e: self._db.rollback() logger.info("異常終了:アーカイブ管理テーブル条件年月更新") - raise + raise e finally: self._db.disconnect() diff --git a/ecs/jskult-batch-archive-jsk-data/src/db/database.py b/ecs/jskult-batch-archive-jsk-data/src/db/database.py index 5ddaba4e..3f6ce8ea 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/db/database.py +++ b/ecs/jskult-batch-archive-jsk-data/src/db/database.py @@ -1,11 +1,10 @@ from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, create_engine, text) from sqlalchemy.engine.url import URL -from tenacity import retry, stop_after_attempt, wait_exponential - from src.error.exceptions import DBException from src.logging.get_logger import get_logger from src.system_var import environment +from tenacity import retry, stop_after_attempt, wait_exponential logger = get_logger(__name__) @@ -57,7 +56,8 @@ class Database: poolclass=QueuePool ) - self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + self.__autocommit_engine = self.__transactional_engine.execution_options( + isolation_level='AUTOCOMMIT') @classmethod def get_instance(cls, autocommit=False): @@ -120,10 +120,12 @@ class Database: try: # トランザクションが開始している場合は、トランザクションを引き継ぐ if self.__connection.in_transaction(): - result = self.__connection.execute(text(select_query), parameters) + result = self.__connection.execute( + text(select_query), parameters) else: # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 - result = self.__execute_with_transaction(select_query, parameters) + result = self.__execute_with_transaction( + select_query, parameters) except Exception as e: raise DBException(f'SQL Error: {e}') @@ -181,13 
+183,14 @@ class Database: self.__connection = None def to_jst(self): - self.execute('SET time_zone = "+9:00"') + self.execute('SET time_zone = "+9:00"') def __execute_with_transaction(self, query: str, parameters: dict): # トランザクションを開始してクエリを実行する with self.__connection.begin(): try: - result = self.__connection.execute(text(query), parameters=parameters) + result = self.__connection.execute( + text(query), parameters=parameters) except Exception as e: self.__connection.rollback() raise e diff --git a/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py b/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py index 249aa4f0..41ac760f 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py +++ b/ecs/jskult-batch-archive-jsk-data/src/system_var/environment.py @@ -12,7 +12,11 @@ JSKULT_ARCHIVE_BUCKET = os.environ['JSKULT_ARCHIVE_BUCKET'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') -DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) -DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) +DB_CONNECTION_MAX_RETRY_ATTEMPT = int( + os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int( + os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) From 2a076e69c976ae82b1c852d317fffbc78f60d7f8 Mon Sep 17 00:00:00 2001 From: yono Date: Wed, 28 May 2025 10:27:46 +0900 Subject: [PATCH 16/30] =?UTF-8?q?feat:=E4=B8=8D=E8=A6=81=E9=83=A8=E5=88=86?= 
=?UTF-8?q?=E5=89=8A=E9=99=A4=EF=BC=88=E3=83=A6=E3=83=8B=E3=83=83=E3=83=88?= =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E9=83=A8=E5=88=86=EF=BC=89=E3=80=81?= =?UTF-8?q?readme=E3=82=BF=E3=82=A4=E3=83=88=E3=83=AB=E4=BF=AE=E6=AD=A3?= =?UTF-8?q?=E3=80=81=E3=82=A8=E3=83=A9=E3=83=BC=E7=99=BA=E7=94=9F=E6=99=82?= =?UTF-8?q?=E3=81=AE=E3=83=AD=E3=82=B0=E3=83=AC=E3=83=99=E3=83=AB=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3=E3=80=81JskultArchiveManager=E3=81=AE=E3=83=AD?= =?UTF-8?q?=E3=82=B0=E3=83=A1=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3=E3=80=81=E3=83=89=E3=82=AD=E3=83=A5=E3=83=A1=E3=83=B3?= =?UTF-8?q?=E3=83=86=E3=83=BC=E3=82=B7=E3=83=A7=E3=83=B3=E3=82=B3=E3=83=A1?= =?UTF-8?q?=E3=83=B3=E3=83=88=E8=BF=BD=E5=8A=A0=E3=80=81SQL=E3=81=AE?= =?UTF-8?q?=E6=9D=A1=E4=BB=B6=E3=81=AE=E5=80=A4=E3=83=91=E3=83=A9=E3=83=A1?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E3=81=AB=E5=A4=89=E6=9B=B4=E3=80=81=E7=84=A1?= =?UTF-8?q?=E9=A7=84=E3=81=AA=E3=82=B9=E3=83=9A=E3=83=BC=E3=82=B9=E5=89=8A?= =?UTF-8?q?=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-archive-jsk-data/Pipfile | 6 --- ecs/jskult-batch-archive-jsk-data/README.md | 15 +------ .../src/batch/archive_jsk_data.py | 3 +- .../src/batch/jskult_archive_manager.py | 40 +++++++++++-------- 4 files changed, 26 insertions(+), 38 deletions(-) diff --git a/ecs/jskult-batch-archive-jsk-data/Pipfile b/ecs/jskult-batch-archive-jsk-data/Pipfile index 2b56d8c0..12961dde 100644 --- a/ecs/jskult-batch-archive-jsk-data/Pipfile +++ b/ecs/jskult-batch-archive-jsk-data/Pipfile @@ -3,12 +3,6 @@ url = "https://pypi.org/simple" verify_ssl = true name = "pypi" -[scripts] -"test:ultmarc" = "pytest tests/batch/ultmarc/" -"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" -"test:vjsk" = "pytest tests/batch/vjsk/" -"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" - [packages] 
boto3 = "*" PyMySQL = "*" diff --git a/ecs/jskult-batch-archive-jsk-data/README.md b/ecs/jskult-batch-archive-jsk-data/README.md index b9a9340c..c5b79335 100644 --- a/ecs/jskult-batch-archive-jsk-data/README.md +++ b/ecs/jskult-batch-archive-jsk-data/README.md @@ -1,4 +1,4 @@ -# 実消化&アルトマーク 日次バッチ +# 実消化過去データアーカイブ処理 ## 概要 @@ -45,19 +45,6 @@ - 実際の処理は、「src/jobctrl_daily.py」で行っている。 -### テスト準備 - -- VSCodeで以下の拡張機能をインストールする - - Python - - Python Test Explorer for Visual Studio Code - - Test Explorer UI -- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く -- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する -- 現在のワークスペースを選び、「pytest」を選択する -- 「tests」フォルダを選択する -- バックグランドで、pytest モジュールのインストールが始まれば成功 - - ## フォルダ構成 ```text diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py index d9693c98..e02b3da8 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/archive_jsk_data.py @@ -11,6 +11,7 @@ import zipfile logger = get_logger("実消化_過去データアーカイブ処理") def exec(): + """実消化_過去データアーカイブ処理""" try: logger.info("処理開始:実消化_過去データアーカイブ処理") jskult_archive_manager = JskultArchiveManager() @@ -59,4 +60,4 @@ def exec(): logger.info(f"アーカイブ管理テーブルの更新に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']}") logger.info("処理終了:実消化_過去データアーカイブ処理") except Exception as e: - logger.info(f"異常終了:実消化_過去データアーカイブ処理 {e}") \ No newline at end of file + logger.exception(f"異常終了:実消化_過去データアーカイブ処理 {e}") \ No newline at end of file diff --git a/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py index 033be013..3d027aa9 100644 --- a/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py +++ b/ecs/jskult-batch-archive-jsk-data/src/batch/jskult_archive_manager.py @@ -8,8 +8,9 @@ class JskultArchiveManager: self._db = Database.get_instance() def 
get_archive_manage(self): + """対象テーブル、対象項目、対象年月、実行間隔(月)、前回対象年月、保存先を取得""" try: - logger.info("処理開始:アーカイブ管理テーブル取得") + logger.info("処理開始:get_archive_manage") sql = """ select target_table @@ -23,60 +24,65 @@ class JskultArchiveManager: """ self._db.connect() jskult_archive_manage_data = self._db.execute_select(sql) - logger.info("処理終了:アーカイブ管理テーブル取得") + logger.info("処理終了:get_archive_manage") return jskult_archive_manage_data except Exception as e: - logger.info("異常終了:アーカイブ管理テーブル取得") + logger.info("異常終了:get_archive_manage") raise finally: self._db.disconnect() def get_archive_data(self,target_table:str, filter_column:str, filter_date:str): + """アーカイブするデータを取得""" try: - logger.info("処理開始:アーカイブデータ取得") + logger.info("処理開始:get_archive_data") sql = f""" select * from src07.{target_table} where - str_to_date({filter_column},'%Y%m%d') <= '{filter_date}'; + str_to_date({filter_column},'%Y%m%d') <= :filter_date; """ self._db.connect() - target_table_data = self._db.execute_select(sql) - logger.info("処理開始:アーカイブデータ終了") + parameter_dict = {'filter_date' : filter_date} + target_table_data = self._db.execute_select(sql, parameter_dict) + logger.info("処理終了:get_archive_data") return target_table_data except Exception as e: - logger.info("異常終了:アーカイブ管理テーブル取得") + logger.info("異常終了:get_archive_data") raise finally: self._db.disconnect() def delete_archive_data(self, target_table:str, filter_column:str, filter_date:str): + """アーカイブしたデータを削除""" try: - logger.info("処理開始:アーカイブ後データ削除") + logger.info("処理開始:delete_archive_data") sql = f""" delete from src07.{target_table} where - str_to_date({filter_column},'%Y%m%d') <= '{filter_date}'; + str_to_date({filter_column},'%Y%m%d') <= :filter_date; """ self._db.connect() self._db.begin() - self._db.execute(sql) + parameter_dict = {'filter_date' : filter_date} + self._db.execute(sql, parameter_dict) self._db.commit() - logger.info("処理終了:アーカイブ後データ削除") + logger.info("処理終了:delete_archive_data") return except: self._db.rollback() - 
logger.info("異常終了:アーカイブ後データ削除") + logger.info("異常終了:delete_archive_data") raise finally: self._db.disconnect() def update_archive_manage(self, target_table:str): + """アーカイブ管理テーブルの指定した対象テーブルのレコードを更新する""" try: - logger.info("処理開始:アーカイブ管理テーブル条件年月更新") + logger.info("処理開始:update_archive_manage") sql = f""" update internal07.jskult_archive_manage set @@ -84,7 +90,7 @@ class JskultArchiveManager: , filter_date = LAST_DAY( DATE_ADD(filter_date, INTERVAL run_interval_months MONTH) ) - , upd_user = CURRENT_USER () + , upd_user = CURRENT_USER() , upd_date = NOW() where target_table = '{target_table}'; @@ -93,11 +99,11 @@ class JskultArchiveManager: self._db.begin() self._db.execute(sql) self._db.commit() - logger.info("処理終了:アーカイブ管理テーブル条件年月更新") + logger.info("処理終了:update_archive_manage") return except: self._db.rollback() - logger.info("異常終了:アーカイブ管理テーブル条件年月更新") + logger.info("異常終了:update_archive_manage") raise finally: self._db.disconnect() From d243768e5858400f6341bdd1d6730f23971ed682 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Wed, 28 May 2025 12:09:43 +0900 Subject: [PATCH 17/30] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/common/batch_context.py | 48 --- .../src/batch/dcf_inst_merge_io.py | 403 +++++++++--------- .../src/system_var/environment.py | 4 +- 3 files changed, 202 insertions(+), 253 deletions(-) delete mode 100644 ecs/jskult-batch/src/batch/common/batch_context.py diff --git a/ecs/jskult-batch/src/batch/common/batch_context.py b/ecs/jskult-batch/src/batch/common/batch_context.py deleted file mode 100644 index b3fc4967..00000000 --- a/ecs/jskult-batch/src/batch/common/batch_context.py +++ /dev/null @@ -1,48 +0,0 @@ -class BatchContext: - __instance = None - __syor_date: str # 処理日(yyyy/mm/dd形式) - __is_not_business_day: bool # 日次バッチ起動日フラグ - __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ - 
__is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ - - def __init__(self) -> None: - self.__is_not_business_day = False - self.__is_ultmarc_imported = False - - @classmethod - def get_instance(cls): - if cls.__instance is None: - cls.__instance = cls() - return cls.__instance - - @property - def syor_date(self): - return self.__syor_date - - @syor_date.setter - def syor_date(self, syor_date_str: str): - self.__syor_date = syor_date_str - - @property - def is_not_business_day(self): - return self.__is_not_business_day - - @is_not_business_day.setter - def is_not_business_day(self, flag: bool): - self.__is_not_business_day = flag - - @property - def is_ultmarc_imported(self): - return self.__is_ultmarc_imported - - @is_ultmarc_imported.setter - def is_ultmarc_imported(self, flag: bool): - self.__is_ultmarc_imported = flag - - @property - def is_vjsk_stock_import_day(self): - return self.__is_vjsk_stock_import_day - - @is_vjsk_stock_import_day.setter - def is_vjsk_stock_import_day(self, flag: bool): - self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index e3e7ed12..a7298153 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -2,8 +2,7 @@ import csv import os.path as path import tempfile -from src.aws.s3 import JskSendBucket -from src.batch.common.batch_context import BatchContext +from src.aws.s3 import S3Client, JskSendBucket from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.db.database import Database from src.error.exceptions import BatchOperationException, MaxRunCountReachedException @@ -23,16 +22,24 @@ class DcfInstMergeIO(JskultBatchEntrypoint): def execute(self): jskultBatchRunManager = JskultBatchRunManager( environment.BATCH_EXECUTION_ID) + jskultHdkeTblManager = JskultHdkeTblManager() + + # /transfer_result/yyyy/mm/dd/ + jskult_backuo_folder_name = 
f"""/transfer_result/{jskultHdkeTblManager.get_batch_statuses()[2]}""" + + receive_file_count = S3Client.list_objects( + environment.JSKULT_BACKUP_BUCKET, jskult_backuo_folder_name).count() + jskultBatchStatusManager = JskultBatchStatusManager( environment.PROCESS_NAME, + + # TODO チケットNEWDWH2021-1847の実装で作成した定数に置き換え environment.POST_PROCESS, - environment.MAX_RUN_COUNT_FLG, - environment.RECEIVE_FILE_COUNT + environment.MAX_RUN_COUNT, + receive_file_count ) try: - jskultHdkeTblManager = JskultHdkeTblManager() - if not jskultHdkeTblManager.can_run_process(): logger.error( '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') @@ -41,7 +48,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): jskultBatchStatusManager.set_process_status("start") try: if not jskultBatchStatusManager.can_run_post_process(): - # リトライ判断された場合 + # 後続処理の起動条件を満たしていない場合 # 処理ステータスを「処理待」に設定 jskultBatchStatusManager.set_process_status("waiting") @@ -57,21 +64,23 @@ class DcfInstMergeIO(JskultBatchEntrypoint): # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 if jskultBatchStatusManager.is_done_ultmarc_import(): - # + # COM_施設からDCF削除新規マスタに登録 (is_add_dcf_inst_merge, - duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - # COM_施設からDCF削除新規マスタに登録 - _output_add_dcf_inst_merge_log(duplication_inst_records) - dcf_inst_merge_all_records = _select_dcf_inst_merge_all() + self._output_add_dcf_inst_merge_log( + duplication_inst_records) + dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() # CSV出力 - file_path = _make_csv_data( - dcf_inst_merge_all_records, environment.CSV_FILE_NAME) + file_path = self._make_csv_data( + environment.DCF_INST_MERGE_SEND_FILE_NAME, + dcf_inst_merge_all_records) # CSVをS3にアップロード - _upload_dcf_inst_merge_csv_file( - file_path, environment.CSV_FILE_NAME) + + self._upload_dcf_inst_merge_csv_file( + file_path, environment.DCF_INST_MERGE_SEND_FILE_NAME) # 
処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 logger.info("DCF削除新規マスタ作成処理を正常終了します。") @@ -79,210 +88,198 @@ class DcfInstMergeIO(JskultBatchEntrypoint): jskultBatchRunManager.batch_success() jskultBatchStatusManager.set_process_status("done") - except: + except Exception as e: # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 - logger.error("エラーが発生したため、DCF削除新規マスタ作成処理を終了します。") + logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}') + jskultBatchRunManager.batch_failed() jskultBatchStatusManager.set_process_status("failed") - def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: - try: - self._db = Database.get_instance() - self._db.connect() - self._db.begin() - self._db.to_jst() - sql = """\ - SELECT - * - FROM - src07.dcf_inst_merge - """ - dcf_inst_merge_all_records = self._db.execute_select(sql) - return dcf_inst_merge_all_records + def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: + try: + self._db = Database.get_instance() + self._db.connect() + sql = """\ + SELECT + * + FROM + src07.dcf_inst_merge + """ + dcf_inst_merge_all_records = self._db.execute_select(sql) + return dcf_inst_merge_all_records + except Exception as e: + raise BatchOperationException(e) + finally: + self._db.disconnect() - except Exception as e: - self._db.rollback() - raise BatchOperationException(e) - finally: - self._db.disconnect() + # com_instからdcf_inst_mergeにinsert + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: - def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: - # com_instからdcf_inst_mergeにinsert - try: - self._db = Database.get_instance() - self._db.connect() - self._db.begin() - self._db.to_jst() - - sql = """\ - SELECT - ci.DCF_DSF_INST_CD, - ci.FORM_INST_NAME_KANJI, - ci.DELETE_SCHE_REASON_CD, - ci.DUP_OPP_CD, - ci.SYS_UPDATE_DATE - FROM - src05.COM_INST AS ci - WHERE - ci.DUP_OPP_CD IS NOT NULL - AND - ci.DELETE_SCHE_REASON_CD = 'D' - AND - ci.DELETE_DATA IS NULL - AND - ci.SYS_UPDATE_DATE BETWEEN 
src07.get_syor_date() AND NOW() - AND - NOT EXISTS ( - SELECT - dim.DCF_INST_CD - FROM - src07.DCF_INST_MERGE AS dim - WHERE - dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + sql = """\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + src05.COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + src07.DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + src07.MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) ) - AND - - (ci.DCF_DSF_INST_CD EXISTS( - SELECT - mia.INST_CD - FROM - src07.MST_INST_ASSN as mia - WHERE - mia.INST_CD = ci.DCF_DSF_INST_CD - ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + src07.ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - ap.PRSB_INST_CD - FROM - src07.ATC_PHARM AS ap - WHERE - ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - trd.INST_CD - FROM - src07.TRN_RESULT_DATA AS trd - WHERE - trd.INST_CD = ci.DCF_DSF_INST_CD - ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + trd.INST_CD + FROM + src07.TRN_RESULT_DATA AS trd + WHERE + trd.INST_CD = ci.DCF_DSF_INST_CD ) - ; + ) + ; + """ + duplication_inst_records = self._db.execute_select(sql) + # DCF削除新規マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 
MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" + values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + self._db.execute(insert_sql, params) + return (True, duplication_inst_records) + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() - """ - duplication_inst_records = self._db.execute_select(sql) - - # DCF削除新規マスタ取り込み - values_clauses = [] - params = {} - for clauses_no, row in enumerate(duplication_inst_records, start=1): - dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" - dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" - values_clause = f"""(:{dcf_inst_cd_arr}, - :{dup_opp_cd_arr}, - DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), - NULL, - NULL, - NULL, - "Y", - batchuser, - SYSDATE(), - batchuser, - SYSDATE() - )""" - values_clauses.append(values_clause) - params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] - params[dup_opp_cd_arr] = row['DUP_OPP_CD'] - insert_sql = f""" - INSERT INTO - src07.dcf_inst_merge ( - DCF_INST_CD, - DUP_OPP_CD, - START_MONTH, - INVALID_FLG, - REMARKS, - DCF_INST_CD_NEW, - ENABLED_FLG, - CREATER, - CREATE_DATE, - UPDATER, - UPDATE_DATE - ) - VALUES - {','.join(values_clauses)} - """ - - self._db.execute(insert_sql, params) - - return (True, duplication_inst_records) - except Exception as e: - self._db.rollback() - raise BatchOperationException(e) - finally: - self._db.disconnect() - - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): - sys_update_date = duplication_inst_records[0]['sys_update_date'] - set_year_month = '{set_year}年{set_month}月'.format( - 
set_year=sys_update_date[0:4], - set_month=sys_update_date[-2:] - ) - - add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' - add_dct_inst_merge_list = [] - for row in duplication_inst_records: - add_dct_inst_merge_list.append( - add_dct_inst_merge.format(**row)) - add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) - # 顧客報告用にログ出力 - logger.info( - f"""DCF施設統合マスタが追加されました。 + def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): + sys_update_date = duplication_inst_records[0]['sys_update_date'] + set_year_month = '{set_year}年{set_month}月'.format( + set_year=sys_update_date[0:4], + set_month=sys_update_date[-2:] + ) + add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' + add_dct_inst_merge_list = [] + for row in duplication_inst_records: + add_dct_inst_merge_list.append( + add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) + # 顧客報告用にログ出力 + logger.info( + f"""DCF施設統合マスタが追加されました。 ********************************************************** 適用月度 {set_year_month} ********************************************************** {add_dct_inst_merge_list} ********************************************************** 合計 {len(duplication_inst_records)}件""" - ) - return + ) + return - def _make_csv_data(csv_file_name: str, record_inst: list): - # CSVファイルを作成する - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, csv_file_name) + def _make_csv_data(csv_file_name: str, record_inst: list): + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, csv_file_name) + head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', + 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', + 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # 
ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") + # UTF-8、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\r\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = [ + '' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + return csv_file_path - head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', - 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', - 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] + # CSVファイルをバックアップ + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): + # S3バケットにファイルを移動 + jsk_send_bucket = JskSendBucket() - with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: - # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) - csv_file.write(f"{','.join(head_str)}\n") + # 処理日を取得 + _, _, syor_date = JskultHdkeTblManager.get_batch_statuses() - # Shift-JIS、CRLF、価囲いありで書き込む - writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', - quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, - strict=True - ) - - # データ部分書き込み(施設) - for record_inst_data in record_inst: - record_inst_value = list(record_inst_data.values()) - csv_data = [ - '' if n is None else n for n in record_inst_value] - writer.writerow(csv_data) - - return csv_file_path - - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): - # S3バケットにファイルを移動 - jsk_send_bucket = JskSendBucket() - # バッチ共通設定を取得 - batch_context = BatchContext.get_instance() - - jsk_send_bucket.upload_dcf_inst_merge_csv_file( - csv_file_name, csv_file_path) - jsk_send_bucket.backup_dcf_inst_merge_csv_file( - csv_file_name, batch_context.syor_date) - return + jsk_send_bucket.upload_dcf_inst_merge_csv_file( + csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file( + 
csv_file_name, syor_date) + return diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index 2d5d5f41..91a060e6 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -9,9 +9,9 @@ DB_SCHEMA = os.environ['DB_SCHEMA'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +MAX_RUN_COUNT = os.environ["MAX_RUN_COUNT"] RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] +DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] From 95ce00a122d660123a4c981753be72f77e497860 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 13:58:44 +0900 Subject: [PATCH 18/30] =?UTF-8?q?feat:=20=E8=BB=A2=E9=80=81=E3=83=87?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E3=83=AA=E3=82=B9=E3=83=88=E3=82=92=E5=8F=96?= =?UTF-8?q?=E5=BE=97=E3=81=99=E3=82=8B=E9=83=A8=E5=88=86=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.env.example | 2 + ecs/jskult-batch/src/aws/s3.py | 25 +++- .../src/batch/dcf_inst_merge_io.py | 115 ++++++++++-------- .../src/manager/jskult_hdke_tbl_manager.py | 2 +- ecs/jskult-batch/src/system_var/constants.py | 2 +- .../src/system_var/environment.py | 6 +- 6 files changed, 92 insertions(+), 60 deletions(-) diff --git a/ecs/jskult-batch/.env.example b/ecs/jskult-batch/.env.example index 500f843d..d0bf48d3 100644 --- a/ecs/jskult-batch/.env.example +++ b/ecs/jskult-batch/.env.example @@ -18,6 +18,8 @@ VJSK_DATA_BUCKET=************* 
JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json # 連携データ抽出期間 SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 # 洗替対象テーブル名 diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index 66032e1c..6e5755be 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -1,11 +1,7 @@ -import gzip -import os import os.path as path -import shutil import tempfile import boto3 - from src.system_var import environment @@ -77,7 +73,9 @@ class ConfigBucket(S3Bucket): temporary_dir = tempfile.mkdtemp() temporary_file_path = path.join( temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + wholesaler_stock_input_day_list_key = \ + f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: self._s3_client.download_file( self._bucket_name, wholesaler_stock_input_day_list_key, f) @@ -105,6 +103,23 @@ class JskBackupBucket(JskUltBackupBucket): _folder = environment.JSKULT_BACKUP_BUCKET +class JskTransferListBucket(JskUltBackupBucket): + _folder = environment.TRANSFER_RESULT_FOLDER + + def download_transfer_result_file(self, process_date_yyyymmdd: str): + file_name = environment.TRANSFER_RESULT_FILE_NAME + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join( + temporary_dir, file_name) + holiday_list_key = f'{self._folder}/{process_date_yyyymmdd}/{file_name}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file( + self._bucket_name, holiday_list_key, f) + f.seek(0) + 
return temporary_file_path + + class JskSendBucket(S3Bucket): _bucket_name = environment.JSK_IO_BUCKET _send_folder = environment.JSK_DATA_SEND_FOLDER diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index a7298153..ad311d42 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,16 +1,18 @@ import csv +import json import os.path as path import tempfile -from src.aws.s3 import S3Client, JskSendBucket +from src.aws.s3 import JskSendBucket, JskTransferListBucket from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.db.database import Database -from src.error.exceptions import BatchOperationException, MaxRunCountReachedException -from src.manager.jskult_batch_run_manager import JskultBatchRunManager -from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager -from src.manager.jskult_batch_status_manager import JskultBatchStatusManager -from src.system_var import environment +from src.error.exceptions import (BatchOperationException, + MaxRunCountReachedException) from src.logging.get_logger import get_logger +from src.manager.jskult_batch_run_manager import JskultBatchRunManager +from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager +from src.system_var import environment logger = get_logger('DCF削除新規マスタ作成') @@ -20,80 +22,92 @@ class DcfInstMergeIO(JskultBatchEntrypoint): super().__init__() def execute(self): - jskultBatchRunManager = JskultBatchRunManager( + jskult_hdke_tbl_manager = JskultHdkeTblManager() + jskult_batch_run_manager = JskultBatchRunManager( environment.BATCH_EXECUTION_ID) - jskultHdkeTblManager = JskultHdkeTblManager() + if not jskult_hdke_tbl_manager.can_run_process(): + logger.error( + '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') + # バッチ実行管理テーブルをfailedで登録 + 
jskult_batch_run_manager.batch_failed() + return - # /transfer_result/yyyy/mm/dd/ - jskult_backuo_folder_name = f"""/transfer_result/{jskultHdkeTblManager.get_batch_statuses()[2]}""" + # 業務日付を取得 + _, _, process_date = jskult_hdke_tbl_manager.get_batch_statuses() - receive_file_count = S3Client.list_objects( - environment.JSKULT_BACKUP_BUCKET, jskult_backuo_folder_name).count() + # 転送ファイル一覧を取得し、転送件数を取得 + try: + transfer_list_bucket = JskTransferListBucket() + transfer_list_file_path = transfer_list_bucket.download_transfer_result_file( + process_date) + except Exception as e: + logger.exception(f'転送ファイル一覧の取得に失敗しました。 {e}') + # バッチ実行管理テーブルをfailedで登録 + jskult_batch_run_manager.batch_failed() - jskultBatchStatusManager = JskultBatchStatusManager( + with open(transfer_list_file_path) as f: + transfer_list = json.load(f) + + # 実消化データ + アルトマークデータの転送件数を合算し、受信ファイル件数とする + receive_file_count = len( + transfer_list['jsk_transfer_list']) + len(transfer_list['ult_transfer_list']) + + jskult_batch_status_manager = JskultBatchStatusManager( environment.PROCESS_NAME, - # TODO チケットNEWDWH2021-1847の実装で作成した定数に置き換え - environment.POST_PROCESS, + 'post_process', environment.MAX_RUN_COUNT, receive_file_count ) - try: - if not jskultHdkeTblManager.can_run_process(): - logger.error( - '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') - return - jskultBatchStatusManager.set_process_status("start") + jskult_batch_status_manager.set_process_status("start") try: - if not jskultBatchStatusManager.can_run_post_process(): + if not jskult_batch_status_manager.can_run_post_process(): # 後続処理の起動条件を満たしていない場合 # 処理ステータスを「処理待」に設定 - jskultBatchStatusManager.set_process_status("waiting") + jskult_batch_status_manager.set_process_status("waiting") # バッチ実行管理テーブルに「retry」で登録 - jskultBatchRunManager.batch_retry() + jskult_batch_run_manager.batch_retry() return - except MaxRunCountReachedException as e: + except MaxRunCountReachedException: logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。') - 
jskultBatchStatusManager.set_process_status("doing") + jskult_batch_status_manager.set_process_status("doing") # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 - if jskultBatchStatusManager.is_done_ultmarc_import(): - + if jskult_batch_status_manager.is_done_ultmarc_import(): # COM_施設からDCF削除新規マスタに登録 (is_add_dcf_inst_merge, duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - self._output_add_dcf_inst_merge_log( duplication_inst_records) - dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() + # CSV出力 + dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() file_path = self._make_csv_data( environment.DCF_INST_MERGE_SEND_FILE_NAME, dcf_inst_merge_all_records) # CSVをS3にアップロード - self._upload_dcf_inst_merge_csv_file( - file_path, environment.DCF_INST_MERGE_SEND_FILE_NAME) + file_path, process_date, environment.DCF_INST_MERGE_SEND_FILE_NAME) # 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 logger.info("DCF削除新規マスタ作成処理を正常終了します。") + jskult_batch_run_manager.batch_success() + jskult_batch_status_manager.set_process_status("done") - jskultBatchRunManager.batch_success() - jskultBatchStatusManager.set_process_status("done") + return except Exception as e: # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}') - - jskultBatchRunManager.batch_failed() - jskultBatchStatusManager.set_process_status("failed") + jskult_batch_run_manager.batch_failed() + jskult_batch_status_manager.set_process_status("failed") def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: try: @@ -121,7 +135,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): self._db.begin() self._db.to_jst() sql = """\ - SELECT + SELECT ci.DCF_DSF_INST_CD, ci.FORM_INST_NAME_KANJI, ci.DELETE_SCHE_REASON_CD, @@ -137,7 +151,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ci.DELETE_DATA IS NULL AND ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() - AND + AND NOT EXISTS ( SELECT dim.DCF_INST_CD @@ -147,11 
+161,11 @@ class DcfInstMergeIO(JskultBatchEntrypoint): dim.DCF_INST_CD = ci.DCF_DSF_INST_CD ) AND - + (ci.DCF_DSF_INST_CD EXISTS( SELECT mia.INST_CD - FROM + FROM src07.MST_INST_ASSN as mia WHERE mia.INST_CD = ci.DCF_DSF_INST_CD @@ -160,7 +174,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): OR ci.DCF_DSF_INST_CD EXISTS( SELECT ap.PRSB_INST_CD - FROM + FROM src07.ATC_PHARM AS ap WHERE ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD @@ -201,7 +215,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): insert_sql = f""" INSERT INTO src07.dcf_inst_merge ( - DCF_INST_CD, + DCF_INST_CD, DUP_OPP_CD, START_MONTH, INVALID_FLG, @@ -246,6 +260,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ********************************************************** 合計 {len(duplication_inst_records)}件""" ) + return def _make_csv_data(csv_file_name: str, record_inst: list): @@ -268,18 +283,18 @@ class DcfInstMergeIO(JskultBatchEntrypoint): csv_data = [ '' if n is None else n for n in record_inst_value] writer.writerow(csv_data) + return csv_file_path - # CSVファイルをバックアップ - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): - # S3バケットにファイルを移動 + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, process_date: str, csv_file_path: str): jsk_send_bucket = JskSendBucket() - # 処理日を取得 - _, _, syor_date = JskultHdkeTblManager.get_batch_statuses() - + # S3バケットにファイルをアップロード jsk_send_bucket.upload_dcf_inst_merge_csv_file( csv_file_name, csv_file_path) + + # CSVファイルをバックアップ jsk_send_bucket.backup_dcf_inst_merge_csv_file( - csv_file_name, syor_date) + csv_file_name, process_date) + return diff --git a/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py b/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py index 4a804ef3..f6c8a9f0 100644 --- a/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py +++ b/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py @@ -113,7 +113,7 @@ class JskultHdkeTblManager: finally: self._db.disconnect() # 日次バッチ処理中ではない場合、後続の処理は行わない - if 
batch_processing_flag != constants.BATCH_ACTF_BATCH_START: + if batch_processing_flag != constants.BATCH_ACTF_BATCH_START: return False # dump取得が正常終了していない場合、後続の処理は行わない if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: diff --git a/ecs/jskult-batch/src/system_var/constants.py b/ecs/jskult-batch/src/system_var/constants.py index 8a0ccbb3..10cd7fb8 100644 --- a/ecs/jskult-batch/src/system_var/constants.py +++ b/ecs/jskult-batch/src/system_var/constants.py @@ -4,7 +4,7 @@ BATCH_EXIT_CODE_SUCCESS = 0 # バッチ処理中フラグ:未処理 BATCH_ACTF_BATCH_UNPROCESSED = '0' # バッチ処理中フラグ:処理中 -BATCH_ACTF_BATCH_IN_PROCESSING = '1' +BATCH_ACTF_BATCH_START = '1' # dump取得状態区分:未処理 DUMP_STATUS_KBN_UNPROCESSED = '0' # dump取得状態区分:dump取得正常終了 diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index 91a060e6..e4a230c3 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -8,9 +8,9 @@ DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] -POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT = os.environ["MAX_RUN_COUNT"] -RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +MAX_RUN_COUNT = os.environ['MAX_RUN_COUNT'] +TRANSFER_RESULT_FOLDER = os.environ['TRANSFER_RESULT_FOLDER'] +TRANSFER_RESULT_FILE_NAME = os.environ['TRANSFER_RESULT_FILE_NAME'] DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] From 8adec779b2797972b49641650f5209a0b1b2c42c Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 14:55:32 +0900 Subject: [PATCH 19/30] =?UTF-8?q?.env.example=E3=81=A8=E3=82=BF=E3=82=B9?= =?UTF-8?q?=E3=82=AF=E8=A8=AD=E5=AE=9A=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=82=92=E4=BD=9C=E6=88=90?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.env.example | 44 +++++++++---------- .../src/system_var/environment.py | 3 ++ .../dcf_inst_merge_io_task_settings.env | 13 ++++++ 3 files changed, 36 insertions(+), 24 deletions(-) create mode 100644 s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env diff --git a/ecs/jskult-batch/.env.example b/ecs/jskult-batch/.env.example index d0bf48d3..b5ac18f8 100644 --- a/ecs/jskult-batch/.env.example +++ b/ecs/jskult-batch/.env.example @@ -1,28 +1,24 @@ -DB_HOST=************ -DB_PORT=************ -DB_USERNAME=************ -DB_PASSWORD=************ +DB_HOST=***************** +DB_PORT=***************** +DB_USERNAME=************* +DB_PASSWORD=************* DB_SCHEMA=src05 +JSK_IO_BUCKET=mbj-newdwh2021-staging-jskult-io +JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult +BATCH_MANAGE_DYNAMODB_TABLE_NAME=mbj-newdwh2021-staging-jskult-batch-run-manage +BATCH_EXECUTION_ID=localtest +MAX_RUN_COUNT=3 LOG_LEVEL=INFO -ULTMARC_DATA_BUCKET=**************** -ULTMARC_DATA_FOLDER=recv -JSKULT_BACKUP_BUCKET=**************** -ULTMARC_BACKUP_FOLDER=ultmarc -VJSK_BACKUP_FOLDER=vjsk -JSKULT_CONFIG_BUCKET=********************** -JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt -VJSK_DATA_SEND_FOLDER=send -VJSK_DATA_RECEIVE_FOLDER=recv -VJSK_DATA_BUCKET=************* -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt -JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +PROCESS_NAME=jskult-batch-dcf-inst-merge-io +JSK_DATA_SEND_FOLDER=send +JSK_BACKUP_FOLDER=jsk/send TRANSFER_RESULT_FOLDER=transfer_result TRANSFER_RESULT_FILE_NAME=transfer_result.json -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau -# 
卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 +DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config + +# DB接続リトライ設定 +DB_CONNECTION_MAX_RETRY_ATTEMPT=1 +DB_CONNECTION_RETRY_INTERVAL_INIT=1 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1 diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index e4a230c3..4e220ba0 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -6,6 +6,8 @@ DB_PORT = int(os.environ['DB_PORT']) DB_USERNAME = os.environ['DB_USERNAME'] DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] MAX_RUN_COUNT = os.environ['MAX_RUN_COUNT'] @@ -15,6 +17,7 @@ DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] +JSK_BACKUP_FOLDER = os.environ['JSK_BACKUP_FOLDER'] JSK_DATA_SEND_FOLDER = os.environ['JSK_DATA_SEND_FOLDER'] # 初期値がある環境変数 diff --git a/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env b/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env new file mode 100644 index 00000000..8469e66f --- /dev/null +++ b/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env @@ -0,0 +1,13 @@ +# task environment file. 
+LOG_LEVEL=INFO +PROCESS_NAME=jskult-batch-dcf-inst-merge-io +JSK_DATA_SEND_FOLDER=send +JSK_BACKUP_FOLDER=jsk/send +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json +DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config +DB_CONNECTION_MAX_RETRY_ATTEMPT=1 +DB_CONNECTION_RETRY_INTERVAL_INIT=1 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1 From 4d2bffcf3bc68ac80a17cb3412522675f184f29d Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 17:08:02 +0900 Subject: [PATCH 20/30] =?UTF-8?q?feat:=20DCF=E5=89=8A=E9=99=A4=E6=96=B0?= =?UTF-8?q?=E8=A6=8F=20=E9=96=8B=E5=A7=8B=E3=83=AD=E3=82=B0=E3=82=92?= =?UTF-8?q?=E5=87=BA=E3=81=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge_io.py | 1 + stepfunctions/TOOLS/convert_config.yaml | 34 +++++ ...-jskult-batch-dcf-inst-merge-io-state.json | 122 ++++++++++++++++++ 3 files changed, 157 insertions(+) create mode 100644 stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index ad311d42..82c7cfbe 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -22,6 +22,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): super().__init__() def execute(self): + logger.info("DCF削除新規マスタ作成処理を開始します。") jskult_hdke_tbl_manager = JskultHdkeTblManager() jskult_batch_run_manager = JskultBatchRunManager( environment.BATCH_EXECUTION_ID) diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index 560a6594..98700734 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -32,6 +32,8 @@ resource: - &STG_SG_JSKULT_TRANSFER_RECEIVE_FILE 
"sg-08d43e8e118178d39" # セキュリティグループ(ecs-jskult-batch-ultmarc-io) - &STG_SG_JSKULT_ULTMARC_IO "sg-014caf29e738d106a" + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + - &STG_SG_DCF_INST_MERGE_IO "sg-0b5d1639a83d28f66" # 本番環境 product: # サブネット(PrivateSubnet1) @@ -56,6 +58,9 @@ resource: # セキュリティグループ(ecs-jskult-batch-ultmarc-io) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_JSKULT_ULTMARC_IO "sg-xxxxxxxxxxxxx" + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + # TODO: 本番リリース時にIDを正式版にする + - &PRD_SG_DCF_INST_MERGE_IO "sg-xxxxxxxxxxxxx" config: # CRMデータ取得 r-crm-datafetch-state: @@ -281,3 +286,32 @@ config: SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) SG_JSKULT_ULTMARC_IO: *PRD_SG_JSKULT_ULTMARC_IO + r-jskult-batch-dcf-inst-merge-io-state: + # ステージング環境 + staging: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *STG_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *STG_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *STG_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_DCF_INST_MERGE_IO: *STG_SG_DCF_INST_MERGE_IO + # 本番環境 + product: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *PRD_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_DCF_INST_MERGE_IO: *PRD_SG_DCF_INST_MERGE_IO diff --git a/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json new file mode 100644 index 00000000..4497f85c --- /dev/null +++ b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json @@ -0,0 +1,122 @@ +{ + "Comment": "実消化&アルトマーク DCF施設削除新規マスタ作成ステートマシン", + 
"StartAt": "params", + "States": { + "params": { + "Comment": "パラメータ設定", + "Type": "Pass", + "Parameters": { + "ecs": { + "LaunchType": "FARGATE", + "Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-dcf-inst-merge-io-ecs", + "TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-dcf-inst-merge-io", + "NetworkConfiguration": { + "AwsvpcConfiguration": { + "Subnets": [ + "#{SUBNET_PRI_1A}", + "#{SUBNET_PRI_1D}" + ], + "SecurityGroups": [ + "#{SG_ECS_ALL}", + "#{SG_DCF_INST_MERGE_IO}" + ], + "AssignPublicIp": "DISABLED" + } + }, + "Overrides": { + "ContainerOverrides": [ + { + "Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-dcf-inst-merge-io", + "Environment": [ + { + "Name": " BATCH_EXECUTION_ID", + "Value.$": "$$.Execution.Id" + }, + { + "Name": "MAX_RUN_COUNT", + "Value.$": "$.maxRunCount" + } + ] + } + ] + } + } + }, + "ResultPath": "$.params", + "Next": "exec-ecs-task" + }, + "exec-ecs-task": { + "Type": "Task", + "Resource": "arn:aws:states:::ecs:runTask.sync", + "Parameters": { + "LaunchType.$": "$.params.ecs.LaunchType", + "Cluster.$": "$.params.ecs.Cluster", + "TaskDefinition.$": "$.params.ecs.TaskDefinition", + "NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration", + "Overrides.$": "$.params.ecs.Overrides" + }, + "ResultPath": "$.result", + "Retry": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "BackoffRate": 2, + "IntervalSeconds": 3, + "MaxAttempts": 3 + } + ], + "Catch": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "Next": "ErrorEnd", + "ResultPath": "$.result" + } + ], + "Next": "scan-jskult-batch-run-manage", + "Comment": "ECSタスク起動" + }, + "scan-jskult-batch-run-manage": { + "Type": "Task", + "Resource": "arn:aws:states:::dynamodb:getItem", + "Parameters": { + "TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage", + "Key": { + "execution_id": { + "S.$": "$$.Execution.Id" + } + 
} + }, + "Next": "Choice", + "ResultPath": "$.scan" + }, + "Choice": { + "Type": "Choice", + "Choices": [ + { + "Variable": "$.scan.Item.batch_run_status.S", + "StringEquals": "retry", + "Next": "wait-for-retry" + } + ], + "Default": "NormalEnd" + }, + "wait-for-retry": { + "Type": "Wait", + "SecondsPath": "$.retryIntervalSecond", + "Next": "exec-ecs-task" + }, + "NormalEnd": { + "Comment": "正常終了", + "Type": "Succeed" + }, + "ErrorEnd": { + "Comment": "異常終了", + "Type": "Fail", + "Error": "StatesError", + "Cause": "StepFunctions ErrorEnd" + } + } +} \ No newline at end of file From a0ffea108644f3c33a60c636861b7ec0bbb4037f Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 17:14:35 +0900 Subject: [PATCH 21/30] =?UTF-8?q?feat:=20DCF=E5=89=8A=E9=99=A4=E6=96=B0?= =?UTF-8?q?=E8=A6=8F=E3=83=9E=E3=82=B9=E3=82=BF=E4=BD=9C=E6=88=90=E3=81=AE?= =?UTF-8?q?=E3=82=B9=E3=83=86=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3?= =?UTF-8?q?=E5=AE=9A=E7=BE=A9=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../r-jskult-batch-dcf-inst-merge-io-state.json | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json index 4497f85c..b4d3e751 100644 --- a/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json +++ b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json @@ -95,13 +95,20 @@ "Choice": { "Type": "Choice", "Choices": [ + { + "Variable": "$.scan.Item", + "IsPresent": false, + "Next": "NormalEnd", + "Comment": "バッチ実行管理テーブルにデータが存在しない場合" + }, { "Variable": "$.scan.Item.batch_run_status.S", "StringEquals": "retry", - "Next": "wait-for-retry" + "Next": "wait-for-retry", + "Comment": 
"バッチ実行管理テーブルのスターテスがリトライの場合" } ], - "Default": "NormalEnd" + "Default": "ErrorEnd" }, "wait-for-retry": { "Type": "Wait", From 75072eb024b763107a511c46cb480eb331a3b9aa Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:00:14 +0900 Subject: [PATCH 22/30] =?UTF-8?q?feat:=20DCF=E5=89=8A=E9=99=A4=E6=96=B0?= =?UTF-8?q?=E8=A6=8F=E3=83=9E=E3=82=B9=E3=82=BF=E4=BD=9C=E6=88=90=E3=81=AE?= =?UTF-8?q?=E3=82=B9=E3=83=86=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3?= =?UTF-8?q?=E5=AE=9A=E7=BE=A9=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../r-jskult-batch-dcf-inst-merge-io-state.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json index b4d3e751..73816f70 100644 --- a/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json +++ b/stepfunctions/r-jskult-batch-dcf-inst-merge-io-state/r-jskult-batch-dcf-inst-merge-io-state.json @@ -29,7 +29,7 @@ "Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-dcf-inst-merge-io", "Environment": [ { - "Name": " BATCH_EXECUTION_ID", + "Name": "BATCH_EXECUTION_ID", "Value.$": "$$.Execution.Id" }, { @@ -98,7 +98,7 @@ { "Variable": "$.scan.Item", "IsPresent": false, - "Next": "NormalEnd", + "Next": "ErrorEnd", "Comment": "バッチ実行管理テーブルにデータが存在しない場合" }, { @@ -108,7 +108,7 @@ "Comment": "バッチ実行管理テーブルのスターテスがリトライの場合" } ], - "Default": "ErrorEnd" + "Default": "NormalEnd" }, "wait-for-retry": { "Type": "Wait", From e5222796490f1c57360061dda1088eddc56d31f6 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:24:06 +0900 Subject: [PATCH 23/30] =?UTF-8?q?feat:=20=E3=83=A1=E3=83=AB=E3=82=AF?= 
=?UTF-8?q?=E6=96=BD=E8=A8=AD=E3=83=9E=E3=82=B9=E3=82=BF=E4=BD=9C=E6=88=90?= =?UTF-8?q?=E3=82=B9=E3=83=86=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3?= =?UTF-8?q?=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- stepfunctions/TOOLS/convert_config.yaml | 34 +++++ .../r-jskult-batch-mst-inst-all-state.json | 129 ++++++++++++++++++ 2 files changed, 163 insertions(+) create mode 100644 stepfunctions/r-jskult-batch-mst-inst-all-state/r-jskult-batch-mst-inst-all-state.json diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index 98700734..9c893fa5 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -34,6 +34,8 @@ resource: - &STG_SG_JSKULT_ULTMARC_IO "sg-014caf29e738d106a" # セキュリティグループ(ecs-jskult-batch-ultmarc-io) - &STG_SG_DCF_INST_MERGE_IO "sg-0b5d1639a83d28f66" + # セキュリティグループ(ecs-jskult-batch-mst-inst-all) + - &STG_SG_MST_INST_ALL "sg-0483fd75c76fa5808" # 本番環境 product: # サブネット(PrivateSubnet1) @@ -61,6 +63,9 @@ resource: # セキュリティグループ(ecs-jskult-batch-ultmarc-io) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_DCF_INST_MERGE_IO "sg-xxxxxxxxxxxxx" + # セキュリティグループ(ecs-jskult-batch-mst-inst-all) + # TODO: 本番リリース時にIDを正式版にする + - &PRD_SG_MST_INST_ALL "sg-xxxxxxxxxxxxx" config: # CRMデータ取得 r-crm-datafetch-state: @@ -315,3 +320,32 @@ config: SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) SG_DCF_INST_MERGE_IO: *PRD_SG_DCF_INST_MERGE_IO +r-jskult-batch-mst-inst-all-state: + # ステージング環境 + staging: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *STG_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *STG_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *STG_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_MST_INST_ALL: *STG_SG_MST_INST_ALL + # 本番環境 + product: + # AWSアカウントID + 
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *PRD_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_MST_INST_ALL: *PRD_SG_MST_INST_ALL \ No newline at end of file diff --git a/stepfunctions/r-jskult-batch-mst-inst-all-state/r-jskult-batch-mst-inst-all-state.json b/stepfunctions/r-jskult-batch-mst-inst-all-state/r-jskult-batch-mst-inst-all-state.json new file mode 100644 index 00000000..54086560 --- /dev/null +++ b/stepfunctions/r-jskult-batch-mst-inst-all-state/r-jskult-batch-mst-inst-all-state.json @@ -0,0 +1,129 @@ +{ + "Comment": "実消化&アルトマーク メルク施設マスタステートマシン", + "StartAt": "params", + "States": { + "params": { + "Comment": "パラメータ設定", + "Type": "Pass", + "Parameters": { + "ecs": { + "LaunchType": "FARGATE", + "Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-mst-inst-all-ecs", + "TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-mst-inst-all", + "NetworkConfiguration": { + "AwsvpcConfiguration": { + "Subnets": [ + "#{SUBNET_PRI_1A}", + "#{SUBNET_PRI_1D}" + ], + "SecurityGroups": [ + "#{SG_ECS_ALL}", + "#{SG_MST_INST_ALL}" + ], + "AssignPublicIp": "DISABLED" + } + }, + "Overrides": { + "ContainerOverrides": [ + { + "Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-mst-inst-all", + "Environment": [ + { + "Name": "BATCH_EXECUTION_ID", + "Value.$": "$$.Execution.Id" + }, + { + "Name": "MAX_RUN_COUNT", + "Value.$": "$.maxRunCount" + } + ] + } + ] + } + } + }, + "ResultPath": "$.params", + "Next": "exec-ecs-task" + }, + "exec-ecs-task": { + "Type": "Task", + "Resource": "arn:aws:states:::ecs:runTask.sync", + "Parameters": { + "LaunchType.$": "$.params.ecs.LaunchType", + "Cluster.$": 
"$.params.ecs.Cluster", + "TaskDefinition.$": "$.params.ecs.TaskDefinition", + "NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration", + "Overrides.$": "$.params.ecs.Overrides" + }, + "ResultPath": "$.result", + "Retry": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "BackoffRate": 2, + "IntervalSeconds": 3, + "MaxAttempts": 3 + } + ], + "Catch": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "Next": "ErrorEnd", + "ResultPath": "$.result" + } + ], + "Next": "scan-jskult-batch-run-manage", + "Comment": "ECSタスク起動" + }, + "scan-jskult-batch-run-manage": { + "Type": "Task", + "Resource": "arn:aws:states:::dynamodb:getItem", + "Parameters": { + "TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage", + "Key": { + "execution_id": { + "S.$": "$$.Execution.Id" + } + } + }, + "Next": "Choice", + "ResultPath": "$.scan" + }, + "Choice": { + "Type": "Choice", + "Choices": [ + { + "Variable": "$.scan.Item", + "IsPresent": false, + "Next": "ErrorEnd", + "Comment": "バッチ実行管理テーブルにデータが存在しない場合" + }, + { + "Variable": "$.scan.Item.batch_run_status.S", + "StringEquals": "retry", + "Next": "wait-for-retry", + "Comment": "バッチ実行管理テーブルのスターテスがリトライの場合" + } + ], + "Default": "NormalEnd" + }, + "wait-for-retry": { + "Type": "Wait", + "SecondsPath": "$.retryIntervalSecond", + "Next": "exec-ecs-task" + }, + "NormalEnd": { + "Comment": "正常終了", + "Type": "Succeed" + }, + "ErrorEnd": { + "Comment": "異常終了", + "Type": "Fail", + "Error": "StatesError", + "Cause": "StepFunctions ErrorEnd" + } + } +} \ No newline at end of file From eda21bdc9424d3fe1d2562240032fd12818b4321 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:24:27 +0900 Subject: [PATCH 24/30] =?UTF-8?q?feat:=20=E5=90=84=E3=82=BF=E3=82=B9?= =?UTF-8?q?=E3=82=AF=E8=A8=AD=E5=AE=9A=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=82=92=E4=BD=9C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../task_settings/archive_jsk_data_task_settings.env | 11 +++++++++++ .../task_settings/mst_inst_all_task_settings.env | 9 +++++++++ .../trn_result_data_bio_lot_task_settings.env | 9 +++++++++ .../update_business_day_task_settings.env | 11 +++++++++++ 4 files changed, 40 insertions(+) create mode 100644 s3/config/jskult/task_settings/archive_jsk_data_task_settings.env create mode 100644 s3/config/jskult/task_settings/mst_inst_all_task_settings.env create mode 100644 s3/config/jskult/task_settings/trn_result_data_bio_lot_task_settings.env create mode 100644 s3/config/jskult/task_settings/update_business_day_task_settings.env diff --git a/s3/config/jskult/task_settings/archive_jsk_data_task_settings.env b/s3/config/jskult/task_settings/archive_jsk_data_task_settings.env new file mode 100644 index 00000000..1a8b2051 --- /dev/null +++ b/s3/config/jskult/task_settings/archive_jsk_data_task_settings.env @@ -0,0 +1,11 @@ +# task environment file. +LOG_LEVEL=INFO +ARCHIVE_TRN_RESULT_FOLDER=/jsk/trn_result_data +ARCHIVE_TRN_RESULT_BIO_FOLDER=/jsk/trn_result_data_bio +ARCHIVE_TRN_RESULT_BIO_LOT_FOLDER=/jsk/trn_result_data_bio_lot +ARCHIVE_TRN_RESULT_INVENTORY_FOLDER=/jsk/trn_recive_inventry +DB_CONNECTION_MAX_RETRY_ATTEMPT=4 +DB_CONNECTION_RETRY_INTERVAL_INIT=5 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50 + diff --git a/s3/config/jskult/task_settings/mst_inst_all_task_settings.env b/s3/config/jskult/task_settings/mst_inst_all_task_settings.env new file mode 100644 index 00000000..85e70ad1 --- /dev/null +++ b/s3/config/jskult/task_settings/mst_inst_all_task_settings.env @@ -0,0 +1,9 @@ +# task environment file. 
+LOG_LEVEL=INFO +ENTRYPOINT_MODULE_NAME=jskult-batch-mst-inst-all +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json +DB_CONNECTION_MAX_RETRY_ATTEMPT=4 +DB_CONNECTION_RETRY_INTERVAL_INIT=5 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50 diff --git a/s3/config/jskult/task_settings/trn_result_data_bio_lot_task_settings.env b/s3/config/jskult/task_settings/trn_result_data_bio_lot_task_settings.env new file mode 100644 index 00000000..f5adb7ab --- /dev/null +++ b/s3/config/jskult/task_settings/trn_result_data_bio_lot_task_settings.env @@ -0,0 +1,9 @@ +# task environment file. +LOG_LEVEL=INFO +PROCESS_NAME=jskult-batch-trn-result-data-bio-lot +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json +DB_CONNECTION_MAX_RETRY_ATTEMPT=4 +DB_CONNECTION_RETRY_INTERVAL_INIT=5 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50 diff --git a/s3/config/jskult/task_settings/update_business_day_task_settings.env b/s3/config/jskult/task_settings/update_business_day_task_settings.env new file mode 100644 index 00000000..b5217d60 --- /dev/null +++ b/s3/config/jskult/task_settings/update_business_day_task_settings.env @@ -0,0 +1,11 @@ +# task environment file. 
+LOG_LEVEL=INFO +PROCESS_NAME=jskult-batch-update-business-day +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar +JSKULT_CONFIG_CALENDAR_RUN_ARCHIVE_DAY_FILE_NAME=jsk_archive_run_day.txt +JSKULT_CONFIG_EXPECTED_DATA_LIST_FOLDER=jskult/expected_data_list +JSKULT_CONFIG_EXPECTED_DATA_LIST_FILE_NAME=jsk_expected_data_list.json +DB_CONNECTION_MAX_RETRY_ATTEMPT=4 +DB_CONNECTION_RETRY_INTERVAL_INIT=5 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50 From 9c4848aab77edbfd5c6b05c53c79fbc9b343e9af Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:24:47 +0900 Subject: [PATCH 25/30] =?UTF-8?q?feat:=20config=E3=81=AE=E8=AA=A4=E3=82=8A?= =?UTF-8?q?=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- stepfunctions/TOOLS/convert_config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index 9c893fa5..6533829e 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -320,7 +320,7 @@ config: SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) SG_DCF_INST_MERGE_IO: *PRD_SG_DCF_INST_MERGE_IO -r-jskult-batch-mst-inst-all-state: + r-jskult-batch-mst-inst-all-state: # ステージング環境 staging: # AWSアカウントID From 25a37e45526b2771ef3b10d8bff0ae474e3dce8e Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:40:03 +0900 Subject: [PATCH 26/30] =?UTF-8?q?feat:=20=E7=94=9F=E7=89=A9=E7=94=B1?= =?UTF-8?q?=E6=9D=A5=E3=83=AD=E3=83=83=E3=83=88=E5=88=86=E8=A7=A3=E3=81=AE?= =?UTF-8?q?=E3=82=B9=E3=83=86=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3?= =?UTF-8?q?=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- stepfunctions/TOOLS/convert_config.yaml | 36 ++++- ...t-batch-trn-result-data-bio-lot-state.json | 
129 ++++++++++++++++++ 2 files changed, 164 insertions(+), 1 deletion(-) create mode 100644 stepfunctions/r-jskult-batch-trn-result-data-bio-lot-state/r-jskult-batch-trn-result-data-bio-lot-state.json diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index 6533829e..d22087bd 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -36,6 +36,8 @@ resource: - &STG_SG_DCF_INST_MERGE_IO "sg-0b5d1639a83d28f66" # セキュリティグループ(ecs-jskult-batch-mst-inst-all) - &STG_SG_MST_INST_ALL "sg-0483fd75c76fa5808" + # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) + - &STG_SG_TRN_RESULT_DATA_BIO_LOT "sg-07ca545e440dd83b7" # 本番環境 product: # サブネット(PrivateSubnet1) @@ -66,6 +68,9 @@ resource: # セキュリティグループ(ecs-jskult-batch-mst-inst-all) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_MST_INST_ALL "sg-xxxxxxxxxxxxx" + # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) + # TODO: 本番リリース時にIDを正式版にする + - &PRD_SG_TRN_RESULT_DATA_BIO_LOT "sg-xxxxxxxxxxxxx" config: # CRMデータ取得 r-crm-datafetch-state: @@ -348,4 +353,33 @@ config: # セキュリティグループ(ecs-all) SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) - SG_MST_INST_ALL: *PRD_SG_MST_INST_ALL \ No newline at end of file + SG_MST_INST_ALL: *PRD_SG_MST_INST_ALL + r-jskult-batch-trn-result-data-bio-lot-state: + # ステージング環境 + staging: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *STG_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *STG_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *STG_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_TRN_RESULT_DATA_BIO_LOT: *STG_SG_TRN_RESULT_DATA_BIO_LOT + # 本番環境 + product: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: 
*PRD_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *PRD_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_TRN_RESULT_DATA_BIO_LOT: *PRD_SG_TRN_RESULT_DATA_BIO_LOT \ No newline at end of file diff --git a/stepfunctions/r-jskult-batch-trn-result-data-bio-lot-state/r-jskult-batch-trn-result-data-bio-lot-state.json b/stepfunctions/r-jskult-batch-trn-result-data-bio-lot-state/r-jskult-batch-trn-result-data-bio-lot-state.json new file mode 100644 index 00000000..4720e9e4 --- /dev/null +++ b/stepfunctions/r-jskult-batch-trn-result-data-bio-lot-state/r-jskult-batch-trn-result-data-bio-lot-state.json @@ -0,0 +1,129 @@ +{ + "Comment": "実消化&アルトマーク 生物由来ロット分解ステートマシン", + "StartAt": "params", + "States": { + "params": { + "Comment": "パラメータ設定", + "Type": "Pass", + "Parameters": { + "ecs": { + "LaunchType": "FARGATE", + "Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-trn-result-data-bio-lot-ecs", + "TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-trn-result-data-bio-lot", + "NetworkConfiguration": { + "AwsvpcConfiguration": { + "Subnets": [ + "#{SUBNET_PRI_1A}", + "#{SUBNET_PRI_1D}" + ], + "SecurityGroups": [ + "#{SG_ECS_ALL}", + "#{SG_TRN_RESULT_DATA_BIO_LOT}" + ], + "AssignPublicIp": "DISABLED" + } + }, + "Overrides": { + "ContainerOverrides": [ + { + "Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-trn-result-data-bio-lot", + "Environment": [ + { + "Name": "BATCH_EXECUTION_ID", + "Value.$": "$$.Execution.Id" + }, + { + "Name": "MAX_RUN_COUNT", + "Value.$": "$.maxRunCount" + } + ] + } + ] + } + } + }, + "ResultPath": "$.params", + "Next": "exec-ecs-task" + }, + "exec-ecs-task": { + "Type": "Task", + "Resource": "arn:aws:states:::ecs:runTask.sync", + "Parameters": { + "LaunchType.$": "$.params.ecs.LaunchType", + "Cluster.$": "$.params.ecs.Cluster", + "TaskDefinition.$": "$.params.ecs.TaskDefinition", + 
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration", + "Overrides.$": "$.params.ecs.Overrides" + }, + "ResultPath": "$.result", + "Retry": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "BackoffRate": 2, + "IntervalSeconds": 3, + "MaxAttempts": 3 + } + ], + "Catch": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "Next": "ErrorEnd", + "ResultPath": "$.result" + } + ], + "Next": "scan-jskult-batch-run-manage", + "Comment": "ECSタスク起動" + }, + "scan-jskult-batch-run-manage": { + "Type": "Task", + "Resource": "arn:aws:states:::dynamodb:getItem", + "Parameters": { + "TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage", + "Key": { + "execution_id": { + "S.$": "$$.Execution.Id" + } + } + }, + "Next": "Choice", + "ResultPath": "$.scan" + }, + "Choice": { + "Type": "Choice", + "Choices": [ + { + "Variable": "$.scan.Item", + "IsPresent": false, + "Next": "ErrorEnd", + "Comment": "バッチ実行管理テーブルにデータが存在しない場合" + }, + { + "Variable": "$.scan.Item.batch_run_status.S", + "StringEquals": "retry", + "Next": "wait-for-retry", + "Comment": "バッチ実行管理テーブルのスターテスがリトライの場合" + } + ], + "Default": "NormalEnd" + }, + "wait-for-retry": { + "Type": "Wait", + "SecondsPath": "$.retryIntervalSecond", + "Next": "exec-ecs-task" + }, + "NormalEnd": { + "Comment": "正常終了", + "Type": "Succeed" + }, + "ErrorEnd": { + "Comment": "異常終了", + "Type": "Fail", + "Error": "StatesError", + "Cause": "StepFunctions ErrorEnd" + } + } +} \ No newline at end of file From d45f39850ec62ed66abdb571bc2d92e8344353c3 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 19:51:21 +0900 Subject: [PATCH 27/30] =?UTF-8?q?feat:=20=E6=97=A5=E4=BB=98=E3=83=86?= =?UTF-8?q?=E3=83=BC=E3=83=96=E3=83=AB=E6=9B=B4=E6=96=B0=E3=81=AE=E3=82=B9?= =?UTF-8?q?=E3=83=86=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3=E3=82=92?= =?UTF-8?q?=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- stepfunctions/TOOLS/convert_config.yaml | 35 ++++- 
...skult-batch-update-business-day-state.json | 129 ++++++++++++++++++ 2 files changed, 163 insertions(+), 1 deletion(-) create mode 100644 stepfunctions/r-jskult-batch-update-business-day-state/r-jskult-batch-update-business-day-state.json diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index d22087bd..27659dcf 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -38,6 +38,8 @@ resource: - &STG_SG_MST_INST_ALL "sg-0483fd75c76fa5808" # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) - &STG_SG_TRN_RESULT_DATA_BIO_LOT "sg-07ca545e440dd83b7" + # TODO: 本番リリース時にIDを正式版にする + - &STG_SG_UPDATE_BIS_DAY "sg-0b58046abfa017e0e" # 本番環境 product: # サブネット(PrivateSubnet1) @@ -71,6 +73,8 @@ resource: # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_TRN_RESULT_DATA_BIO_LOT "sg-xxxxxxxxxxxxx" + # TODO: 本番リリース時にIDを正式版にする + - &PRD_SG_UPDATE_BIS_DAY "sg-xxxxxxxxxxxxx" config: # CRMデータ取得 r-crm-datafetch-state: @@ -382,4 +386,33 @@ config: # セキュリティグループ(ecs-all) SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) - SG_TRN_RESULT_DATA_BIO_LOT: *PRD_SG_TRN_RESULT_DATA_BIO_LOT \ No newline at end of file + SG_TRN_RESULT_DATA_BIO_LOT: *PRD_SG_TRN_RESULT_DATA_BIO_LOT + r-jskult-batch-update-business-day-state: + # ステージング環境 + staging: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *STG_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *STG_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *STG_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_UPDATE_BIS_DAY: *STG_SG_UPDATE_BIS_DAY + # 本番環境 + product: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: 
*PRD_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *PRD_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_UPDATE_BIS_DAY: *PRD_SG_UPDATE_BIS_DAY \ No newline at end of file diff --git a/stepfunctions/r-jskult-batch-update-business-day-state/r-jskult-batch-update-business-day-state.json b/stepfunctions/r-jskult-batch-update-business-day-state/r-jskult-batch-update-business-day-state.json new file mode 100644 index 00000000..b6cd2a25 --- /dev/null +++ b/stepfunctions/r-jskult-batch-update-business-day-state/r-jskult-batch-update-business-day-state.json @@ -0,0 +1,129 @@ +{ + "Comment": "実消化&アルトマーク 日付テーブル更新ステートマシン", + "StartAt": "params", + "States": { + "params": { + "Comment": "パラメータ設定", + "Type": "Pass", + "Parameters": { + "ecs": { + "LaunchType": "FARGATE", + "Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-update-business-day-ecs", + "TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-update-business-day", + "NetworkConfiguration": { + "AwsvpcConfiguration": { + "Subnets": [ + "#{SUBNET_PRI_1A}", + "#{SUBNET_PRI_1D}" + ], + "SecurityGroups": [ + "#{SG_ECS_ALL}", + "#{SG_UPDATE_BIS_DAY}" + ], + "AssignPublicIp": "DISABLED" + } + }, + "Overrides": { + "ContainerOverrides": [ + { + "Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-update-business-day", + "Environment": [ + { + "Name": "BATCH_EXECUTION_ID", + "Value.$": "$$.Execution.Id" + }, + { + "Name": "MAX_RUN_COUNT", + "Value.$": "$.maxRunCount" + } + ] + } + ] + } + } + }, + "ResultPath": "$.params", + "Next": "exec-ecs-task" + }, + "exec-ecs-task": { + "Type": "Task", + "Resource": "arn:aws:states:::ecs:runTask.sync", + "Parameters": { + "LaunchType.$": "$.params.ecs.LaunchType", + "Cluster.$": "$.params.ecs.Cluster", + "TaskDefinition.$": "$.params.ecs.TaskDefinition", + "NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration", + 
"Overrides.$": "$.params.ecs.Overrides" + }, + "ResultPath": "$.result", + "Retry": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "BackoffRate": 2, + "IntervalSeconds": 3, + "MaxAttempts": 3 + } + ], + "Catch": [ + { + "ErrorEquals": [ + "States.ALL" + ], + "Next": "ErrorEnd", + "ResultPath": "$.result" + } + ], + "Next": "scan-jskult-batch-run-manage", + "Comment": "ECSタスク起動" + }, + "scan-jskult-batch-run-manage": { + "Type": "Task", + "Resource": "arn:aws:states:::dynamodb:getItem", + "Parameters": { + "TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage", + "Key": { + "execution_id": { + "S.$": "$$.Execution.Id" + } + } + }, + "Next": "Choice", + "ResultPath": "$.scan" + }, + "Choice": { + "Type": "Choice", + "Choices": [ + { + "Variable": "$.scan.Item", + "IsPresent": false, + "Next": "ErrorEnd", + "Comment": "バッチ実行管理テーブルにデータが存在しない場合" + }, + { + "Variable": "$.scan.Item.batch_run_status.S", + "StringEquals": "retry", + "Next": "wait-for-retry", + "Comment": "バッチ実行管理テーブルのスターテスがリトライの場合" + } + ], + "Default": "NormalEnd" + }, + "wait-for-retry": { + "Type": "Wait", + "SecondsPath": "$.retryIntervalSecond", + "Next": "exec-ecs-task" + }, + "NormalEnd": { + "Comment": "正常終了", + "Type": "Succeed" + }, + "ErrorEnd": { + "Comment": "異常終了", + "Type": "Fail", + "Error": "StatesError", + "Cause": "StepFunctions ErrorEnd" + } + } +} \ No newline at end of file From 288c73b50f20193c772eaefecc150d1cf7350158 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 20:02:57 +0900 Subject: [PATCH 28/30] =?UTF-8?q?feat:=20=E3=82=A2=E3=83=BC=E3=82=AB?= =?UTF-8?q?=E3=82=A4=E3=83=96=E5=8F=96=E5=BE=97=E3=81=AE=E3=82=B9=E3=83=86?= =?UTF-8?q?=E3=83=BC=E3=83=88=E3=83=9E=E3=82=B7=E3=83=B3=E3=82=92=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- stepfunctions/TOOLS/convert_config.yaml | 40 ++++++++++- ...r-jskult-batch-archive-jsk-data-state.json | 72 +++++++++++++++++++ 2 
files changed, 110 insertions(+), 2 deletions(-) create mode 100644 stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json diff --git a/stepfunctions/TOOLS/convert_config.yaml b/stepfunctions/TOOLS/convert_config.yaml index 27659dcf..d68153e1 100644 --- a/stepfunctions/TOOLS/convert_config.yaml +++ b/stepfunctions/TOOLS/convert_config.yaml @@ -38,8 +38,11 @@ resource: - &STG_SG_MST_INST_ALL "sg-0483fd75c76fa5808" # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) - &STG_SG_TRN_RESULT_DATA_BIO_LOT "sg-07ca545e440dd83b7" - # TODO: 本番リリース時にIDを正式版にする + # セキュリティグループ(ecs-jskult-batch-update-business-day) - &STG_SG_UPDATE_BIS_DAY "sg-0b58046abfa017e0e" + # セキュリティグループ(ecs-jskult-batch-archive-jsk-data) + - &STG_SG_JSK_ARCHIVE "sg-0bbdd7d4ebb5ba222" + # 本番環境 product: # サブネット(PrivateSubnet1) @@ -73,8 +76,12 @@ resource: # セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_TRN_RESULT_DATA_BIO_LOT "sg-xxxxxxxxxxxxx" + # セキュリティグループ(ecs-jskult-batch-update-business-day) # TODO: 本番リリース時にIDを正式版にする - &PRD_SG_UPDATE_BIS_DAY "sg-xxxxxxxxxxxxx" + # セキュリティグループ(ecs-jskult-batch-archive-jsk-data) + # TODO: 本番リリース時にIDを正式版にする + - &PRD_SG_JSK_ARCHIVE "sg-xxxxxxxxxxxxx" config: # CRMデータ取得 r-crm-datafetch-state: @@ -415,4 +422,33 @@ config: # セキュリティグループ(ecs-all) SG_ECS_ALL: *PRD_SG_ECS_ALL # セキュリティグループ(ecs-jskult-batch-ultmarc-io) - SG_UPDATE_BIS_DAY: *PRD_SG_UPDATE_BIS_DAY \ No newline at end of file + SG_UPDATE_BIS_DAY: *PRD_SG_UPDATE_BIS_DAY + r-jskult-batch-archive-jsk-data-state: + # ステージング環境 + staging: + # AWSアカウントID + AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *STG_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *STG_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *STG_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_JSK_ARCHIVE: *STG_SG_JSK_ARCHIVE + # 本番環境 + product: + # AWSアカウントID + 
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID + # 東京リージョン + REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1 + # サブネット(PrivateSubnet1) + SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A + # サブネット(PrivateSubnet2) + SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D + # セキュリティグループ(ecs-all) + SG_ECS_ALL: *PRD_SG_ECS_ALL + # セキュリティグループ(ecs-jskult-batch-ultmarc-io) + SG_JSK_ARCHIVE: *PRD_SG_JSK_ARCHIVE diff --git a/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json b/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json new file mode 100644 index 00000000..3ff8d13b --- /dev/null +++ b/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json @@ -0,0 +1,72 @@ +{ + "Comment": "MeDaCA 実消化&アルトマーク 実消化過去データアーカイブ処理ステートマシン", + "StartAt": "params", + "States": { + "params": { + "Comment": "パラメータ設定", + "Type": "Pass", + "Parameters": { + "sns": { + "TopicArn": "arn:aws:sns:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:nds-notice-#{ENV_NAME}" + }, + "ecs": { + "Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-archive-jsk-data-ecs", + "LaunchType": "FARGATE", + "NetworkConfiguration": { + "AwsvpcConfiguration": { + "Subnets": [ + "#{SUBNET_PRI_1A}", + "#{SUBNET_PRI_1D}" + ], + "SecurityGroups": [ + "#{SG_ECS_ALL}", + "#{SG_JSK_ARCHIVE}" + ], + "AssignPublicIp": "DISABLED" + } + } + } + }, + "ResultPath": "$.params", + "Next": "exec-ecs-task" + }, + "exec-ecs-task": { + "Comment": "ECSタスク起動", + "Type": "Task", + "Resource": "arn:aws:states:::ecs:runTask.sync", + "Parameters": { + "Cluster.$": "$.params.ecs.Cluster", + "LaunchType.$": "$.params.ecs.LaunchType", + "TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-archive-jsk-data", + "NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration" + }, + "Retry": [ + { + "ErrorEquals": ["States.ALL"], + "BackoffRate": 2, 
+ "IntervalSeconds": 5, + "MaxAttempts": 3 + } + ], + "Catch": [ + { + "ErrorEquals": ["States.ALL"], + "ResultPath": "$.result", + "Next": "ErrorEnd" + } + ], + "ResultPath": "$.result", + "Next": "NormalEnd" + }, + "NormalEnd": { + "Comment": "正常終了", + "Type": "Succeed" + }, + "ErrorEnd": { + "Comment": "異常終了", + "Type": "Fail", + "Error": "StatesError", + "Cause": "StepFunctions ErrorEnd" + } + } +} From 1324408a97969a39d6a2374e47e48ffa6115a72d Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 20:04:44 +0900 Subject: [PATCH 29/30] =?UTF-8?q?feat:=20MAX=5FRUN=5FCOUNT=E3=81=AF?= =?UTF-8?q?=E6=96=87=E5=AD=97=E5=88=97=E3=81=A7=E5=8F=97=E3=81=91=E5=8F=96?= =?UTF-8?q?=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/src/system_var/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index 4e220ba0..bd1c9c24 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -10,7 +10,7 @@ DB_SCHEMA = os.environ['DB_SCHEMA'] # AWS JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] -MAX_RUN_COUNT = os.environ['MAX_RUN_COUNT'] +MAX_RUN_COUNT = int(os.environ['MAX_RUN_COUNT']) TRANSFER_RESULT_FOLDER = os.environ['TRANSFER_RESULT_FOLDER'] TRANSFER_RESULT_FILE_NAME = os.environ['TRANSFER_RESULT_FILE_NAME'] DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] From c0a0b2ffdae66237b6e54da691e26da0f736b7fe Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 May 2025 14:38:23 +0900 Subject: [PATCH 30/30] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../r-jskult-batch-archive-jsk-data-state.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json b/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json index 3ff8d13b..44ef2300 100644 --- a/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json +++ b/stepfunctions/r-jskult-batch-archive-jsk-data-state/r-jskult-batch-archive-jsk-data-state.json @@ -1,5 +1,5 @@ { - "Comment": "MeDaCA 実消化&アルトマーク 実消化過去データアーカイブ処理ステートマシン", + "Comment": "実消化_過去データアーカイブ処理ステートマシン", "StartAt": "params", "States": { "params": {