From 9ba4eda8a32afa1e1c275032d56cb22bc3f4544c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 25 Apr 2023 12:37:08 +0900 Subject: [PATCH 001/103] =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E5=AD=98=E5=9C=A8=E3=83=81=E3=82=A7=E3=83=83=E3=82=AF=E3=81=BE?= =?UTF-8?q?=E3=81=A7=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 3 + ecs/jskult-batch-daily/src/aws/s3.py | 27 ++- .../src/batch/common/batch_context.py | 9 + .../common/calendar_wholestocksaler_file.py | 32 ++++ .../src/batch/vjsk/vjsk_importer.py | 143 +++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 166 ++++++++++++++++++ ecs/jskult-batch-daily/src/jobctrl_daily.py | 2 + .../src/system_var/environment.py | 3 + 8 files changed, 384 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..d95322fb 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -11,3 +11,6 @@ ULTMARC_BACKUP_FOLDER=ultmarc JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt +JSKULT_DATA_BUCKET=********************** +JSKULT_DATA_FOLDER_RECV=********************** diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2ac3efe6..2aebff4b 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,3 +1,4 @@ +import io import os.path as path import tempfile @@ -16,7 +17,8 @@ class S3Client: return [] contents = response['Contents'] # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする - objects = [{'filename': content['Key'], 'size': content['Size']} for content in contents if not content['Key'].endswith('/')] + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] return objects def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: @@ -89,6 +91,16 @@ class ConfigBucket(S3Bucket): f.seek(0) return temporary_file_path + def download_wholesaler_stock_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + f.seek(0) + return temporary_file_path + class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET @@ -96,3 +108,16 @@ class JskUltBackupBucket(S3Bucket): class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBucket(S3Bucket): + # TODO:V実消化バケットから見たり取ってきたりする実装をやる + _bucket_name = environment.JSKULT_DATA_BUCKET + _recv_folder = environment.JSKULT_DATA_FOLDER_RECV + + def get_file_list(self): + return self._s3_client.list_objects(self._bucket_name, 
self._recv_folder) + + # def download_data_file(self, data_filename: str): + # temporary_dir = tempfile.mkdtemp() + # temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') diff --git a/ecs/jskult-batch-daily/src/batch/common/batch_context.py b/ecs/jskult-batch-daily/src/batch/common/batch_context.py index 3b3ac157..b493ecca 100644 --- a/ecs/jskult-batch-daily/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-daily/src/batch/common/batch_context.py @@ -3,6 +3,7 @@ class BatchContext: __syor_date: str # 処理日(yyyy/mm/dd形式) __is_not_business_day: bool # 日次バッチ起動日フラグ __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ + __is_import_target_vjsk_stockslipdata: bool # 卸在庫データ取込対象フラグ def __init__(self) -> None: self.__is_not_business_day = False @@ -37,3 +38,11 @@ class BatchContext: @is_ultmarc_imported.setter def is_ultmarc_imported(self, flag: bool): self.__is_ultmarc_imported = flag + + @property + def is_import_target_vjsk_stockslipdata(self): + return self.__is_import_target_vjsk_stockslipdata + + @is_import_target_vjsk_stockslipdata.setter + def is_import_target_vjsk_stockslipdata(self, flag: bool): + self.__is_import_target_vjsk_stockslipdata = flag diff --git a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py new file mode 100644 index 00000000..ba687514 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py @@ -0,0 +1,32 @@ +from src.system_var import constants + + +class CalendarWholwSalerStockFile: + """V実消化卸在庫データ連携日ファイル""" + + __calendar_file_lines: list[str] + + def __init__(self, calendar_file_path): + with open(calendar_file_path) as f: + self.__calendar_file_lines: list[str] = f.readlines() + + def compare_date(self, date_str: str) -> bool: + """与えられた日付がV実消化卸在庫データ連携日ファイル内に含まれているかどうか + V実消化卸在庫データ連携日ファイル内の日付はyyyy/mm/ddで書かれている前提 + コメント(#)が含まれている行は無視される + + Args: + date_str (str): yyyy/mm/dd文字列 + + Returns: + bool: 含まれていればTrue + """ + for calendar_date in self.__calendar_file_lines: + # コメント行が含まれている場合はスキップ + if constants.CALENDAR_COMMENT_SYMBOL in calendar_date: + continue + + if date_str in calendar_date: + return True + + return False diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py new file mode 100644 index 00000000..4c2cb0ee --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -0,0 +1,143 @@ +from src.aws.s3 import ConfigBucket, VjskBucket +from src.batch.common.batch_context import BatchContext +from src.batch.common.calendar_wholestocksaler_file import \ + CalendarWholwSalerStockFile +from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger + +# from src.batch.datachange import emp_chg_inst_lau + +logger = get_logger('V実消化データ取込') +batch_context = BatchContext.get_instance() +vjsk_recv_bucket = VjskBucket() +vjsk_mapper = VjskRecvFileMapper() + + +def _check_if_file_exists(src_list: list, key: str) -> bool: + pref = vjsk_mapper.get_file_prefix(key) + suff = vjsk_mapper.get_file_suffix(key) + for idx, elem in enumerate(src_list): + buf = elem.get("filename") + filename = buf[buf.rfind("/") + 1:] + if filename.startswith(pref) and filename.endswith(suff): + return True + return False + + +def _check_received_files(): + """V実消化連携データ存在確認処理""" + logger.debug('V実消化連携データ存在確認処理:開始') + + # 実消化&アルトマーク 
V実消化データ受領バケットにあるファイル一覧を取得 + received_files = vjsk_recv_bucket.get_file_list() + logger.debug(f'ファイル一覧{received_files}') + + # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) + if batch_context.is_import_target_vjsk_stockslipdata: + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): + raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸販売データ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_SLIP_DATA): + raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸組織変換マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): + raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 施設統合マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): + raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_MST): + raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸ホールディングスマスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_HLD_MST): + raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 施設マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_FCL_MST): + raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 メーカー卸組織展開表 + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): + raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 取引区分マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): + raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 製品マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): + raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 製品価格マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): + raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸得意先情報マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): + raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 MDBコード変換マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): + raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') + + logger.debug('V実消化連携データ存在確認処理:終了') + + return True + + +def _import_file_to_db(): + logger.debug('V実消化取込処理:開始') + + # diff_upsertに変わるやつを呼び出す + # emp_chg_inst_lau.batch_process() みたいに + + logger.debug('V実消化取込処理:終了') + + +def _determine_today_is_stockslipdata_target(): + try: + # 設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する + today = batch_context.syor_date + + holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() + targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) + ret = targetdays.compare_date(today) + except Exception as e: + logger.error(f'{e}') + raise e + return ret + + +def exec(): + """V実消化データ取込""" + logger.info('Start Jitsusyouka Torikomi PGM.') + + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる + logger.debug('卸在庫データ取込対象日であるかを判定') + batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() + logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + if 
batch_context.is_import_target_vjsk_stockslipdata: + logger.info('卸在庫データ取込対象日です') + + # V実消化データファイル受領チェック + logger.debug('V実消化データファイル受領チェック:開始') + try: + _check_received_files() + except BatchOperationException as e: + logger.error('受領したV実消化データファイルに欠落があります') + raise e + logger.debug('V実消化データファイル受領チェック:終了') + + # データベース取込 + logger.debug('V実消化データ取込:開始') + try: + _import_file_to_db() + except Exception as e: + logger.error(f'データベース登録失敗 {e}') + logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py new file mode 100644 index 00000000..6a997c9f --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -0,0 +1,166 @@ +class VjskRecvFileMapper: + CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ + CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ + CONDKEY_WHS_MST = "WHS_MST" # V卸マスタ + CONDKEY_MKR_ORG_HORIZON = "MKR_ORG_HORIZON" # Vメーカー卸組織展開表 + CONDKEY_ORG_CNV_MST = "ORG_CNV_MST" # V卸組織変換マスタ + CONDKEY_TRAN_KBN_MST = "TRAN_KBN_MST" # V取引区分マスタ + CONDKEY_FCL_MST = "FCL_MST" # V施設マスタ + CONDKEY_PHM_PRD_MST = "PHM_PRD_MST" # V製品マスタ + CONDKEY_PHM_PRICE_MST = "PHM_PRICE_MST" # V製品価格マスタ + CONDKEY_VOP_HCO_MERGE = "VOP_HCO_MERGE" # V施設統合マスタ + CONDKEY_WHS_CUSTOMER_MST = "WHS_CUSTOMER_MST" # V卸得意先情報マスタ + CONDKEY_MDB_CONV_MST = "MDB_CONV_MST" # MDBコード変換表 + CONDKEY_STOCK_SLIP_DATA = "STOCK_SLIP_DATA" # 卸在庫データ + CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ + CONDKEY_LOT_NUM_MS = "LOT_NUM_MS" # ロットマスタデータ + + _KEY_FILE_PREFIX = "file_prefix" + _KEY_FILE_SUFFIX = "file_suffix" + _KEY_ORG_TABLE = "org_table" + _KEY_SRC_TABLE = "src_table" + _VJSK_INTERFACE_MAPPING = { + # 販売実績データ + CONDKEY_SLIP_DATA: { + _KEY_FILE_PREFIX: "slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.sales", + _KEY_SRC_TABLE: "src05.sales" + }, + + # V卸ホールディングスマスタ + CONDKEY_HLD_MST: { + _KEY_FILE_PREFIX: "hld_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.hld_mst_v", + _KEY_SRC_TABLE: "src05.hld_mst_v" + }, + + # V卸マスタ + CONDKEY_WHS_MST: { + _KEY_FILE_PREFIX: "whs_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whs_mst_v", + _KEY_SRC_TABLE: "src05.whs_mst_v" + }, + + # Vメーカー卸組織展開表 + CONDKEY_MKR_ORG_HORIZON: { + _KEY_FILE_PREFIX: "mkr_org_horizon_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", + _KEY_SRC_TABLE: "src05.mkr_org_horizon_v" + }, + + # V卸組織変換マスタ + CONDKEY_ORG_CNV_MST: { + _KEY_FILE_PREFIX: "org_cnv_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.org_cnv_mst_v", + _KEY_SRC_TABLE: "src05.org_cnv_mst_v" + }, + + # V取引区分マスタ + CONDKEY_TRAN_KBN_MST: { + _KEY_FILE_PREFIX: "tran_kbn_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", + _KEY_SRC_TABLE: "src05.tran_kbn_mst_v" + }, + + # V施設マスタ + CONDKEY_FCL_MST: { + _KEY_FILE_PREFIX: "fcl_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.fcl_mst_v", + _KEY_SRC_TABLE: "src05.fcl_mst_v" + }, + + # V製品マスタ + CONDKEY_PHM_PRD_MST: { + _KEY_FILE_PREFIX: "phm_prd_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.phm_prd_mst_v", + _KEY_SRC_TABLE: "src05.phm_prd_mst_v" + }, + + # V製品価格マスタ + CONDKEY_PHM_PRICE_MST: { + _KEY_FILE_PREFIX: "phm_price_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.phm_price_mst_v", + _KEY_SRC_TABLE: "src05.phm_price_mst_v" + }, + + # V施設統合マスタ + CONDKEY_VOP_HCO_MERGE: { + _KEY_FILE_PREFIX: "vop_hco_merge_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.vop_hco_merge_v", + _KEY_SRC_TABLE: 
"src05.vop_hco_merge_v" + }, + + # V卸得意先情報マスタ + CONDKEY_WHS_CUSTOMER_MST: { + _KEY_FILE_PREFIX: "whs_customer_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whs_customer_mst_v", + _KEY_SRC_TABLE: "src05.whs_customer_mst_v" + }, + + # MDBコード変換表 + CONDKEY_MDB_CONV_MST: { + _KEY_FILE_PREFIX: "mdb_conv_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.mdb_conv_mst_v", + _KEY_SRC_TABLE: "src05.mdb_conv_mst_v" + }, + + # 卸在庫データ + CONDKEY_STOCK_SLIP_DATA: { + _KEY_FILE_PREFIX: "stock_slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whole_stock", + _KEY_SRC_TABLE: "src05.whole_stock" + }, + + # 生物由来データ + CONDKEY_BIO_SLIP_DATA: { + _KEY_FILE_PREFIX: "bio_slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.bio_sales", + _KEY_SRC_TABLE: "src05.bio_sales" + }, + + # ロットマスタデータ + CONDKEY_LOT_NUM_MS: { + _KEY_FILE_PREFIX: "lot_num_ms_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.lot_num_mst", + _KEY_SRC_TABLE: "src05.lot_num_mst" + }, + } + + def get_file_prefix(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_PREFIX) + return ret + + def get_file_suffix(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_SUFFIX) + return ret + + def get_org_table(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_ORG_TABLE) + return ret + + def get_src_table(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) + return ret diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 370f2179..dc7146b7 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -9,6 +9,7 @@ from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile from src.batch.laundering import create_dcf_inst_merge, create_mst_inst from src.batch.ultmarc import ultmarc_process +from src.batch.vjsk import vjsk_importer from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants @@ -84,6 +85,7 @@ def exec(): logger.info('日次処理(V実消化)') try: logger.info('V実消化取込:起動') + vjsk_importer.exec() logger.info('V実消化取込:終了') except BatchOperationException as e: logger.exception(f'V実消化取込処理エラー(異常終了){e}') diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..6a2fca0b 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -15,6 +15,9 @@ ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] +JSKULT_DATA_BUCKET = os.environ['JSKULT_DATA_BUCKET'] +JSKULT_DATA_FOLDER_RECV = os.environ['JSKULT_DATA_FOLDER_RECV'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 
'INFO') From 1fd6633bc838c9a626c2daa76a565ebe468b3a5b Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 28 Apr 2023 19:51:59 +0900 Subject: [PATCH 002/103] =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=83=AD=E3=83=BC=E3=83=89=E3=81=8B=E3=82=89DB=E7=99=BB?= =?UTF-8?q?=E9=8C=B2=E3=81=AE=E5=AE=9F=E8=A3=85(=E5=AE=9F=E8=A1=8C?= =?UTF-8?q?=E7=A2=BA=E8=AA=8D=E3=81=AF=E6=9C=AA)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 17 +++-- .../src/batch/vjsk/vjsk_data_load_manager.py | 56 +++++++++++++++++ .../src/batch/vjsk/vjsk_importer.py | 63 +++++++++++++++++-- .../src/batch/vjsk/vjsk_recv_file_manager.py | 60 ++++++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 9 +++ 5 files changed, 196 insertions(+), 9 deletions(-) create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2aebff4b..79c80db9 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -115,9 +115,16 @@ class VjskBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV - def get_file_list(self): - return self._s3_client.list_objects(self._bucket_name, self._recv_folder) + _s3_file_list = None - # def download_data_file(self, data_filename: str): - # temporary_dir = tempfile.mkdtemp() - # temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + def get_s3_file_list(self): + self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) + return self._s3_file_list + + def download_data_file(self, data_filename: str): + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, data_filename, f) + f.seek(0) + return temporary_file_path diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py new file mode 100644 index 00000000..a412c3c7 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -0,0 +1,56 @@ +from src.batch.vjsk.vjsk_recv_file_manager import (VjskDatFile, + VjskRecvFileManager) +from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.db.database import Database +from src.logging.get_logger import get_logger + +logger = get_logger('V実消化データ取込(DB登録)') +mapper = VjskRecvFileMapper() + + +class JjskDataLoadManager: + def _import_to_db(dat_file: VjskDatFile, condkey: str): + db = Database.get_instance() + table_name_org = mapper.get_org_table(condkey) + table_name_src = mapper.get_org_table(condkey) + + try: + db.connect() # TODO:接続オプション local_infile = True が必要? 
+ db.begin() + + # orgをtruncate + f"TRUNCATE TABLE {table_name_org};" + + # orgにload ※warningは1148エラーになるらしい + sql = f"LOAD DATA LOCAL INFILE {dat_file} INTO TABLE {table_name_org} FIELDS TERMINATED BY '\t' ENCLOSED BY ""'"" IGNORE 1 LINES;" + cnt = db.execute(sql) + logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') + + # org→srcにinsert select + # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい + f"INSERT INTO {table_name_src} SELECT * FROM {table_name_org};" + + db.commit() + except Exception as e: # TODO:DB例外だけキャッチしたい + db.rollback() + logger.error(e) + raise e + finally: + db.disconnect() + return + + def Load(self, target: dict): + # target : {"condkey": key, "src_file_path":local_file_path} + + # データファイルオープン + dat_file = VjskRecvFileManager.file_open(target["local_file_path"]) + + # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) + # TODO: ファイルオンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 + # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ + dat_file = dat_file + + # データベース登録 + self._import_to_db(dat_file, target["condkey"]) + + return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 4c2cb0ee..839058ab 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -2,6 +2,7 @@ from src.aws.s3 import ConfigBucket, VjskBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_wholestocksaler_file import \ CalendarWholwSalerStockFile +from src.batch.vjsk.vjsk_data_load_manager import JjskDataLoadManager from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger @@ -30,7 +31,7 @@ def _check_received_files(): logger.debug('V実消化連携データ存在確認処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 - received_files = vjsk_recv_bucket.get_file_list() + received_files = vjsk_recv_bucket.get_s3_file_list() logger.debug(f'ファイル一覧{received_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) @@ -94,15 +95,67 @@ def _check_received_files(): def _import_file_to_db(): logger.debug('V実消化取込処理:開始') - # diff_upsertに変わるやつを呼び出す - # emp_chg_inst_lau.batch_process() みたいに + # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 + received_s3_files = vjsk_recv_bucket.get_s3_file_list() + + # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する + target_dict = {} + for s3_file_path in received_s3_files: + local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path) + key = vjsk_mapper.get_condkey_by_s3_file_path(local_file_path) + if key is not None: + target_dict[key] = {"condkey": key, "src_file_path": local_file_path} + logger.debug(f'S3ファイルパス辞書{target_dict}') + + # TODO: diff_upsertに変わるやつを呼び出す + # TODO: emp_chg_inst_lau.batch_process() みたいに + + # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) + if batch_context.is_import_target_vjsk_stockslipdata: + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) + + # # # ファイル存在確認 卸販売データ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + + # # # ファイル存在確認 卸組織変換マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + + # # # ファイル存在確認 施設統合マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + + # # # ファイル存在確認 卸マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + + # # # ファイル存在確認 卸ホールディングスマスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + + 
# # # ファイル存在確認 施設マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + + # # # ファイル存在確認 メーカー卸組織展開表 + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + + # # # ファイル存在確認 取引区分マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + + # # # ファイル存在確認 製品マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + + # # # ファイル存在確認 製品価格マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + + # # # ファイル存在確認 卸得意先情報マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + + # # # ファイル存在確認 MDBコード変換マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) logger.debug('V実消化取込処理:終了') def _determine_today_is_stockslipdata_target(): + """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する""" try: - # 設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する today = batch_context.syor_date holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() @@ -128,7 +181,9 @@ def exec(): # V実消化データファイル受領チェック logger.debug('V実消化データファイル受領チェック:開始') try: + # S3バケット上でV実消化データファイルの存在チェックをする _check_received_files() + except BatchOperationException as e: logger.error('受領したV実消化データファイルに欠落があります') raise e diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py new file mode 100644 index 00000000..5a0dd54f --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py @@ -0,0 +1,60 @@ +import csv +from io import TextIOWrapper + + +class VjskRecvFileManager: + layout_class: str + records: list[str] + + def __init__(self, dat_line: list[str]) -> None: + self.layout_class = dat_line[0] + self.records = dat_line + + +class VjskDatFile: + """V実消化データファイル""" + + lines: list[VjskRecvFileManager] + success_count: int = 0 + error_count: int = 0 + total_count: int = 0 + __i: int = 0 + + def __iter__(self): + return self + + def __next__(self) -> VjskRecvFileManager: + if self.__i == len(self.lines): + raise StopIteration() + line = self.lines[self.__i] + self.__i += 1 + return line + + def __init__(self, file: TextIOWrapper) -> None: + reader = csv.reader(file) + csv_rows = [VjskRecvFileManager(row) for row in reader] + + self.lines = csv_rows + self.total_count = len(csv_rows) + + def count_up_success(self): + self.success_count += 1 + + def count_up_error(self): + self.error_count += 1 + + @classmethod + def file_open(cls, local_file_path: str): + """V実消化データファイルを読み込み、新しいインスタンスを作成する + + Args: + local_file_path (str): ローカルのファイルパス + + Returns: + VjskDatFile: このクラスのインスタンス + """ + # cp932(Shift-JIS Windows拡張)でファイルを読み込む + file = open(local_file_path, encoding='cp932') + instance = cls(file) + file.close() + return instance diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 6a997c9f..d89f5063 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -164,3 +164,12 @@ class VjskRecvFileMapper: if condkey in self._VJSK_INTERFACE_MAPPING: ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) return ret + + def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: + ret = None + filename = s3_file_path[s3_file_path.rfind("/") + 1:] + for element in self._VJSK_INTERFACE_MAPPING: + if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and 
filename.endswith(element.get(self._KEY_FILE_SUFFIX)): + ret = element + break + return ret From aca85704dafb90c5bbb68e6ed3d11b45222fe433 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 16:25:11 +0900 Subject: [PATCH 003/103] =?UTF-8?q?LOAD=20DATA=20=20LOCAL=20INFILE?= =?UTF-8?q?=E3=81=A73948=E3=82=A8=E3=83=A9=E3=83=BC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 2 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 28 +++--- .../src/batch/vjsk/vjsk_importer.py | 42 ++++----- .../src/batch/vjsk/vjsk_recv_file_manager.py | 90 +++++++++---------- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 5 +- 5 files changed, 89 insertions(+), 78 deletions(-) diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 79c80db9..2ee93eb8 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -123,7 +123,7 @@ class VjskBucket(S3Bucket): def download_data_file(self, data_filename: str): temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') with open(temporary_file_path, mode='wb') as f: self._s3_client.download_file(self._bucket_name, data_filename, f) f.seek(0) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index a412c3c7..9aa80070 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -1,5 +1,4 @@ -from src.batch.vjsk.vjsk_recv_file_manager import (VjskDatFile, - VjskRecvFileManager) +# from src.batch.vjsk.vjsk_recv_file_manager import VjskDatFile from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.db.database import Database from src.logging.get_logger import get_logger @@ -9,20 +8,26 @@ mapper = VjskRecvFileMapper() class JjskDataLoadManager: - def _import_to_db(dat_file: VjskDatFile, condkey: str): + def __init__(self): + pass + + def _import_to_db(src_file_name: str, condkey: str): db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) - table_name_src = mapper.get_org_table(condkey) + table_name_src = mapper.get_src_table(condkey) try: db.connect() # TODO:接続オプション local_infile = True が必要? 
db.begin() # orgをtruncate - f"TRUNCATE TABLE {table_name_org};" + db.execute(f"TRUNCATE TABLE {table_name_org};") + + # load DATA local infileステートメント実行許可設定 + db.execute("SET GLOBAL local_infile=on;") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE {dat_file} INTO TABLE {table_name_org} FIELDS TERMINATED BY '\t' ENCLOSED BY ""'"" IGNORE 1 LINES;" + sql = f"LOAD DATA LOCAL INFILE '{src_file_name}' INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY \"'\" IGNORE 1 LINES;" cnt = db.execute(sql) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') @@ -39,18 +44,21 @@ class JjskDataLoadManager: db.disconnect() return + @classmethod def Load(self, target: dict): + logger.debug(f'JjskDataLoadManager#load start target:{target}') # target : {"condkey": key, "src_file_path":local_file_path} # データファイルオープン - dat_file = VjskRecvFileManager.file_open(target["local_file_path"]) + local_file_name = target["src_file_path"] + # dat_file = VjskDatFile.retrieve_from_file(local_file_name) # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) - # TODO: ファイルオンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 + # TODO: ファイルエンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ - dat_file = dat_file # データベース登録 - self._import_to_db(dat_file, target["condkey"]) + self._import_to_db(local_file_name, target["condkey"]) + logger.debug('JjskDataLoadManager#load end') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 839058ab..7db2983e 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -9,7 +9,7 @@ from src.logging.get_logger import get_logger # from src.batch.datachange import emp_chg_inst_lau -logger = get_logger('V実消化データ取込') +_logger = get_logger('V実消化データ取込') batch_context = BatchContext.get_instance() vjsk_recv_bucket = VjskBucket() vjsk_mapper = VjskRecvFileMapper() @@ -28,11 +28,11 @@ def _check_if_file_exists(src_list: list, key: str) -> bool: def _check_received_files(): """V実消化連携データ存在確認処理""" - logger.debug('V実消化連携データ存在確認処理:開始') + _logger.debug('V実消化連携データ存在確認処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 received_files = vjsk_recv_bucket.get_s3_file_list() - logger.debug(f'ファイル一覧{received_files}') + _logger.debug(f'ファイル一覧{received_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: @@ -87,13 +87,13 @@ def _check_received_files(): if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') - logger.debug('V実消化連携データ存在確認処理:終了') + _logger.debug('V実消化連携データ存在確認処理:終了') return True def _import_file_to_db(): - logger.debug('V実消化取込処理:開始') + _logger.debug('V実消化取込処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 received_s3_files = vjsk_recv_bucket.get_s3_file_list() @@ -101,11 +101,11 @@ def _import_file_to_db(): # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する target_dict = {} for s3_file_path in received_s3_files: - local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path) - key = vjsk_mapper.get_condkey_by_s3_file_path(local_file_path) + local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) if key is not None: target_dict[key] = {"condkey": key, "src_file_path": local_file_path} - 
logger.debug(f'S3ファイルパス辞書{target_dict}') + _logger.debug(f'S3ファイルパス辞書{target_dict}') # TODO: diff_upsertに変わるやつを呼び出す # TODO: emp_chg_inst_lau.batch_process() みたいに @@ -150,7 +150,7 @@ def _import_file_to_db(): # # # ファイル存在確認 MDBコード変換マスタ # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) - logger.debug('V実消化取込処理:終了') + _logger.debug('V実消化取込処理:終了') def _determine_today_is_stockslipdata_target(): @@ -162,37 +162,39 @@ def _determine_today_is_stockslipdata_target(): targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) ret = targetdays.compare_date(today) except Exception as e: - logger.error(f'{e}') + _logger.error(f'{e}') raise e return ret def exec(): """V実消化データ取込""" - logger.info('Start Jitsusyouka Torikomi PGM.') + _logger.info('Start Jitsusyouka Torikomi PGM.') # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - logger.debug('卸在庫データ取込対象日であるかを判定') + _logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + _logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') if batch_context.is_import_target_vjsk_stockslipdata: - logger.info('卸在庫データ取込対象日です') + _logger.info('卸在庫データ取込対象日です') # V実消化データファイル受領チェック - logger.debug('V実消化データファイル受領チェック:開始') + _logger.debug('V実消化データファイル受領チェック:開始') try: # S3バケット上でV実消化データファイルの存在チェックをする _check_received_files() except BatchOperationException as e: - logger.error('受領したV実消化データファイルに欠落があります') + _logger.error('受領したV実消化データファイルに欠落があります') raise e - logger.debug('V実消化データファイル受領チェック:終了') + _logger.debug('V実消化データファイル受領チェック:終了') # データベース取込 - logger.debug('V実消化データ取込:開始') + _logger.debug('V実消化データ取込:開始') try: _import_file_to_db() except Exception as e: - logger.error(f'データベース登録失敗 {e}') - logger.debug('V実消化データ取込:終了') + _logger.error(f'データベース登録失敗 {e}') + raise e + + _logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py index 5a0dd54f..7ddbb766 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py @@ -1,60 +1,60 @@ -import csv -from io import TextIOWrapper +# import csv +# from io import TextIOWrapper -class VjskRecvFileManager: - layout_class: str - records: list[str] +# class VjskRecvFileManager: +# layout_class: str +# records: list[str] - def __init__(self, dat_line: list[str]) -> None: - self.layout_class = dat_line[0] - self.records = dat_line +# def __init__(self, dat_line: list[str]) -> None: +# self.layout_class = dat_line[0] +# self.records = dat_line -class VjskDatFile: - """V実消化データファイル""" +# class VjskDatFile: +# """V実消化データファイル""" - lines: list[VjskRecvFileManager] - success_count: int = 0 - error_count: int = 0 - total_count: int = 0 - __i: int = 0 +# lines: list[VjskRecvFileManager] +# success_count: int = 0 +# error_count: int = 0 +# total_count: int = 0 +# __i: int = 0 - def __iter__(self): - return self +# def __iter__(self): +# return self - def __next__(self) -> VjskRecvFileManager: - if self.__i == len(self.lines): - raise StopIteration() - line = self.lines[self.__i] - self.__i += 1 - return line +# def __next__(self) -> VjskRecvFileManager: +# if self.__i == len(self.lines): +# raise StopIteration() +# line = self.lines[self.__i] +# self.__i += 1 +# return line - def __init__(self, file: TextIOWrapper) -> None: - reader = csv.reader(file) - csv_rows = 
[VjskRecvFileManager(row) for row in reader] +# def __init__(self, file: TextIOWrapper) -> None: +# reader = csv.reader(file) +# csv_rows = [VjskRecvFileManager(row) for row in reader] - self.lines = csv_rows - self.total_count = len(csv_rows) +# self.lines = csv_rows +# self.total_count = len(csv_rows) - def count_up_success(self): - self.success_count += 1 +# def count_up_success(self): +# self.success_count += 1 - def count_up_error(self): - self.error_count += 1 +# def count_up_error(self): +# self.error_count += 1 - @classmethod - def file_open(cls, local_file_path: str): - """V実消化データファイルを読み込み、新しいインスタンスを作成する +# @classmethod +# def retrieve_from_file(cls, local_file_path: str): +# """V実消化データファイルを読み込み、新しいインスタンスを作成する - Args: - local_file_path (str): ローカルのファイルパス +# Args: +# local_file_path (str): ローカルのファイルパス - Returns: - VjskDatFile: このクラスのインスタンス - """ - # cp932(Shift-JIS Windows拡張)でファイルを読み込む - file = open(local_file_path, encoding='cp932') - instance = cls(file) - file.close() - return instance +# Returns: +# VjskDatFile: このクラスのインスタンス +# """ +# # cp932(Shift-JIS Windows拡張)でファイルを読み込む +# file = open(local_file_path, encoding='cp932') +# instance = cls(file) +# file.close() +# return instance diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index d89f5063..4aeea24f 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -168,8 +168,9 @@ class VjskRecvFileMapper: def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None filename = s3_file_path[s3_file_path.rfind("/") + 1:] - for element in self._VJSK_INTERFACE_MAPPING: + for condkey in self._VJSK_INTERFACE_MAPPING: + element = self._VJSK_INTERFACE_MAPPING.get(condkey) if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): - ret = element + ret = condkey break return ret From 4226ad9db1dc9d07cb3798b4f6da5e28e1958e59 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 18:13:23 +0900 Subject: [PATCH 004/103] =?UTF-8?q?org=E3=81=AE=E3=83=86=E3=83=BC=E3=83=96?= =?UTF-8?q?=E3=83=AB=E3=81=ABLOAD=20DATA=20LOCAL=20INFILE=E3=81=8C?= =?UTF-8?q?=E9=80=9A=E3=81=A3=E3=81=9F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 9 +++------ ecs/jskult-batch-daily/src/db/database.py | 2 +- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 9aa80070..af35a2aa 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -23,13 +23,10 @@ class JjskDataLoadManager: # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") - # load DATA local infileステートメント実行許可設定 - db.execute("SET GLOBAL local_infile=on;") - # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE '{src_file_name}' INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY \"'\" IGNORE 1 LINES;" - cnt = db.execute(sql) - logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + result = db.execute(sql, {"src_file_name": src_file_name}) + 
logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい diff --git a/ecs/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-daily/src/db/database.py index f67a21b9..b67c3a3d 100644 --- a/ecs/jskult-batch-daily/src/db/database.py +++ b/ecs/jskult-batch-daily/src/db/database.py @@ -44,7 +44,7 @@ class Database: host=self.__host, port=self.__port, database=self.__schema, - query={"charset": "utf8mb4"} + query={"charset": "utf8mb4", "local_infile": "1"}, ) self.__engine = create_engine( From 0962172f4d7328757ec1977d28a3300bbdae9bab Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 23:08:58 +0900 Subject: [PATCH 005/103] =?UTF-8?q?=E3=81=A8=E3=82=8A=E3=81=82=E3=81=88?= =?UTF-8?q?=E3=81=9A=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AA=E3=81=97=E3=81=A7?= =?UTF-8?q?=E9=80=9A=E3=81=9B=E3=82=8B=E7=A8=8B=E5=BA=A6=E3=81=AE=E4=BB=AE?= =?UTF-8?q?=E5=AE=9F=E8=A3=85=E5=AE=8C=E4=BA=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 16 +- .../src/batch/vjsk/vjsk_importer.py | 48 +- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 1349 ++++++++++++++++- 3 files changed, 1362 insertions(+), 51 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index af35a2aa..4b574dce 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -14,7 +14,7 @@ class JjskDataLoadManager: def _import_to_db(src_file_name: str, condkey: str): db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) - table_name_src = mapper.get_src_table(condkey) + upsert_sql = mapper.get_upsert_sql(condkey) try: db.connect() # TODO:接続オプション local_infile = True が必要? 
@@ -24,13 +24,14 @@ class JjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org}" \ + " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select - # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい - f"INSERT INTO {table_name_src} SELECT * FROM {table_name_org};" + result = db.execute(upsert_sql) + logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') db.commit() except Exception as e: # TODO:DB例外だけキャッチしたい @@ -46,13 +47,8 @@ class JjskDataLoadManager: logger.debug(f'JjskDataLoadManager#load start target:{target}') # target : {"condkey": key, "src_file_path":local_file_path} - # データファイルオープン + # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] - # dat_file = VjskDatFile.retrieve_from_file(local_file_name) - - # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) - # TODO: ファイルエンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 - # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ # データベース登録 self._import_to_db(local_file_name, target["condkey"]) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 7db2983e..8b4b5197 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -114,41 +114,41 @@ def _import_file_to_db(): if batch_context.is_import_target_vjsk_stockslipdata: JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) - # # # ファイル存在確認 卸販売データ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + # DB登録 卸販売データ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) - # # # ファイル存在確認 卸組織変換マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + # DB登録 卸組織変換マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) - # # # ファイル存在確認 施設統合マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + # DB登録 施設統合マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) - # # # ファイル存在確認 卸マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + # DB登録 卸マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) - # # # ファイル存在確認 卸ホールディングスマスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + # DB登録 卸ホールディングスマスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) - # # # ファイル存在確認 施設マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + # DB登録 施設マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) - # # # ファイル存在確認 メーカー卸組織展開表 - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + # DB登録 メーカー卸組織展開表 + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) - # # # ファイル存在確認 取引区分マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + # DB登録 取引区分マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) - # # # ファイル存在確認 製品マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + # DB登録 製品マスタ + 
JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) - # # # ファイル存在確認 製品価格マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + # DB登録 製品価格マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) - # # # ファイル存在確認 卸得意先情報マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + # DB登録 卸得意先情報マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) - # # # ファイル存在確認 MDBコード変換マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + # DB登録 MDBコード変換マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) _logger.debug('V実消化取込処理:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 4aeea24f..a6736e16 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1,3 +1,6 @@ +import textwrap + + class VjskRecvFileMapper: CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ @@ -19,13 +22,274 @@ class VjskRecvFileMapper: _KEY_FILE_SUFFIX = "file_suffix" _KEY_ORG_TABLE = "org_table" _KEY_SRC_TABLE = "src_table" + _KEY_UPSERT_SQL = "upsert_sql" _VJSK_INTERFACE_MAPPING = { # 販売実績データ CONDKEY_SLIP_DATA: { _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.sales", - _KEY_SRC_TABLE: "src05.sales" + _KEY_SRC_TABLE: "src05.sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.sales ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_WHS_ORG_CD + ,REC_CUST_CD + ,REC_COMM_CD + ,REC_TRAN_KBN + ,REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK + ,REC_URAG_NUM + ,REC_QTY + ,REC_NONYU_PRICE + ,REC_NONYU_AMT + ,REC_COMM_NAME + ,REC_NONYU_FCL_NAME + ,FREE_ITEM + ,REC_NONYU_FCL_ADDR + ,REC_NONYU_FCL_POST + ,REC_NONYU_FCL_TEL + ,REC_BEF_HSDN_YMD + ,REC_BEF_SLIP_NUM + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,HSDN_YMD + ,EXEC_DT + ,V_TRAN_CD + ,TRAN_KBN_NAME + ,WHS_ORG_CD + ,V_WHSORG_CD + ,WHS_ORG_NAME + ,WHS_ORG_KN + ,V_WHS_CD + ,WHS_NAME + ,NONYU_FCL_CD + ,V_INST_CD + ,V_INST_KN + ,V_INST_NAME + ,V_INST_ADDR + ,COMM_CD + ,COMM_NAME + ,NONYU_QTY + ,NONYU_PRICE + ,NONYU_AMT + ,SHIKIRI_PRICE + ,SHIKIRI_AMT + ,NHI_PRICE + ,NHI_AMT + ,WHSPOS_ERR_KBN + ,HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN + ,FCL_EXIS_KBN + ,BEF_HSDN_YMD + ,BEF_SLIP_NUM + ,SLIP_ORG_KBN + ,ERR_FLG1 + ,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,ERR_FLG11 + ,ERR_FLG12 + ,ERR_FLG13 + ,ERR_FLG14 + ,ERR_FLG15 + ,ERR_FLG16 + ,ERR_FLG17 + ,ERR_FLG18 + ,ERR_FLG19 + ,ERR_FLG20 + ,KJYO_YM + ,TKSNBK_KBN + ,FCL_EXEC_KBN + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + ,t.REC_WHS_ORG_CD + ,t.REC_CUST_CD + ,t.REC_COMM_CD + ,t.REC_TRAN_KBN + ,t.REV_HSDNYMD_WRK + ,t.REV_HSDNYMD_SRK + ,t.REC_URAG_NUM + ,t.REC_QTY + ,t.REC_NONYU_PRICE + ,t.REC_NONYU_AMT + ,t.REC_COMM_NAME + ,t.REC_NONYU_FCL_NAME + ,t.FREE_ITEM + ,t.REC_NONYU_FCL_ADDR + ,t.REC_NONYU_FCL_POST + ,t.REC_NONYU_FCL_TEL + ,t.REC_BEF_HSDN_YMD + ,t.REC_BEF_SLIP_NUM + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.HSDN_YMD + ,t.EXEC_DT + ,t.V_TRAN_CD + ,t.TRAN_KBN_NAME + ,t.WHS_ORG_CD + ,t.V_WHSORG_CD + ,t.WHS_ORG_NAME + ,t.WHS_ORG_KN + ,t.V_WHS_CD + ,t.WHS_NAME + ,t.NONYU_FCL_CD + ,t.V_INST_CD + ,t.V_INST_KN + 
,t.V_INST_NAME + ,t.V_INST_ADDR + ,t.COMM_CD + ,t.COMM_NAME + ,t.NONYU_QTY + ,t.NONYU_PRICE + ,t.NONYU_AMT + ,t.SHIKIRI_PRICE + ,t.SHIKIRI_AMT + ,t.NHI_PRICE + ,t.NHI_AMT + ,t.WHSPOS_ERR_KBN + ,t.HTDNYMD_ERR_KBN + ,t.PRD_EXIS_KBN + ,t.FCL_EXIS_KBN + ,t.BEF_HSDN_YMD + ,t.BEF_SLIP_NUM + ,t.SLIP_ORG_KBN + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.ERR_FLG11 + ,t.ERR_FLG12 + ,t.ERR_FLG13 + ,t.ERR_FLG14 + ,t.ERR_FLG15 + ,t.ERR_FLG16 + ,t.ERR_FLG17 + ,t.ERR_FLG18 + ,t.ERR_FLG19 + ,t.ERR_FLG20 + ,t.KJYO_YM + ,t.TKSNBK_KBN + ,t.FCL_EXEC_KBN + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.sales AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD + ,REC_CUST_CD=t.REC_CUST_CD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_TRAN_KBN=t.REC_TRAN_KBN + ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK + ,REC_URAG_NUM=t.REC_URAG_NUM + ,REC_QTY=t.REC_QTY + ,REC_NONYU_PRICE=t.REC_NONYU_PRICE + ,REC_NONYU_AMT=t.REC_NONYU_AMT + ,REC_COMM_NAME=t.REC_COMM_NAME + ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME + ,FREE_ITEM=t.FREE_ITEM + ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR + ,REC_NONYU_FCL_POST=t.REC_NONYU_FCL_POST + ,REC_NONYU_FCL_TEL=t.REC_NONYU_FCL_TEL + ,REC_BEF_HSDN_YMD=t.REC_BEF_HSDN_YMD + ,REC_BEF_SLIP_NUM=t.REC_BEF_SLIP_NUM + ,REC_YMD=t.REC_YMD + ,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + ,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,HSDN_YMD=t.HSDN_YMD + ,EXEC_DT=t.EXEC_DT + ,V_TRAN_CD=t.V_TRAN_CD + ,TRAN_KBN_NAME=t.TRAN_KBN_NAME + ,WHS_ORG_CD=t.WHS_ORG_CD + ,V_WHSORG_CD=t.V_WHSORG_CD + ,WHS_ORG_NAME=t.WHS_ORG_NAME + ,WHS_ORG_KN=t.WHS_ORG_KN + ,V_WHS_CD=t.V_WHS_CD + ,WHS_NAME=t.WHS_NAME + ,NONYU_FCL_CD=t.NONYU_FCL_CD + ,V_INST_CD=t.V_INST_CD + ,V_INST_KN=t.V_INST_KN + ,V_INST_NAME=t.V_INST_NAME + ,V_INST_ADDR=t.V_INST_ADDR + ,COMM_CD=t.COMM_CD + ,COMM_NAME=t.COMM_NAME + ,NONYU_QTY=t.NONYU_QTY + ,NONYU_PRICE=t.NONYU_PRICE + ,NONYU_AMT=t.NONYU_AMT + ,SHIKIRI_PRICE=t.SHIKIRI_PRICE + ,SHIKIRI_AMT=t.SHIKIRI_AMT + ,NHI_PRICE=t.NHI_PRICE + ,NHI_AMT=t.NHI_AMT + ,WHSPOS_ERR_KBN=t.WHSPOS_ERR_KBN + ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN=t.PRD_EXIS_KBN + ,FCL_EXIS_KBN=t.FCL_EXIS_KBN + ,BEF_HSDN_YMD=t.BEF_HSDN_YMD + ,BEF_SLIP_NUM=t.BEF_SLIP_NUM + ,SLIP_ORG_KBN=t.SLIP_ORG_KBN + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,ERR_FLG11=t.ERR_FLG11 + ,ERR_FLG12=t.ERR_FLG12 + ,ERR_FLG13=t.ERR_FLG13 + ,ERR_FLG14=t.ERR_FLG14 + ,ERR_FLG15=t.ERR_FLG15 + ,ERR_FLG16=t.ERR_FLG16 + ,ERR_FLG17=t.ERR_FLG17 + ,ERR_FLG18=t.ERR_FLG18 + ,ERR_FLG19=t.ERR_FLG19 + ,ERR_FLG20=t.ERR_FLG20 + ,KJYO_YM=t.KJYO_YM + ,TKSNBK_KBN=t.TKSNBK_KBN + ,FCL_EXEC_KBN=t.FCL_EXEC_KBN + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸ホールディングスマスタ @@ -33,7 +297,50 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.hld_mst_v", - _KEY_SRC_TABLE: "src05.hld_mst_v" + _KEY_SRC_TABLE: "src05.hld_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.hld_mst_v ( + V_HLD_CD + ,SUB_NUM + ,NAME + ,KN_NAME + ,ABB_NAME + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + 
,DWH_UPD_DT + ) + SELECT + t.V_HLD_CD + ,t.SUB_NUM + ,t.NAME + ,t.KN_NAME + ,t.ABB_NAME + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() FROM org05.hld_mst_v AS t + ON DUPLICATE KEY UPDATE + V_HLD_CD=t.V_HLD_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ABB_NAME=t.ABB_NAME + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸マスタ @@ -41,7 +348,66 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_mst_v", - _KEY_SRC_TABLE: "src05.whs_mst_v" + _KEY_SRC_TABLE: "src05.whs_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whs_mst_v ( + V_WHS_CD + ,SUB_NUM + ,NAME + ,KN_NAME + ,ABB_NAME + ,POSTAL_CD + ,ADDR + ,KN_ADDR + ,TEL_NUM + ,V_HLD_CD + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_WHS_CD + ,t.SUB_NUM + ,t.NAME + ,t.KN_NAME + ,t.ABB_NAME + ,t.POSTAL_CD + ,t.ADDR + ,t.KN_ADDR + ,t.TEL_NUM + ,t.V_HLD_CD + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.whs_mst_v AS t + ON DUPLICATE KEY UPDATE + V_WHS_CD=t.V_WHS_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ABB_NAME=t.ABB_NAME + ,POSTAL_CD=t.POSTAL_CD + ,ADDR=t.ADDR + ,KN_ADDR=t.KN_ADDR + ,TEL_NUM=t.TEL_NUM + ,V_HLD_CD=t.V_HLD_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # Vメーカー卸組織展開表 @@ -49,7 +415,156 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", - _KEY_SRC_TABLE: "src05.mkr_org_horizon_v" + _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.mkr_org_horizon_v ( + VID_KIND_1 + ,V_CD_1 + ,NAME_1 + ,DSP_ODR_1 + ,VID_KIND_2 + ,V_CD_2 + ,NAME_2 + ,DSP_ODR_2 + ,VID_KIND_3 + ,V_CD_3 + ,NAME_3 + ,DSP_ODR_3 + ,VID_KIND_4 + ,V_CD_4 + ,NAME_4 + ,DSP_ODR_4 + ,VID_KIND_5 + ,V_CD_5 + ,NAME_5 + ,DSP_ODR_5 + ,VID_KIND_6 + ,V_CD_6 + ,NAME_6 + ,DSP_ODR_6 + ,VID_KIND_7 + ,V_CD_7 + ,NAME_7 + ,DSP_ODR_7 + ,VID_KIND_8 + ,V_CD_8 + ,NAME_8 + ,DSP_ODR_8 + ,VID_KIND_9 + ,V_CD_9 + ,NAME_9 + ,DSP_ODR_9 + ,VID_KIND_10 + ,V_CD_10 + ,NAME_10 + ,DSP_ODR_10 + ,V_WHS_CD + ,START_DATE + ,END_DATE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.VID_KIND_1 + ,t.V_CD_1 + ,t.NAME_1 + ,t.DSP_ODR_1 + ,t.VID_KIND_2 + ,t.V_CD_2 + ,t.NAME_2 + ,t.DSP_ODR_2 + ,t.VID_KIND_3 + ,t.V_CD_3 + ,t.NAME_3 + ,t.DSP_ODR_3 + ,t.VID_KIND_4 + ,t.V_CD_4 + ,t.NAME_4 + ,t.DSP_ODR_4 + ,t.VID_KIND_5 + ,t.V_CD_5 + ,t.NAME_5 + ,t.DSP_ODR_5 + ,t.VID_KIND_6 + ,t.V_CD_6 + ,t.NAME_6 + ,t.DSP_ODR_6 + ,t.VID_KIND_7 + ,t.V_CD_7 + ,t.NAME_7 + ,t.DSP_ODR_7 + ,t.VID_KIND_8 + ,t.V_CD_8 + ,t.NAME_8 + ,t.DSP_ODR_8 + ,t.VID_KIND_9 + ,t.V_CD_9 + ,t.NAME_9 + ,t.DSP_ODR_9 + ,t.VID_KIND_10 + ,t.V_CD_10 + ,t.NAME_10 + ,t.DSP_ODR_10 + ,t.V_WHS_CD + ,t.START_DATE + ,t.END_DATE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.mkr_org_horizon_v AS t + ON DUPLICATE KEY UPDATE + VID_KIND_1=t.VID_KIND_1 + ,V_CD_1=t.V_CD_1 + ,NAME_1=t.NAME_1 + ,DSP_ODR_1=t.DSP_ODR_1 + ,VID_KIND_2=t.VID_KIND_2 + ,V_CD_2=t.V_CD_2 + ,NAME_2=t.NAME_2 + ,DSP_ODR_2=t.DSP_ODR_2 + ,VID_KIND_3=t.VID_KIND_3 + ,V_CD_3=t.V_CD_3 + ,NAME_3=t.NAME_3 + 
,DSP_ODR_3=t.DSP_ODR_3 + ,VID_KIND_4=t.VID_KIND_4 + ,V_CD_4=t.V_CD_4 + ,NAME_4=t.NAME_4 + ,DSP_ODR_4=t.DSP_ODR_4 + ,VID_KIND_5=t.VID_KIND_5 + ,V_CD_5=t.V_CD_5 + ,NAME_5=t.NAME_5 + ,DSP_ODR_5=t.DSP_ODR_5 + ,VID_KIND_6=t.VID_KIND_6 + ,V_CD_6=t.V_CD_6 + ,NAME_6=t.NAME_6 + ,DSP_ODR_6=t.DSP_ODR_6 + ,VID_KIND_7=t.VID_KIND_7 + ,V_CD_7=t.V_CD_7 + ,NAME_7=t.NAME_7 + ,DSP_ODR_7=t.DSP_ODR_7 + ,VID_KIND_8=t.VID_KIND_8 + ,V_CD_8=t.V_CD_8 + ,NAME_8=t.NAME_8 + ,DSP_ODR_8=t.DSP_ODR_8 + ,VID_KIND_9=t.VID_KIND_9 + ,V_CD_9=t.V_CD_9 + ,NAME_9=t.NAME_9 + ,DSP_ODR_9=t.DSP_ODR_9 + ,VID_KIND_10=t.VID_KIND_10 + ,V_CD_10=t.V_CD_10 + ,NAME_10=t.NAME_10 + ,DSP_ODR_10=t.DSP_ODR_10 + ,V_WHS_CD=t.V_WHS_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸組織変換マスタ @@ -57,7 +572,51 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", - _KEY_SRC_TABLE: "src05.org_cnv_mst_v" + _KEY_SRC_TABLE: "src05.org_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.org_cnv_mst_v ( + WHS_CD + ,WHS_SUB_CD + ,ORG_CD + ,SUB_NUM + ,V_ORG_CD + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.WHS_CD + ,t.WHS_SUB_CD + ,t.ORG_CD + ,t.SUB_NUM + ,t.V_ORG_CD + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.org_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + WHS_CD=t.WHS_CD + ,WHS_SUB_CD=t.WHS_SUB_CD + ,ORG_CD=t.ORG_CD + ,SUB_NUM=t.SUB_NUM + ,V_ORG_CD=t.V_ORG_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V取引区分マスタ @@ -65,7 +624,45 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", - _KEY_SRC_TABLE: "src05.tran_kbn_mst_v" + _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.tran_kbn_mst_v ( + V_TRAN_CD + ,SUB_NUM + ,NAME + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_TRAN_CD + ,t.SUB_NUM + ,t.NAME + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.tran_kbn_mst_v AS t + ON DUPLICATE KEY UPDATE + V_TRAN_CD=t.V_TRAN_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V施設マスタ @@ -73,7 +670,90 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.fcl_mst_v", - _KEY_SRC_TABLE: "src05.fcl_mst_v" + _KEY_SRC_TABLE: "src05.fcl_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.fcl_mst_v ( + V_INST_CD + ,SUB_NUM + ,START_DATE + ,END_DATE + ,CLOSED_DT + ,FCL_NAME + ,FCL_KN_NAME + ,FCL_ABB_NAME + ,FCL_ABB_KN_NAME + ,MKR_CD + ,JSK_PROC_KBN + ,FMT_ADDR + ,FMT_KN_ADDR + ,POSTAL_CD + ,PRFT_CD + ,PRFT_NAME + ,CITY_NAME + ,ADDR_LINE_1 + ,TEL_NUM + ,ADMIN_KBN + ,FCL_TYPE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_INST_CD + ,t.SUB_NUM + ,t.START_DATE + ,t.END_DATE + ,t.CLOSED_DT + ,t.FCL_NAME + ,t.FCL_KN_NAME + ,t.FCL_ABB_NAME + ,t.FCL_ABB_KN_NAME + ,t.MKR_CD + ,t.JSK_PROC_KBN + ,t.FMT_ADDR + ,t.FMT_KN_ADDR + ,t.POSTAL_CD + ,t.PRFT_CD + 
,t.PRFT_NAME + ,t.CITY_NAME + ,t.ADDR_LINE_1 + ,t.TEL_NUM + ,t.ADMIN_KBN + ,t.FCL_TYPE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.fcl_mst_v AS t + ON DUPLICATE KEY UPDATE + V_INST_CD=t.V_INST_CD + ,SUB_NUM=t.SUB_NUM + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,CLOSED_DT=t.CLOSED_DT + ,FCL_NAME=t.FCL_NAME + ,FCL_KN_NAME=t.FCL_KN_NAME + ,FCL_ABB_NAME=t.FCL_ABB_NAME + ,FCL_ABB_KN_NAME=t.FCL_ABB_KN_NAME + ,MKR_CD=t.MKR_CD + ,JSK_PROC_KBN=t.JSK_PROC_KBN + ,FMT_ADDR=t.FMT_ADDR + ,FMT_KN_ADDR=t.FMT_KN_ADDR + ,POSTAL_CD=t.POSTAL_CD + ,PRFT_CD=t.PRFT_CD + ,PRFT_NAME=t.PRFT_NAME + ,CITY_NAME=t.CITY_NAME + ,ADDR_LINE_1=t.ADDR_LINE_1 + ,TEL_NUM=t.TEL_NUM + ,ADMIN_KBN=t.ADMIN_KBN + ,FCL_TYPE=t.FCL_TYPE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V製品マスタ @@ -81,7 +761,102 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", - _KEY_SRC_TABLE: "src05.phm_prd_mst_v" + _KEY_SRC_TABLE: "src05.phm_prd_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_prd_mst_v ( + PRD_CD + ,SUB_NUM + ,PRD_NAME + ,PRD_E_NAME + ,MKR_CD + ,MKR_INF_1 + ,MKR_INF_2 + ,PHM_ITM_CD + ,ITM_NAME + ,ITM_ABB_NAME + ,FORM_CD + ,FORM_NAME + ,VOL_CD + ,VOL_NAME + ,CONT_CD + ,CONT_NAME + ,PKG_CD + ,PKG_NAME + ,CNV_NUM + ,JSK_START_DT + ,PRD_SALE_KBN + ,JSK_PROC_KBN + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.PRD_CD + ,t.SUB_NUM + ,t.PRD_NAME + ,t.PRD_E_NAME + ,t.MKR_CD + ,t.MKR_INF_1 + ,t.MKR_INF_2 + ,t.PHM_ITM_CD + ,t.ITM_NAME + ,t.ITM_ABB_NAME + ,t.FORM_CD + ,t.FORM_NAME + ,t.VOL_CD + ,t.VOL_NAME + ,t.CONT_CD + ,t.CONT_NAME + ,t.PKG_CD + ,t.PKG_NAME + ,t.CNV_NUM + ,t.JSK_START_DT + ,t.PRD_SALE_KBN + ,t.JSK_PROC_KBN + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.phm_prd_mst_v AS t + ON DUPLICATE KEY UPDATE + PRD_CD=t.PRD_CD + ,SUB_NUM=t.SUB_NUM + ,PRD_NAME=t.PRD_NAME + ,PRD_E_NAME=t.PRD_E_NAME + ,MKR_CD=t.MKR_CD + ,MKR_INF_1=t.MKR_INF_1 + ,MKR_INF_2=t.MKR_INF_2 + ,PHM_ITM_CD=t.PHM_ITM_CD + ,ITM_NAME=t.ITM_NAME + ,ITM_ABB_NAME=t.ITM_ABB_NAME + ,FORM_CD=t.FORM_CD + ,FORM_NAME=t.FORM_NAME + ,VOL_CD=t.VOL_CD + ,VOL_NAME=t.VOL_NAME + ,CONT_CD=t.CONT_CD + ,CONT_NAME=t.CONT_NAME + ,PKG_CD=t.PKG_CD + ,PKG_NAME=t.PKG_NAME + ,CNV_NUM=t.CNV_NUM + ,JSK_START_DT=t.JSK_START_DT + ,PRD_SALE_KBN=t.PRD_SALE_KBN + ,JSK_PROC_KBN=t.JSK_PROC_KBN + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V製品価格マスタ @@ -89,7 +864,48 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_price_mst_v", - _KEY_SRC_TABLE: "src05.phm_price_mst_v" + _KEY_SRC_TABLE: "src05.phm_price_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_price_mst_v ( + PHM_PRD_CD + ,PHM_PRICE_KIND + ,SUB_NUM + ,PRICE + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.PHM_PRD_CD + ,t.PHM_PRICE_KIND + ,t.SUB_NUM + ,t.PRICE + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.phm_price_mst_v AS t + ON DUPLICATE KEY UPDATE + PHM_PRD_CD=t.PHM_PRD_CD + ,PHM_PRICE_KIND=t.PHM_PRICE_KIND + ,SUB_NUM=t.SUB_NUM + ,PRICE=t.PRICE + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + 
,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V施設統合マスタ @@ -97,7 +913,30 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", - _KEY_SRC_TABLE: "src05.vop_hco_merge_v" + _KEY_SRC_TABLE: "src05.vop_hco_merge_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.vop_hco_merge_v ( + V_INST_CD + ,V_INST_CD_MERG + ,APPLY_DT + ,MERGE_REASON + ,DWH_UPD_DT + ) + SELECT + t.V_INST_CD + ,t.V_INST_CD_MERG + ,t.APPLY_DT + ,t.MERGE_REASON + ,SYSDATE() + FROM org05.vop_hco_merge_v AS t + ON DUPLICATE KEY UPDATE + V_INST_CD=t.V_INST_CD + ,V_INST_CD_MERG=t.V_INST_CD_MERG + ,APPLY_DT=t.APPLY_DT + ,MERGE_REASON=t.MERGE_REASON + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸得意先情報マスタ @@ -105,15 +944,112 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", - _KEY_SRC_TABLE: "src05.whs_customer_mst_v" + _KEY_SRC_TABLE: "src05.whs_customer_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whs_customer_mst_v ( + WHS_CD + ,WHS_SUB_CD + ,CUSTOMER_CD + ,SUB_NUM + ,START_DATE + ,END_DATE + ,WHS_ORG_CD + ,SRC_ORG_CD + ,NAME + ,KN_NAME + ,ADDR + ,KN_ADDR + ,POSTAL_CD + ,TEL_NUM + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.WHS_CD + ,t.WHS_SUB_CD + ,t.CUSTOMER_CD + ,t.SUB_NUM + ,t.START_DATE + ,t.END_DATE + ,t.WHS_ORG_CD + ,t.SRC_ORG_CD + ,t.NAME + ,t.KN_NAME + ,t.ADDR + ,t.KN_ADDR + ,t.POSTAL_CD + ,t.TEL_NUM + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.whs_customer_mst_v AS t + ON DUPLICATE KEY UPDATE + WHS_CD=t.WHS_CD + ,WHS_SUB_CD=t.WHS_SUB_CD + ,CUSTOMER_CD=t.CUSTOMER_CD + ,SUB_NUM=t.SUB_NUM + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,WHS_ORG_CD=t.WHS_ORG_CD + ,SRC_ORG_CD=t.SRC_ORG_CD + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ADDR=t.ADDR + ,KN_ADDR=t.KN_ADDR + ,POSTAL_CD=t.POSTAL_CD + ,TEL_NUM=t.TEL_NUM + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # MDBコード変換表 CONDKEY_MDB_CONV_MST: { _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".tsv", - _KEY_ORG_TABLE: "org05.mdb_conv_mst_v", - _KEY_SRC_TABLE: "src05.mdb_conv_mst_v" + _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", + _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.mdb_cnv_mst_v ( + HCO_VID_V + ,SUB_NUM + ,MDB_CD + ,RELIABILITY + ,START_DATE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.HCO_VID_V + ,t.SUB_NUM + ,t.MDB_CD + ,t.RELIABILITY + ,t.START_DATE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.mdb_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + HCO_VID_V=t.HCO_VID_V + ,SUB_NUM=t.SUB_NUM + ,MDB_CD=t.MDB_CD + ,RELIABILITY=t.RELIABILITY + ,START_DATE=t.START_DATE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # 卸在庫データ @@ -121,7 +1057,105 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whole_stock", - _KEY_SRC_TABLE: "src05.whole_stock" + _KEY_SRC_TABLE: "src05.whole_stock", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whole_stock ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_STO_PLACE + ,REC_STOCK_YMD + ,REC_COMM_CD + ,REC_QTY + ,REC_STOCK_NO_SIGN + ,REC_JAN_CD + ,FREE_ITEM + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,EXEC_DT + ,ERR_FLG1 + 
,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + ,t.REC_STO_PLACE + ,t.REC_STOCK_YMD + ,t.REC_COMM_CD + ,t.REC_QTY + ,t.REC_STOCK_NO_SIGN + ,t.REC_JAN_CD + ,t.FREE_ITEM + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.EXEC_DT + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.whole_stock AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + ,REC_STO_PLACE=t.REC_STO_PLACE + ,REC_STOCK_YMD=t.REC_STOCK_YMD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_QTY=t.REC_QTY + ,REC_STOCK_NO_SIGN=t.REC_STOCK_NO_SIGN + ,REC_JAN_CD=t.REC_JAN_CD + ,FREE_ITEM=t.FREE_ITEM + ,REC_YMD=t.REC_YMD + ,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + ,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,EXEC_DT=t.EXEC_DT + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # 生物由来データ @@ -129,7 +1163,252 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.bio_sales", - _KEY_SRC_TABLE: "src05.bio_sales" + _KEY_SRC_TABLE: "src05.bio_sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.bio_sales ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_WHS_ORG_CD + ,REC_CUST_CD + ,REC_COMM_CD + ,REC_TRAN_KBN + ,REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK + ,REC_URAG_NUM + ,REC_COMM_NAME + ,REC_NONYU_FCL_NAME + ,REC_NONYU_FCL_ADDR + ,REC_LOT_NUM1 + ,REC_QTY1 + ,REC_LOT_NUM2 + ,REC_QTY2 + ,REC_LOT_NUM3 + ,REC_QTY3 + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,HSDN_YMD + ,EXEC_DT + ,V_TRAN_CD + ,TRAN_KBN_NAME + ,WHS_ORG_CD + ,V_WHSORG_CD + ,WHS_ORG_NAME + ,WHS_ORG_KN + ,V_WHS_CD + ,WHS_NAME + ,NONYU_FCL_CD + ,V_INST_CD + ,V_INST_NAME + ,V_INST_KN + ,V_INST_ADDR + ,COMM_CD + ,PRODUCT_NAME + ,HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN + ,FCL_EXIS_KBN + ,QTY1 + ,QTY2 + ,QTY3 + ,SLIP_ORG_KBN + ,BEF_SLIP_MGT_NUM + ,WHS_REP_COMM_NAME + ,WHS_REP_NONYU_FCL_NAME + ,WHS_REP_NONYU_FCL_ADDR + ,ERR_FLG1 + ,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,ERR_FLG11 + ,ERR_FLG12 + ,ERR_FLG13 + ,ERR_FLG14 + ,ERR_FLG15 + ,ERR_FLG16 + ,ERR_FLG17 + ,ERR_FLG18 + ,ERR_FLG19 + ,ERR_FLG20 + ,KJYO_YM + ,TKSNBK_KBN + ,FCL_EXEC_KBN + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + ,t.REC_WHS_ORG_CD + ,t.REC_CUST_CD + ,t.REC_COMM_CD + ,t.REC_TRAN_KBN + ,t.REV_HSDNYMD_WRK + ,t.REV_HSDNYMD_SRK + ,t.REC_URAG_NUM + ,t.REC_COMM_NAME + ,t.REC_NONYU_FCL_NAME + ,t.REC_NONYU_FCL_ADDR + ,t.REC_LOT_NUM1 + ,t.REC_QTY1 + ,t.REC_LOT_NUM2 + ,t.REC_QTY2 + ,t.REC_LOT_NUM3 + ,t.REC_QTY3 + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.HSDN_YMD + ,t.EXEC_DT + ,t.V_TRAN_CD + ,t.TRAN_KBN_NAME + ,t.WHS_ORG_CD + ,t.V_WHSORG_CD + ,t.WHS_ORG_NAME + ,t.WHS_ORG_KN + ,t.V_WHS_CD + ,t.WHS_NAME + ,t.NONYU_FCL_CD + ,t.V_INST_CD + 
,t.V_INST_NAME + ,t.V_INST_KN + ,t.V_INST_ADDR + ,t.COMM_CD + ,t.PRODUCT_NAME + ,t.HTDNYMD_ERR_KBN + ,t.PRD_EXIS_KBN + ,t.FCL_EXIS_KBN + ,t.QTY1 + ,t.QTY2 + ,t.QTY3 + ,t.SLIP_ORG_KBN + ,t.BEF_SLIP_MGT_NUM + ,t.WHS_REP_COMM_NAME + ,t.WHS_REP_NONYU_FCL_NAME + ,t.WHS_REP_NONYU_FCL_ADDR + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.ERR_FLG11 + ,t.ERR_FLG12 + ,t.ERR_FLG13 + ,t.ERR_FLG14 + ,t.ERR_FLG15 + ,t.ERR_FLG16 + ,t.ERR_FLG17 + ,t.ERR_FLG18 + ,t.ERR_FLG19 + ,t.ERR_FLG20 + ,t.KJYO_YM + ,t.TKSNBK_KBN + ,t.FCL_EXEC_KBN + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.bio_sales AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD + ,REC_CUST_CD=t.REC_CUST_CD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_TRAN_KBN=t.REC_TRAN_KBN + ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK + ,REC_URAG_NUM=t.REC_URAG_NUM + ,REC_COMM_NAME=t.REC_COMM_NAME + ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME + ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR + ,REC_LOT_NUM1=t.REC_LOT_NUM1 + ,REC_QTY1=t.REC_QTY1 + ,REC_LOT_NUM2=t.REC_LOT_NUM2 + ,REC_QTY2=t.REC_QTY2 + ,REC_LOT_NUM3=t.REC_LOT_NUM3 + ,REC_QTY3=t.REC_QTY3 + ,REC_YMD=t.REC_YMD + ,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + ,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,HSDN_YMD=t.HSDN_YMD + ,EXEC_DT=t.EXEC_DT + ,V_TRAN_CD=t.V_TRAN_CD + ,TRAN_KBN_NAME=t.TRAN_KBN_NAME + ,WHS_ORG_CD=t.WHS_ORG_CD + ,V_WHSORG_CD=t.V_WHSORG_CD + ,WHS_ORG_NAME=t.WHS_ORG_NAME + ,WHS_ORG_KN=t.WHS_ORG_KN + ,V_WHS_CD=t.V_WHS_CD + ,WHS_NAME=t.WHS_NAME + ,NONYU_FCL_CD=t.NONYU_FCL_CD + ,V_INST_CD=t.V_INST_CD + ,V_INST_NAME=t.V_INST_NAME + ,V_INST_KN=t.V_INST_KN + ,V_INST_ADDR=t.V_INST_ADDR + ,COMM_CD=t.COMM_CD + ,PRODUCT_NAME=t.PRODUCT_NAME + ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN=t.PRD_EXIS_KBN + ,FCL_EXIS_KBN=t.FCL_EXIS_KBN + ,QTY1=t.QTY1 + ,QTY2=t.QTY2 + ,QTY3=t.QTY3 + ,SLIP_ORG_KBN=t.SLIP_ORG_KBN + ,BEF_SLIP_MGT_NUM=t.BEF_SLIP_MGT_NUM + ,WHS_REP_COMM_NAME=t.WHS_REP_COMM_NAME + ,WHS_REP_NONYU_FCL_NAME=t.WHS_REP_NONYU_FCL_NAME + ,WHS_REP_NONYU_FCL_ADDR=t.WHS_REP_NONYU_FCL_ADDR + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,ERR_FLG11=t.ERR_FLG11 + ,ERR_FLG12=t.ERR_FLG12 + ,ERR_FLG13=t.ERR_FLG13 + ,ERR_FLG14=t.ERR_FLG14 + ,ERR_FLG15=t.ERR_FLG15 + ,ERR_FLG16=t.ERR_FLG16 + ,ERR_FLG17=t.ERR_FLG17 + ,ERR_FLG18=t.ERR_FLG18 + ,ERR_FLG19=t.ERR_FLG19 + ,ERR_FLG20=t.ERR_FLG20 + ,KJYO_YM=t.KJYO_YM + ,TKSNBK_KBN=t.TKSNBK_KBN + ,FCL_EXEC_KBN=t.FCL_EXEC_KBN + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # ロットマスタデータ @@ -137,7 +1416,36 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "lot_num_ms_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", - _KEY_SRC_TABLE: "src05.lot_num_mst" + _KEY_SRC_TABLE: "src05.lot_num_mst", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.lot_num_mst ( + SER_NUM + ,LOT_NUM + ,EXPR_DT + ,FRST_MOV_DT + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.SER_NUM + ,t.LOT_NUM + ,t.EXPR_DT + ,t.FRST_MOV_DT + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.lot_num_mst AS t + ON DUPLICATE KEY UPDATE + SER_NUM=t.SER_NUM + ,LOT_NUM=t.LOT_NUM + 
,EXPR_DT=t.EXPR_DT + ,FRST_MOV_DT=t.FRST_MOV_DT + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, } @@ -165,12 +1473,19 @@ class VjskRecvFileMapper: ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) return ret + def get_upsert_sql(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_UPSERT_SQL) + return ret + def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None filename = s3_file_path[s3_file_path.rfind("/") + 1:] for condkey in self._VJSK_INTERFACE_MAPPING: element = self._VJSK_INTERFACE_MAPPING.get(condkey) - if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): + if filename.startswith(element.get(self._KEY_FILE_PREFIX)) \ + and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): ret = condkey break return ret From 62ec4ddebb54de8eb07902ea33dced4ae05964ea Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 18:37:07 +0900 Subject: [PATCH 006/103] =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=81=A8=E3=81=8B?= =?UTF-8?q?=E3=82=B3=E3=83=A1=E3=83=B3=E3=83=88=E3=81=AE=E6=95=B4=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 3 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 4 + .../src/batch/vjsk/vjsk_importer.py | 151 ++++++++++-------- .../src/batch/vjsk/vjsk_recv_file_manager.py | 60 ------- 4 files changed, 89 insertions(+), 129 deletions(-) delete mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2ee93eb8..dde7cd46 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -110,8 +110,7 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER -class VjskBucket(S3Bucket): - # TODO:V実消化バケットから見たり取ってきたりする実装をやる +class VjskRecieveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 4b574dce..70f69344 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -12,6 +12,8 @@ class JjskDataLoadManager: pass def _import_to_db(src_file_name: str, condkey: str): + logger.debug(f"_import_to_db start (src_file_name : {src_file_name}, condkey : {condkey})") + db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) upsert_sql = mapper.get_upsert_sql(condkey) @@ -40,6 +42,8 @@ class JjskDataLoadManager: raise e finally: db.disconnect() + + logger.debug("_import_to_db end") return @classmethod diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 8b4b5197..55e9a41b 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,4 +1,4 @@ -from src.aws.s3 import ConfigBucket, VjskBucket +from src.aws.s3 import ConfigBucket, VjskRecieveBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_wholestocksaler_file import \ CalendarWholwSalerStockFile @@ -7,108 +7,116 @@ from 
src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger -# from src.batch.datachange import emp_chg_inst_lau - -_logger = get_logger('V実消化データ取込') +logger = get_logger('V実消化データ取込') batch_context = BatchContext.get_instance() -vjsk_recv_bucket = VjskBucket() +vjsk_recv_bucket = VjskRecieveBucket() vjsk_mapper = VjskRecvFileMapper() -def _check_if_file_exists(src_list: list, key: str) -> bool: - pref = vjsk_mapper.get_file_prefix(key) - suff = vjsk_mapper.get_file_suffix(key) +def _check_if_file_exists(src_list: list, condkey: str) -> bool: + logger.debug(f"_check_if_file_exists start (src_list : {src_list} , condkey : {condkey})") + # ファイル接頭辞と拡張子が一致するかで判定する + ret = False + pref = vjsk_mapper.get_file_prefix(condkey) + suff = vjsk_mapper.get_file_suffix(condkey) + for idx, elem in enumerate(src_list): buf = elem.get("filename") filename = buf[buf.rfind("/") + 1:] if filename.startswith(pref) and filename.endswith(suff): - return True - return False + ret = True + break + + logger.debug(f"_check_if_file_exists end (return : {ret})") + return ret def _check_received_files(): - """V実消化連携データ存在確認処理""" - _logger.debug('V実消化連携データ存在確認処理:開始') + """V実消化連携データファイル受領確認処理""" + logger.debug('_check_received_files start') - # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 - received_files = vjsk_recv_bucket.get_s3_file_list() - _logger.debug(f'ファイル一覧{received_files}') + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得 + received_s3_files = vjsk_recv_bucket.get_s3_file_list() + logger.debug(f'ファイル一覧{received_s3_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): - raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): + raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸販売データ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_SLIP_DATA): - raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_SLIP_DATA): + raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸組織変換マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): - raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): + raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 施設統合マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): - raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): + raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_MST): - raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_MST): + raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸ホールディングスマスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_HLD_MST): - raise 
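A minimal standalone sketch of the prefix/suffix match that _check_if_file_exists performs on the S3 listing. It assumes the listing is a list of dicts with a 'filename' key, as returned by S3Client.list_objects; the function name here is illustrative only, not part of the patch series.

def has_file_with_prefix_and_suffix(s3_objects: list[dict], prefix: str, suffix: str) -> bool:
    """Return True if any listed object's basename starts with prefix and ends with suffix."""
    for obj in s3_objects:
        key = obj.get("filename", "")
        basename = key[key.rfind("/") + 1:]  # drop the S3 folder portion of the key
        if basename.startswith(prefix) and basename.endswith(suffix):
            return True
    return False

# Example with hypothetical values: has_file_with_prefix_and_suffix(received, "slip_data_", ".tsv")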
BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_HLD_MST): + raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 施設マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_FCL_MST): - raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_FCL_MST): + raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 メーカー卸組織展開表 - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): - raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): + raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 取引区分マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): - raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): + raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 製品マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): - raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): + raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 製品価格マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): - raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): + raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸得意先情報マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): - raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): + raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 MDBコード変換マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): - raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): + raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_s3_files}') - _logger.debug('V実消化連携データ存在確認処理:終了') + logger.debug('_check_received_files end') return True def _import_file_to_db(): - _logger.debug('V実消化取込処理:開始') + """V実消化連携データ取込処理""" + logger.debug('_import_file_to_db start') - # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得 received_s3_files = vjsk_recv_bucket.get_s3_file_list() - # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する target_dict = {} for s3_file_path in received_s3_files: + # S3バケットにある受領ファイルをローカルストレージにdownloadする local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + + # データファイル名に該当する辞書アクセス用のキーを取得する key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) + + # 想定されたデータファイルであれば辞書登録する if key is not None: target_dict[key] = {"condkey": key, "src_file_path": 
local_file_path} - _logger.debug(f'S3ファイルパス辞書{target_dict}') - - # TODO: diff_upsertに変わるやつを呼び出す - # TODO: emp_chg_inst_lau.batch_process() みたいに + logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: @@ -150,51 +158,60 @@ def _import_file_to_db(): # DB登録 MDBコード変換マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) - _logger.debug('V実消化取込処理:終了') + logger.debug('_import_file_to_db end') def _determine_today_is_stockslipdata_target(): - """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する""" + """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日付が該当していればTrueを返却する""" + logger.debug("_determine_today_is_stockslipdata_target start") try: + # 処理日付を取得する today = batch_context.syor_date - holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() - targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) - ret = targetdays.compare_date(today) + # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする + config_file_path = ConfigBucket().download_wholesaler_stock_list() + + # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する + target_days = CalendarWholwSalerStockFile(config_file_path) + + # 処理日付が、設定ファイル「V実消化卸在庫データ連携日ファイル」の定義に該当するかを判定する + ret = target_days.compare_date(today) except Exception as e: - _logger.error(f'{e}') + logger.error(f'{e}') raise e + logger.debug("_determine_today_is_stockslipdata_target end") return ret def exec(): - """V実消化データ取込""" - _logger.info('Start Jitsusyouka Torikomi PGM.') + """V実消化データ取込処理""" + logger.info('Start Jitsusyouka Torikomi PGM.') # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - _logger.debug('卸在庫データ取込対象日であるかを判定') + logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - _logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') if batch_context.is_import_target_vjsk_stockslipdata: - _logger.info('卸在庫データ取込対象日です') + logger.info('卸在庫データ取込対象日です') # V実消化データファイル受領チェック - _logger.debug('V実消化データファイル受領チェック:開始') + logger.debug('V実消化データファイル受領チェック:開始') try: - # S3バケット上でV実消化データファイルの存在チェックをする + # S3バケットにある受領済のV実消化データファイルの存在チェックをする _check_received_files() except BatchOperationException as e: - _logger.error('受領したV実消化データファイルに欠落があります') + logger.error('受領したV実消化データファイルに未受領もものがあります') raise e - _logger.debug('V実消化データファイル受領チェック:終了') + logger.debug('V実消化データファイル受領チェック:終了') # データベース取込 - _logger.debug('V実消化データ取込:開始') + logger.debug('V実消化データ取込:開始') try: + # S3バケットにある受領済のV実消化データファイルをデータベースに登録する _import_file_to_db() except Exception as e: - _logger.error(f'データベース登録失敗 {e}') + logger.error(f'データベース登録失敗 {e}') raise e - _logger.debug('V実消化データ取込:終了') + logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py deleted file mode 100644 index 7ddbb766..00000000 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py +++ /dev/null @@ -1,60 +0,0 @@ -# import csv -# from io import TextIOWrapper - - -# class VjskRecvFileManager: -# layout_class: str -# records: list[str] - -# def __init__(self, dat_line: list[str]) -> None: -# self.layout_class = dat_line[0] -# self.records = dat_line - - -# class VjskDatFile: -# """V実消化データファイル""" - -# lines: list[VjskRecvFileManager] -# success_count: int = 0 -# error_count: int = 0 -# total_count: int = 0 -# __i: int = 0 - -# def __iter__(self): -# return self - -# def 
__next__(self) -> VjskRecvFileManager: -# if self.__i == len(self.lines): -# raise StopIteration() -# line = self.lines[self.__i] -# self.__i += 1 -# return line - -# def __init__(self, file: TextIOWrapper) -> None: -# reader = csv.reader(file) -# csv_rows = [VjskRecvFileManager(row) for row in reader] - -# self.lines = csv_rows -# self.total_count = len(csv_rows) - -# def count_up_success(self): -# self.success_count += 1 - -# def count_up_error(self): -# self.error_count += 1 - -# @classmethod -# def retrieve_from_file(cls, local_file_path: str): -# """V実消化データファイルを読み込み、新しいインスタンスを作成する - -# Args: -# local_file_path (str): ローカルのファイルパス - -# Returns: -# VjskDatFile: このクラスのインスタンス -# """ -# # cp932(Shift-JIS Windows拡張)でファイルを読み込む -# file = open(local_file_path, encoding='cp932') -# instance = cls(file) -# file.close() -# return instance From f299cb9379a564ba99fae33ce14b1def61d4656e Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 19:06:19 +0900 Subject: [PATCH 007/103] =?UTF-8?q?BIO=E3=81=AE2=E6=9C=AC=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_importer.py | 14 ++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 6 +++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 55e9a41b..9c1b0d72 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -92,6 +92,14 @@ def _check_received_files(): if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_s3_files}') + # ファイル存在確認 生物由来データ + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_BIO_SLIP_DATA): + raise BatchOperationException(f'生物由来データファイルがありません ファイル一覧:{received_s3_files}') + + # ファイル存在確認 製造ロット番号マスタ + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): + raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}') + logger.debug('_check_received_files end') return True @@ -158,6 +166,12 @@ def _import_file_to_db(): # DB登録 MDBコード変換マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + # DB登録 生物由来データ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + + # DB登録 製造ロット番号マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + logger.debug('_import_file_to_db end') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index a6736e16..cfd73810 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -16,7 +16,7 @@ class VjskRecvFileMapper: CONDKEY_MDB_CONV_MST = "MDB_CONV_MST" # MDBコード変換表 CONDKEY_STOCK_SLIP_DATA = "STOCK_SLIP_DATA" # 卸在庫データ CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ - CONDKEY_LOT_NUM_MS = "LOT_NUM_MS" # ロットマスタデータ + CONDKEY_LOT_NUM_MST = "LOT_NUM_MST" # ロットマスタデータ _KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" @@ -1412,8 +1412,8 @@ class VjskRecvFileMapper: }, # ロットマスタデータ - CONDKEY_LOT_NUM_MS: { - _KEY_FILE_PREFIX: "lot_num_ms_", + CONDKEY_LOT_NUM_MST: { + _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", 
_KEY_SRC_TABLE: "src05.lot_num_mst", From 1bff52f8c7ea1aa7903aeae17bfeae7081b7f2d3 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 22:30:06 +0900 Subject: [PATCH 008/103] =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=81=A8=E3=81=8B?= =?UTF-8?q?=E3=82=A4=E3=83=B3=E3=83=87=E3=83=B3=E3=83=88=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 2 +- .../src/batch/vjsk/vjsk_importer.py | 16 ++++++++++------ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 5 +++-- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 70f69344..9653f858 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -57,5 +57,5 @@ class JjskDataLoadManager: # データベース登録 self._import_to_db(local_file_name, target["condkey"]) - logger.debug('JjskDataLoadManager#load end') + logger.debug('JjskDataLoadManager#load done') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 9c1b0d72..d667416e 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -27,7 +27,7 @@ def _check_if_file_exists(src_list: list, condkey: str) -> bool: ret = True break - logger.debug(f"_check_if_file_exists end (return : {ret})") + logger.debug(f"_check_if_file_exists done (return : {ret})") return ret @@ -100,7 +100,7 @@ def _check_received_files(): if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}') - logger.debug('_check_received_files end') + logger.debug('_check_received_files done') return True @@ -115,11 +115,15 @@ def _import_file_to_db(): # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する target_dict = {} for s3_file_path in received_s3_files: + file_name = s3_file_path.get('filename') + # S3バケットにある受領ファイルをローカルストレージにdownloadする - local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + logger.debug(f"download s3 file start : {file_name}") + local_file_path = vjsk_recv_bucket.download_data_file(file_name) + logger.debug(f"download s3 file done : {file_name}") # データファイル名に該当する辞書アクセス用のキーを取得する - key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) + key = vjsk_mapper.get_condkey_by_s3_file_path(file_name) # 想定されたデータファイルであれば辞書登録する if key is not None: @@ -172,7 +176,7 @@ def _import_file_to_db(): # DB登録 製造ロット番号マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) - logger.debug('_import_file_to_db end') + logger.debug('_import_file_to_db done') def _determine_today_is_stockslipdata_target(): @@ -193,7 +197,7 @@ def _determine_today_is_stockslipdata_target(): except Exception as e: logger.error(f'{e}') raise e - logger.debug("_determine_today_is_stockslipdata_target end") + logger.debug("_determine_today_is_stockslipdata_target done") return ret diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index cfd73810..ec243433 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ 
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -325,7 +325,8 @@ class VjskRecvFileMapper: ,t.REC_STS_KBN ,t.INS_DT ,t.UPD_DT - ,SYSDATE() FROM org05.hld_mst_v AS t + ,SYSDATE() + FROM org05.hld_mst_v AS t ON DUPLICATE KEY UPDATE V_HLD_CD=t.V_HLD_CD ,SUB_NUM=t.SUB_NUM @@ -1122,7 +1123,7 @@ class VjskRecvFileMapper: ,t.INS_DT ,t.INS_USR ,SYSDATE() - FROM org05.whole_stock AS t + FROM org05.whole_stock AS t ON DUPLICATE KEY UPDATE REC_DATA=t.REC_DATA ,REC_WHS_CD=t.REC_WHS_CD From 7a1941a7bcdf2144151f3cefa8cd3afefe72836f Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 22:31:50 +0900 Subject: [PATCH 009/103] =?UTF-8?q?transabtion=E5=BC=B5=E3=81=A3=E3=81=A6L?= =?UTF-8?q?OAD=E3=81=97=E3=81=9F=E3=81=82=E3=81=A8=E3=81=AEUPSERT=E3=81=8C?= =?UTF-8?q?=E3=82=B3=E3=82=B1=E3=81=9F=E3=81=A8=E3=81=8D=E3=81=AB=E3=80=81?= =?UTF-8?q?org=E3=81=8C=E3=83=AD=E3=83=BC=E3=83=AB=E3=83=90=E3=83=83?= =?UTF-8?q?=E3=82=AF=E3=81=95=E3=82=8C=E3=81=AA=E3=81=84=E3=82=88=E3=81=86?= =?UTF-8?q?=E3=81=AB=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 9653f858..86040640 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -19,8 +19,7 @@ class JjskDataLoadManager: upsert_sql = mapper.get_upsert_sql(condkey) try: - db.connect() # TODO:接続オプション local_infile = True が必要? - db.begin() + db.connect() # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") @@ -32,18 +31,19 @@ class JjskDataLoadManager: logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select + db.begin() result = db.execute(upsert_sql) logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') db.commit() - except Exception as e: # TODO:DB例外だけキャッチしたい + except Exception as e: db.rollback() logger.error(e) raise e finally: db.disconnect() - logger.debug("_import_to_db end") + logger.debug("_import_to_db done") return @classmethod From 6cbee0e8baca40d23cc7d8d3830871daae8ecb95 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 10 May 2023 13:22:40 +0900 Subject: [PATCH 010/103] =?UTF-8?q?=E6=97=A5=E4=BB=98=E3=83=87=E3=83=BC?= =?UTF-8?q?=E3=82=BF=E3=81=8Ctsv=E3=81=A7=E3=83=96=E3=83=A9=E3=83=B3?= =?UTF-8?q?=E3=82=AF=E3=81=A0=E3=81=A3=E3=81=9F=E3=81=A8=E3=81=8D=E3=80=81?= =?UTF-8?q?LOAD=E6=96=87=E3=81=A7'0000-00-00'=E3=81=AB=E5=A4=89=E6=8F=9B?= =?UTF-8?q?=E3=81=95=E3=82=8C=E3=81=A6=E3=81=97=E3=81=BE=E3=81=86=E5=95=8F?= =?UTF-8?q?=E9=A1=8C=E3=81=AE=E5=AF=BE=E5=87=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 8 +++++--- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 86040640..c63ac962 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -25,15 +25,17 @@ class JjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = 
f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org}" \ + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select db.begin() - result = db.execute(upsert_sql) - logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') + db.execute(upsert_sql) + # TODO: insert+select 実質10件なのに、resultのrowcountは20件になってしまう ※sqlalchemyの仕様 + # https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount + logger.info('orgテーブルをsrcテーブルにUPSERT') db.commit() except Exception as e: diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index ec243433..726e927c 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -815,7 +815,7 @@ class VjskRecvFileMapper: ,t.PKG_CD ,t.PKG_NAME ,t.CNV_NUM - ,t.JSK_START_DT + ,nullif(t.JSK_START_DT, 0) ,t.PRD_SALE_KBN ,t.JSK_PROC_KBN ,t.START_DATE @@ -846,7 +846,7 @@ class VjskRecvFileMapper: ,PKG_CD=t.PKG_CD ,PKG_NAME=t.PKG_NAME ,CNV_NUM=t.CNV_NUM - ,JSK_START_DT=t.JSK_START_DT + ,JSK_START_DT=nullif(t.JSK_START_DT, 0) ,PRD_SALE_KBN=t.PRD_SALE_KBN ,JSK_PROC_KBN=t.JSK_PROC_KBN ,START_DATE=t.START_DATE From a0e858e93f141965c9351c71a8acbd1ea0664ae1 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 10 May 2023 16:50:44 +0900 Subject: [PATCH 011/103] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E3=81=AE=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../common/calendar_wholestocksaler_file.py | 32 ------------ .../src/batch/vjsk/vjsk_data_load_manager.py | 15 ++++-- .../src/batch/vjsk/vjsk_importer.py | 50 ++++++++++--------- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 22 ++++++++ 4 files changed, 59 insertions(+), 60 deletions(-) delete mode 100644 ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py diff --git a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py deleted file mode 100644 index ba687514..00000000 --- a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py +++ /dev/null @@ -1,32 +0,0 @@ -from src.system_var import constants - - -class CalendarWholwSalerStockFile: - """V実消化卸在庫データ連携日ファイル""" - - __calendar_file_lines: list[str] - - def __init__(self, calendar_file_path): - with open(calendar_file_path) as f: - self.__calendar_file_lines: list[str] = f.readlines() - - def compare_date(self, date_str: str) -> bool: - """与えられた日付がV実消化卸在庫データ連携日ファイル内に含まれているかどうか - V実消化卸在庫データ連携日ファイル内の日付はyyyy/mm/ddで書かれている前提 - コメント(#)が含まれている行は無視される - - Args: - date_str (str): yyyy/mm/dd文字列 - - Returns: - bool: 含まれていればTrue - """ - for calendar_date in self.__calendar_file_lines: - # コメント行が含まれている場合はスキップ - if constants.CALENDAR_COMMENT_SYMBOL in calendar_date: - continue - - if date_str in calendar_date: - return True - - return False diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index c63ac962..909bd041 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ 
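A hedged illustration of the NULLIF(expr, 0) guard added above: when a date field is blank in the tsv, LOAD DATA stores the zero date, and wrapping the column in NULLIF during the org-to-src upsert maps it back to NULL. The table and column names below are placeholders, not the project's schema.

import textwrap

EXAMPLE_UPSERT_WITH_NULLIF = textwrap.dedent("""\
    INSERT INTO src_example (ID, SOME_DATE)
    SELECT
        t.ID
        ,NULLIF(t.SOME_DATE, 0)
    FROM org_example AS t
    ON DUPLICATE KEY UPDATE
        SOME_DATE=NULLIF(t.SOME_DATE, 0)
    ;
    """)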
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -7,7 +7,7 @@ logger = get_logger('V実消化データ取込(DB登録)') mapper = VjskRecvFileMapper() -class JjskDataLoadManager: +class VjskDataLoadManager: def __init__(self): pass @@ -15,7 +15,9 @@ class JjskDataLoadManager: logger.debug(f"_import_to_db start (src_file_name : {src_file_name}, condkey : {condkey})") db = Database.get_instance() + data_name = mapper.get_data_name(condkey) table_name_org = mapper.get_org_table(condkey) + table_name_src = mapper.get_src_table(condkey) upsert_sql = mapper.get_upsert_sql(condkey) try: @@ -28,14 +30,17 @@ class JjskDataLoadManager: sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) - logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') + logger.debug(sql) + logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') # org→srcにinsert select db.begin() + logger.debug(upsert_sql) db.execute(upsert_sql) - # TODO: insert+select 実質10件なのに、resultのrowcountは20件になってしまう ※sqlalchemyの仕様 - # https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount - logger.info('orgテーブルをsrcテーブルにUPSERT') + # MEMO: insert+selectの結果件数は、LOAD結果と必ず等しいので、executeの結果件数はログ出力しない + # MEMO: insert+select 実質10件なのに、result.rowcountは20件になってしまう ※sqlalchemyの仕様 + # MEMO: https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount + logger.info(f'{table_name_org}を{table_name_src}にUPSERT') db.commit() except Exception as e: diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index d667416e..31c42874 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,8 +1,7 @@ from src.aws.s3 import ConfigBucket, VjskRecieveBucket from src.batch.common.batch_context import BatchContext -from src.batch.common.calendar_wholestocksaler_file import \ - CalendarWholwSalerStockFile -from src.batch.vjsk.vjsk_data_load_manager import JjskDataLoadManager +from src.batch.common.calendar_file import CalendarFile +from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger @@ -102,7 +101,7 @@ def _check_received_files(): logger.debug('_check_received_files done') - return True + return def _import_file_to_db(): @@ -132,49 +131,49 @@ def _import_file_to_db(): # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) # DB登録 卸販売データ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) # DB登録 卸組織変換マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) # DB登録 施設統合マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) # DB登録 卸マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + 
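A compact sketch of the staging flow used by _import_to_db above: TRUNCATE the org table, LOAD the tsv, then run only the org-to-src UPSERT inside a transaction, so a failed UPSERT leaves the freshly loaded org rows in place. PyMySQL is used purely for illustration and every name is a placeholder; the project's own Database wrapper is assumed to behave equivalently.

import pymysql


def load_then_upsert(conn: "pymysql.connections.Connection", tsv_path: str,
                     org_table: str, upsert_sql: str) -> None:
    # conn is assumed to be opened with local_infile=True and autocommit=True,
    # so the TRUNCATE and the LOAD take effect immediately.
    with conn.cursor() as cur:
        cur.execute(f"TRUNCATE TABLE {org_table}")
        cur.execute(
            f"LOAD DATA LOCAL INFILE %s INTO TABLE {org_table} "
            "FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES",
            (tsv_path,),
        )
        conn.begin()  # the transaction covers only the upsert
        try:
            cur.execute(upsert_sql)
            conn.commit()
        except Exception:
            conn.rollback()  # rows already loaded into the org table are kept
            raise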
VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) # DB登録 卸ホールディングスマスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) # DB登録 施設マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) # DB登録 メーカー卸組織展開表 - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) # DB登録 取引区分マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) # DB登録 製品マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) # DB登録 製品価格マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) # DB登録 卸得意先情報マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) # DB登録 MDBコード変換マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) # DB登録 生物由来データ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) # DB登録 製造ロット番号マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) logger.debug('_import_file_to_db done') @@ -187,10 +186,10 @@ def _determine_today_is_stockslipdata_target(): today = batch_context.syor_date # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする - config_file_path = ConfigBucket().download_wholesaler_stock_list() + wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_list() # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する - target_days = CalendarWholwSalerStockFile(config_file_path) + target_days = CalendarFile(wholesaler_stock_list_file_path) # 処理日付が、設定ファイル「V実消化卸在庫データ連携日ファイル」の定義に該当するかを判定する ret = target_days.compare_date(today) @@ -205,6 +204,11 @@ def exec(): """V実消化データ取込処理""" logger.info('Start Jitsusyouka Torikomi PGM.') + # 非営業日なら何もせず終了 + if batch_context.is_not_business_day: + logger.debug('非営業日なので処理をスキップ') + return + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() @@ -219,7 +223,7 @@ def exec(): _check_received_files() except BatchOperationException as e: - logger.error('受領したV実消化データファイルに未受領もものがあります') + logger.debug('受領したV実消化データファイルに未受領もものがあります') raise e logger.debug('V実消化データファイル受領チェック:終了') @@ -229,7 +233,7 @@ def exec(): # S3バケットにある受領済のV実消化データファイルをデータベースに登録する _import_file_to_db() except Exception as e: - logger.error(f'データベース登録失敗 {e}') + logger.debug(f'データベース登録失敗 {e}') raise e logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 726e927c..2ba87909 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -18,6 +18,7 @@ class VjskRecvFileMapper: CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ CONDKEY_LOT_NUM_MST = 
"LOT_NUM_MST" # ロットマスタデータ + _KEY_DATA_NAME = "data_name" _KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" _KEY_ORG_TABLE = "org_table" @@ -26,6 +27,7 @@ class VjskRecvFileMapper: _VJSK_INTERFACE_MAPPING = { # 販売実績データ CONDKEY_SLIP_DATA: { + _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.sales", @@ -294,6 +296,7 @@ class VjskRecvFileMapper: # V卸ホールディングスマスタ CONDKEY_HLD_MST: { + _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.hld_mst_v", @@ -346,6 +349,7 @@ class VjskRecvFileMapper: # V卸マスタ CONDKEY_WHS_MST: { + _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_mst_v", @@ -413,6 +417,7 @@ class VjskRecvFileMapper: # Vメーカー卸組織展開表 CONDKEY_MKR_ORG_HORIZON: { + _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", @@ -570,6 +575,7 @@ class VjskRecvFileMapper: # V卸組織変換マスタ CONDKEY_ORG_CNV_MST: { + _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", @@ -622,6 +628,7 @@ class VjskRecvFileMapper: # V取引区分マスタ CONDKEY_TRAN_KBN_MST: { + _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", @@ -668,6 +675,7 @@ class VjskRecvFileMapper: # V施設マスタ CONDKEY_FCL_MST: { + _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.fcl_mst_v", @@ -759,6 +767,7 @@ class VjskRecvFileMapper: # V製品マスタ CONDKEY_PHM_PRD_MST: { + _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", @@ -862,6 +871,7 @@ class VjskRecvFileMapper: # V製品価格マスタ CONDKEY_PHM_PRICE_MST: { + _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_price_mst_v", @@ -911,6 +921,7 @@ class VjskRecvFileMapper: # V施設統合マスタ CONDKEY_VOP_HCO_MERGE: { + _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", @@ -942,6 +953,7 @@ class VjskRecvFileMapper: # V卸得意先情報マスタ CONDKEY_WHS_CUSTOMER_MST: { + _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", @@ -1012,6 +1024,7 @@ class VjskRecvFileMapper: # MDBコード変換表 CONDKEY_MDB_CONV_MST: { + _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", @@ -1055,6 +1068,7 @@ class VjskRecvFileMapper: # 卸在庫データ CONDKEY_STOCK_SLIP_DATA: { + _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whole_stock", @@ -1161,6 +1175,7 @@ class VjskRecvFileMapper: # 生物由来データ CONDKEY_BIO_SLIP_DATA: { + _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.bio_sales", @@ -1414,6 +1429,7 @@ class VjskRecvFileMapper: # ロットマスタデータ CONDKEY_LOT_NUM_MST: { + _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", @@ -1450,6 +1466,12 @@ class VjskRecvFileMapper: }, } + def get_data_name(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = 
self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_DATA_NAME) + return ret + def get_file_prefix(self, condkey: str) -> str: ret = None if condkey in self._VJSK_INTERFACE_MAPPING: From d370e8c87d92faddcfbf8da7217c2e655d54be9b Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 15:32:42 +0900 Subject: [PATCH 012/103] =?UTF-8?q?style:=20=E3=82=A8=E3=83=87=E3=82=A3?= =?UTF-8?q?=E3=82=BF=E3=81=AE=E8=AD=A6=E5=91=8A=E8=A7=A3=E6=B6=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 2ba87909..27c47ad5 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1105,7 +1105,7 @@ class VjskRecvFileMapper: ,INS_DT ,INS_USR ,DWH_UPD_DT - ) + ) SELECT t.REC_DATA ,t.REC_WHS_CD @@ -1138,7 +1138,7 @@ class VjskRecvFileMapper: ,t.INS_USR ,SYSDATE() FROM org05.whole_stock AS t - ON DUPLICATE KEY UPDATE + ON DUPLICATE KEY UPDATE REC_DATA=t.REC_DATA ,REC_WHS_CD=t.REC_WHS_CD ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD From 285c725d5b1571251f82431ab826608e8905e851 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 21:07:41 +0900 Subject: [PATCH 013/103] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 8 +- .../src/batch/common/batch_context.py | 12 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 24 +- .../src/batch/vjsk/vjsk_importer.py | 122 +- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 2374 ++++++++--------- 5 files changed, 1266 insertions(+), 1274 deletions(-) diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index dde7cd46..62111409 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -91,13 +91,13 @@ class ConfigBucket(S3Bucket): f.seek(0) return temporary_file_path - def download_wholesaler_stock_list(self): + def download_wholesaler_stock_input_day_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) f.seek(0) return temporary_file_path @@ -110,7 +110,7 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER -class VjskRecieveBucket(S3Bucket): +class VjskReceiveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV diff --git a/ecs/jskult-batch-daily/src/batch/common/batch_context.py b/ecs/jskult-batch-daily/src/batch/common/batch_context.py index b493ecca..b3fc4967 100644 --- 
a/ecs/jskult-batch-daily/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-daily/src/batch/common/batch_context.py @@ -3,7 +3,7 @@ class BatchContext: __syor_date: str # 処理日(yyyy/mm/dd形式) __is_not_business_day: bool # 日次バッチ起動日フラグ __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ - __is_import_target_vjsk_stockslipdata: bool # 卸在庫データ取込対象フラグ + __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ def __init__(self) -> None: self.__is_not_business_day = False @@ -40,9 +40,9 @@ class BatchContext: self.__is_ultmarc_imported = flag @property - def is_import_target_vjsk_stockslipdata(self): - return self.__is_import_target_vjsk_stockslipdata + def is_vjsk_stock_import_day(self): + return self.__is_vjsk_stock_import_day - @is_import_target_vjsk_stockslipdata.setter - def is_import_target_vjsk_stockslipdata(self, flag: bool): - self.__is_import_target_vjsk_stockslipdata = flag + @is_vjsk_stock_import_day.setter + def is_vjsk_stock_import_day(self, flag: bool): + self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 909bd041..2d99951b 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -1,10 +1,11 @@ # from src.batch.vjsk.vjsk_recv_file_manager import VjskDatFile -from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper from src.db.database import Database +from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger logger = get_logger('V実消化データ取込(DB登録)') -mapper = VjskRecvFileMapper() +mapper = VjskReceiveFileMapper() class VjskDataLoadManager: @@ -27,8 +28,13 @@ class VjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ - " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + sql = f"""\ + LOAD DATA LOCAL INFILE :src_file_name + INTO TABLE {table_name_org} + FIELDS TERMINATED BY '\\t' + ENCLOSED BY '\"' + IGNORE 1 LINES; + """ result = db.execute(sql, {"src_file_name": src_file_name}) logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') @@ -45,8 +51,7 @@ class VjskDataLoadManager: db.commit() except Exception as e: db.rollback() - logger.error(e) - raise e + raise BatchOperationException(e) finally: db.disconnect() @@ -54,9 +59,8 @@ class VjskDataLoadManager: return @classmethod - def Load(self, target: dict): - logger.debug(f'JjskDataLoadManager#load start target:{target}') - # target : {"condkey": key, "src_file_path":local_file_path} + def load(self, target: dict): + logger.debug(f'load start target:{target}') # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] @@ -64,5 +68,5 @@ class VjskDataLoadManager: # データベース登録 self._import_to_db(local_file_name, target["condkey"]) - logger.debug('JjskDataLoadManager#load done') + logger.debug('load done') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 31c42874..719e3e62 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,15 +1,54 @@ -from src.aws.s3 import ConfigBucket, VjskRecieveBucket +from src.aws.s3 import 
ConfigBucket, VjskReceiveBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager -from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger logger = get_logger('V実消化データ取込') batch_context = BatchContext.get_instance() -vjsk_recv_bucket = VjskRecieveBucket() -vjsk_mapper = VjskRecvFileMapper() +vjsk_recv_bucket = VjskReceiveBucket() +vjsk_mapper = VjskReceiveFileMapper() + + +def exec(): + """V実消化データ取込処理""" + logger.debug('exec start') + + # 非営業日なら何もせず終了 + if batch_context.is_not_business_day: + logger.debug('非営業日なので処理をスキップ') + return + + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる + logger.debug('卸在庫データ取込対象日であるかを判定') + batch_context.is_vjsk_stock_import_day = _determine_today_is_stockslipdata_target() + logger.debug(f'判定結果 : {batch_context.is_vjsk_stock_import_day}') + if batch_context.is_vjsk_stock_import_day: + logger.info('卸在庫データ取込対象日です') + + # V実消化データファイル受領チェック + logger.debug('V実消化データファイル受領チェック:開始') + try: + # S3バケットにある受領済のV実消化データファイルの存在チェックをする + _check_received_files() + + except BatchOperationException as e: + logger.debug('受領したV実消化データファイルに未受領もものがあります') + raise e + logger.debug('V実消化データファイル受領チェック:終了') + + # データベース取込 + logger.debug('V実消化データ取込:開始') + try: + # S3バケットにある受領済のV実消化データファイルをデータベースに登録する + _import_file_to_db() + except Exception as e: + logger.debug(f'データベース登録失敗 {e}') + raise e + + logger.debug('exec done') def _check_if_file_exists(src_list: list, condkey: str) -> bool: @@ -39,7 +78,7 @@ def _check_received_files(): logger.debug(f'ファイル一覧{received_s3_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) - if batch_context.is_import_target_vjsk_stockslipdata: + if batch_context.is_vjsk_stock_import_day: if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_s3_files}') @@ -130,50 +169,50 @@ def _import_file_to_db(): logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) - if batch_context.is_import_target_vjsk_stockslipdata: - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) + if batch_context.is_vjsk_stock_import_day: + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) # DB登録 卸販売データ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) # DB登録 卸組織変換マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) # DB登録 施設統合マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) # DB登録 卸マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) # DB登録 卸ホールディングスマスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) # DB登録 施設マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) # DB登録 メーカー卸組織展開表 - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + 
VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) # DB登録 取引区分マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) # DB登録 製品マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) # DB登録 製品価格マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) # DB登録 卸得意先情報マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) # DB登録 MDBコード変換マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) # DB登録 生物由来データ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) # DB登録 製造ロット番号マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) logger.debug('_import_file_to_db done') @@ -186,7 +225,7 @@ def _determine_today_is_stockslipdata_target(): today = batch_context.syor_date # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする - wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_list() + wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_input_day_list() # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する target_days = CalendarFile(wholesaler_stock_list_file_path) @@ -198,42 +237,3 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret - - -def exec(): - """V実消化データ取込処理""" - logger.info('Start Jitsusyouka Torikomi PGM.') - - # 非営業日なら何もせず終了 - if batch_context.is_not_business_day: - logger.debug('非営業日なので処理をスキップ') - return - - # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - logger.debug('卸在庫データ取込対象日であるかを判定') - batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') - if batch_context.is_import_target_vjsk_stockslipdata: - logger.info('卸在庫データ取込対象日です') - - # V実消化データファイル受領チェック - logger.debug('V実消化データファイル受領チェック:開始') - try: - # S3バケットにある受領済のV実消化データファイルの存在チェックをする - _check_received_files() - - except BatchOperationException as e: - logger.debug('受領したV実消化データファイルに未受領もものがあります') - raise e - logger.debug('V実消化データファイル受領チェック:終了') - - # データベース取込 - logger.debug('V実消化データ取込:開始') - try: - # S3バケットにある受領済のV実消化データファイルをデータベースに登録する - _import_file_to_db() - except Exception as e: - logger.debug(f'データベース登録失敗 {e}') - raise e - - logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 27c47ad5..612309d2 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1,7 +1,7 @@ import textwrap -class VjskRecvFileMapper: +class VjskReceiveFileMapper: CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ CONDKEY_WHS_MST = "WHS_MST" # V卸マスタ @@ -34,262 +34,262 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.sales ( - REC_DATA - 
,REC_WHS_CD - ,REC_WHS_SUB_CD - ,REC_WHS_ORG_CD - ,REC_CUST_CD - ,REC_COMM_CD - ,REC_TRAN_KBN - ,REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK - ,REC_URAG_NUM - ,REC_QTY - ,REC_NONYU_PRICE - ,REC_NONYU_AMT - ,REC_COMM_NAME - ,REC_NONYU_FCL_NAME - ,FREE_ITEM - ,REC_NONYU_FCL_ADDR - ,REC_NONYU_FCL_POST - ,REC_NONYU_FCL_TEL - ,REC_BEF_HSDN_YMD - ,REC_BEF_SLIP_NUM - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,HSDN_YMD - ,EXEC_DT - ,V_TRAN_CD - ,TRAN_KBN_NAME - ,WHS_ORG_CD - ,V_WHSORG_CD - ,WHS_ORG_NAME - ,WHS_ORG_KN - ,V_WHS_CD - ,WHS_NAME - ,NONYU_FCL_CD - ,V_INST_CD - ,V_INST_KN - ,V_INST_NAME - ,V_INST_ADDR - ,COMM_CD - ,COMM_NAME - ,NONYU_QTY - ,NONYU_PRICE - ,NONYU_AMT - ,SHIKIRI_PRICE - ,SHIKIRI_AMT - ,NHI_PRICE - ,NHI_AMT - ,WHSPOS_ERR_KBN - ,HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN - ,FCL_EXIS_KBN - ,BEF_HSDN_YMD - ,BEF_SLIP_NUM - ,SLIP_ORG_KBN - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,ERR_FLG11 - ,ERR_FLG12 - ,ERR_FLG13 - ,ERR_FLG14 - ,ERR_FLG15 - ,ERR_FLG16 - ,ERR_FLG17 - ,ERR_FLG18 - ,ERR_FLG19 - ,ERR_FLG20 - ,KJYO_YM - ,TKSNBK_KBN - ,FCL_EXEC_KBN - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_qty + ,rec_nonyu_price + ,rec_nonyu_amt + ,rec_comm_name + ,rec_nonyu_fcl_name + ,free_item + ,rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd + ,rec_bef_slip_num + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + ,v_inst_cd + ,v_inst_kn + ,v_inst_name + ,v_inst_addr + ,comm_cd + ,comm_name + ,nonyu_qty + ,nonyu_price + ,nonyu_amt + ,shikiri_price + ,shikiri_amt + ,nhi_price + ,nhi_amt + ,whspos_err_kbn + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,bef_hsdn_ymd + ,bef_slip_num + ,slip_org_kbn + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + ,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + ,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_WHS_ORG_CD - ,t.REC_CUST_CD - ,t.REC_COMM_CD - ,t.REC_TRAN_KBN - ,t.REV_HSDNYMD_WRK - ,t.REV_HSDNYMD_SRK - ,t.REC_URAG_NUM - ,t.REC_QTY - ,t.REC_NONYU_PRICE - ,t.REC_NONYU_AMT - ,t.REC_COMM_NAME - ,t.REC_NONYU_FCL_NAME - ,t.FREE_ITEM - ,t.REC_NONYU_FCL_ADDR - ,t.REC_NONYU_FCL_POST - ,t.REC_NONYU_FCL_TEL - ,t.REC_BEF_HSDN_YMD - ,t.REC_BEF_SLIP_NUM - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.HSDN_YMD - ,t.EXEC_DT - ,t.V_TRAN_CD - ,t.TRAN_KBN_NAME - ,t.WHS_ORG_CD - ,t.V_WHSORG_CD - ,t.WHS_ORG_NAME - ,t.WHS_ORG_KN - ,t.V_WHS_CD - ,t.WHS_NAME - ,t.NONYU_FCL_CD - ,t.V_INST_CD - ,t.V_INST_KN - ,t.V_INST_NAME - ,t.V_INST_ADDR - ,t.COMM_CD - ,t.COMM_NAME - ,t.NONYU_QTY - ,t.NONYU_PRICE - ,t.NONYU_AMT - ,t.SHIKIRI_PRICE - ,t.SHIKIRI_AMT - ,t.NHI_PRICE - ,t.NHI_AMT - ,t.WHSPOS_ERR_KBN - ,t.HTDNYMD_ERR_KBN - ,t.PRD_EXIS_KBN - ,t.FCL_EXIS_KBN - ,t.BEF_HSDN_YMD - ,t.BEF_SLIP_NUM - ,t.SLIP_ORG_KBN - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - 
,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.ERR_FLG11 - ,t.ERR_FLG12 - ,t.ERR_FLG13 - ,t.ERR_FLG14 - ,t.ERR_FLG15 - ,t.ERR_FLG16 - ,t.ERR_FLG17 - ,t.ERR_FLG18 - ,t.ERR_FLG19 - ,t.ERR_FLG20 - ,t.KJYO_YM - ,t.TKSNBK_KBN - ,t.FCL_EXEC_KBN - ,t.REC_STS_KBN - ,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_qty + ,t.rec_nonyu_price + ,t.rec_nonyu_amt + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.free_item + ,t.rec_nonyu_fcl_addr + ,t.rec_nonyu_fcl_post + ,t.rec_nonyu_fcl_tel + ,t.rec_bef_hsdn_ymd + ,t.rec_bef_slip_num + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_kn + ,t.v_inst_name + ,t.v_inst_addr + ,t.comm_cd + ,t.comm_name + ,t.nonyu_qty + ,t.nonyu_price + ,t.nonyu_amt + ,t.shikiri_price + ,t.shikiri_amt + ,t.nhi_price + ,t.nhi_amt + ,t.whspos_err_kbn + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.bef_hsdn_ymd + ,t.bef_slip_num + ,t.slip_org_kbn + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.sales AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD - ,REC_CUST_CD=t.REC_CUST_CD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_TRAN_KBN=t.REC_TRAN_KBN - ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK - ,REC_URAG_NUM=t.REC_URAG_NUM - ,REC_QTY=t.REC_QTY - ,REC_NONYU_PRICE=t.REC_NONYU_PRICE - ,REC_NONYU_AMT=t.REC_NONYU_AMT - ,REC_COMM_NAME=t.REC_COMM_NAME - ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME - ,FREE_ITEM=t.FREE_ITEM - ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR - ,REC_NONYU_FCL_POST=t.REC_NONYU_FCL_POST - ,REC_NONYU_FCL_TEL=t.REC_NONYU_FCL_TEL - ,REC_BEF_HSDN_YMD=t.REC_BEF_HSDN_YMD - ,REC_BEF_SLIP_NUM=t.REC_BEF_SLIP_NUM - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,HSDN_YMD=t.HSDN_YMD - ,EXEC_DT=t.EXEC_DT - ,V_TRAN_CD=t.V_TRAN_CD - ,TRAN_KBN_NAME=t.TRAN_KBN_NAME - ,WHS_ORG_CD=t.WHS_ORG_CD - ,V_WHSORG_CD=t.V_WHSORG_CD - ,WHS_ORG_NAME=t.WHS_ORG_NAME - ,WHS_ORG_KN=t.WHS_ORG_KN - ,V_WHS_CD=t.V_WHS_CD - ,WHS_NAME=t.WHS_NAME - ,NONYU_FCL_CD=t.NONYU_FCL_CD - ,V_INST_CD=t.V_INST_CD - ,V_INST_KN=t.V_INST_KN - ,V_INST_NAME=t.V_INST_NAME - ,V_INST_ADDR=t.V_INST_ADDR - ,COMM_CD=t.COMM_CD - ,COMM_NAME=t.COMM_NAME - ,NONYU_QTY=t.NONYU_QTY - ,NONYU_PRICE=t.NONYU_PRICE - ,NONYU_AMT=t.NONYU_AMT - ,SHIKIRI_PRICE=t.SHIKIRI_PRICE - ,SHIKIRI_AMT=t.SHIKIRI_AMT - ,NHI_PRICE=t.NHI_PRICE - ,NHI_AMT=t.NHI_AMT - ,WHSPOS_ERR_KBN=t.WHSPOS_ERR_KBN - ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN=t.PRD_EXIS_KBN - ,FCL_EXIS_KBN=t.FCL_EXIS_KBN - ,BEF_HSDN_YMD=t.BEF_HSDN_YMD - ,BEF_SLIP_NUM=t.BEF_SLIP_NUM - ,SLIP_ORG_KBN=t.SLIP_ORG_KBN - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - 
,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,ERR_FLG11=t.ERR_FLG11 - ,ERR_FLG12=t.ERR_FLG12 - ,ERR_FLG13=t.ERR_FLG13 - ,ERR_FLG14=t.ERR_FLG14 - ,ERR_FLG15=t.ERR_FLG15 - ,ERR_FLG16=t.ERR_FLG16 - ,ERR_FLG17=t.ERR_FLG17 - ,ERR_FLG18=t.ERR_FLG18 - ,ERR_FLG19=t.ERR_FLG19 - ,ERR_FLG20=t.ERR_FLG20 - ,KJYO_YM=t.KJYO_YM - ,TKSNBK_KBN=t.TKSNBK_KBN - ,FCL_EXEC_KBN=t.FCL_EXEC_KBN - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + ,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_qty=t.rec_qty + ,rec_nonyu_price=t.rec_nonyu_price + ,rec_nonyu_amt=t.rec_nonyu_amt + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,free_item=t.free_item + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post=t.rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel=t.rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd=t.rec_bef_hsdn_ymd + ,rec_bef_slip_num=t.rec_bef_slip_num + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + ,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_kn=t.v_inst_kn + ,v_inst_name=t.v_inst_name + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,comm_name=t.comm_name + ,nonyu_qty=t.nonyu_qty + ,nonyu_price=t.nonyu_price + ,nonyu_amt=t.nonyu_amt + ,shikiri_price=t.shikiri_price + ,shikiri_amt=t.shikiri_amt + ,nhi_price=t.nhi_price + ,nhi_amt=t.nhi_amt + ,whspos_err_kbn=t.whspos_err_kbn + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,bef_hsdn_ymd=t.bef_hsdn_ymd + ,bef_slip_num=t.bef_slip_num + ,slip_org_kbn=t.slip_org_kbn + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,err_flg11=t.err_flg11 + ,err_flg12=t.err_flg12 + ,err_flg13=t.err_flg13 + ,err_flg14=t.err_flg14 + ,err_flg15=t.err_flg15 + ,err_flg16=t.err_flg16 + ,err_flg17=t.err_flg17 + ,err_flg18=t.err_flg18 + ,err_flg19=t.err_flg19 + ,err_flg20=t.err_flg20 + ,kjyo_ym=t.kjyo_ym + ,tksnbk_kbn=t.tksnbk_kbn + ,fcl_exec_kbn=t.fcl_exec_kbn + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dT=SYSDATE() ; """) }, @@ -303,46 +303,46 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.hld_mst_v ( - V_HLD_CD - ,SUB_NUM - ,NAME - ,KN_NAME - ,ABB_NAME - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_hld_cd + ,sub_num + ,name + ,kn_name + ,abb_name + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_HLD_CD - ,t.SUB_NUM - ,t.NAME - ,t.KN_NAME - ,t.ABB_NAME - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_hld_cd + ,t.sub_num + ,t.name + ,t.kn_name + 
,t.abb_name + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.hld_mst_v AS t ON DUPLICATE KEY UPDATE - V_HLD_CD=t.V_HLD_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ABB_NAME=t.ABB_NAME - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_hld_cd=t.v_hld_cd + ,sub_num=t.sub_num + ,name=t.name + ,kn_name=t.kn_name + ,abb_name=t.abb_name + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -356,61 +356,61 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whs_mst_v ( - V_WHS_CD - ,SUB_NUM - ,NAME - ,KN_NAME - ,ABB_NAME - ,POSTAL_CD - ,ADDR - ,KN_ADDR - ,TEL_NUM - ,V_HLD_CD - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_whs_cd + ,sub_num + ,name + ,kn_name + ,abb_name + ,postal_cd + ,addr + ,kn_addr + ,tel_num + ,v_hld_cd + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_WHS_CD - ,t.SUB_NUM - ,t.NAME - ,t.KN_NAME - ,t.ABB_NAME - ,t.POSTAL_CD - ,t.ADDR - ,t.KN_ADDR - ,t.TEL_NUM - ,t.V_HLD_CD - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_whs_cd + ,t.sub_num + ,t.name + ,t.kn_name + ,t.abb_name + ,t.postal_cd + ,t.addr + ,t.kn_addr + ,t.tel_num + ,t.v_hld_cd + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.whs_mst_v AS t ON DUPLICATE KEY UPDATE - V_WHS_CD=t.V_WHS_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ABB_NAME=t.ABB_NAME - ,POSTAL_CD=t.POSTAL_CD - ,ADDR=t.ADDR - ,KN_ADDR=t.KN_ADDR - ,TEL_NUM=t.TEL_NUM - ,V_HLD_CD=t.V_HLD_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_whs_cd=t.v_whs_cd + ,sub_num=t.sub_num + ,name=t.name + ,kn_name=t.kn_name + ,abb_name=t.abb_name + ,postal_cd=t.postal_cd + ,addr=t.addr + ,kn_addr=t.kn_addr + ,tel_num=t.tel_num + ,v_hld_cd=t.v_hld_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -424,151 +424,151 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.mkr_org_horizon_v ( - VID_KIND_1 - ,V_CD_1 - ,NAME_1 - ,DSP_ODR_1 - ,VID_KIND_2 - ,V_CD_2 - ,NAME_2 - ,DSP_ODR_2 - ,VID_KIND_3 - ,V_CD_3 - ,NAME_3 - ,DSP_ODR_3 - ,VID_KIND_4 - ,V_CD_4 - ,NAME_4 - ,DSP_ODR_4 - ,VID_KIND_5 - ,V_CD_5 - ,NAME_5 - ,DSP_ODR_5 - ,VID_KIND_6 - ,V_CD_6 - ,NAME_6 - ,DSP_ODR_6 - ,VID_KIND_7 - ,V_CD_7 - ,NAME_7 - ,DSP_ODR_7 - ,VID_KIND_8 - ,V_CD_8 - ,NAME_8 - ,DSP_ODR_8 - ,VID_KIND_9 - ,V_CD_9 - ,NAME_9 - ,DSP_ODR_9 - ,VID_KIND_10 - ,V_CD_10 - ,NAME_10 - ,DSP_ODR_10 - ,V_WHS_CD - ,START_DATE - ,END_DATE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + vid_kind_1 + ,v_cd_1 + ,name_1 + ,dsp_odr_1 + ,vid_kind_2 + ,v_cd_2 + ,name_2 + ,dsp_odr_2 + ,vid_kind_3 + ,v_cd_3 + ,name_3 + ,dsp_odr_3 + ,vid_kind_4 + ,v_cd_4 + ,name_4 + ,dsp_odr_4 + ,vid_kind_5 + ,v_cd_5 + ,name_5 + ,dsp_odr_5 + ,vid_kind_6 + ,v_cd_6 + ,name_6 + ,dsp_odr_6 + ,vid_kind_7 + ,v_cd_7 + ,name_7 + ,dsp_odr_7 + ,vid_kind_8 + 
,v_cd_8 + ,name_8 + ,dsp_odr_8 + ,vid_kind_9 + ,v_cd_9 + ,name_9 + ,dsp_odr_9 + ,vid_kind_10 + ,v_cd_10 + ,name_10 + ,dsp_odr_10 + ,v_whs_cd + ,start_date + ,end_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.VID_KIND_1 - ,t.V_CD_1 - ,t.NAME_1 - ,t.DSP_ODR_1 - ,t.VID_KIND_2 - ,t.V_CD_2 - ,t.NAME_2 - ,t.DSP_ODR_2 - ,t.VID_KIND_3 - ,t.V_CD_3 - ,t.NAME_3 - ,t.DSP_ODR_3 - ,t.VID_KIND_4 - ,t.V_CD_4 - ,t.NAME_4 - ,t.DSP_ODR_4 - ,t.VID_KIND_5 - ,t.V_CD_5 - ,t.NAME_5 - ,t.DSP_ODR_5 - ,t.VID_KIND_6 - ,t.V_CD_6 - ,t.NAME_6 - ,t.DSP_ODR_6 - ,t.VID_KIND_7 - ,t.V_CD_7 - ,t.NAME_7 - ,t.DSP_ODR_7 - ,t.VID_KIND_8 - ,t.V_CD_8 - ,t.NAME_8 - ,t.DSP_ODR_8 - ,t.VID_KIND_9 - ,t.V_CD_9 - ,t.NAME_9 - ,t.DSP_ODR_9 - ,t.VID_KIND_10 - ,t.V_CD_10 - ,t.NAME_10 - ,t.DSP_ODR_10 - ,t.V_WHS_CD - ,t.START_DATE - ,t.END_DATE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.vid_kind_1 + ,t.v_cd_1 + ,t.name_1 + ,t.dsp_odr_1 + ,t.vid_kind_2 + ,t.v_cd_2 + ,t.name_2 + ,t.dsp_odr_2 + ,t.vid_kind_3 + ,t.v_cd_3 + ,t.name_3 + ,t.dsp_odr_3 + ,t.vid_kind_4 + ,t.v_cd_4 + ,t.name_4 + ,t.dsp_odr_4 + ,t.vid_kind_5 + ,t.v_cd_5 + ,t.name_5 + ,t.dsp_odr_5 + ,t.vid_kind_6 + ,t.v_cd_6 + ,t.name_6 + ,t.dsp_odr_6 + ,t.vid_kind_7 + ,t.v_cd_7 + ,t.name_7 + ,t.dsp_odr_7 + ,t.vid_kind_8 + ,t.v_cd_8 + ,t.name_8 + ,t.dsp_odr_8 + ,t.vid_kind_9 + ,t.v_cd_9 + ,t.name_9 + ,t.dsp_odr_9 + ,t.vid_kind_10 + ,t.v_cd_10 + ,t.name_10 + ,t.dsp_odr_10 + ,t.v_whs_cd + ,t.start_date + ,t.end_date + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.mkr_org_horizon_v AS t ON DUPLICATE KEY UPDATE - VID_KIND_1=t.VID_KIND_1 - ,V_CD_1=t.V_CD_1 - ,NAME_1=t.NAME_1 - ,DSP_ODR_1=t.DSP_ODR_1 - ,VID_KIND_2=t.VID_KIND_2 - ,V_CD_2=t.V_CD_2 - ,NAME_2=t.NAME_2 - ,DSP_ODR_2=t.DSP_ODR_2 - ,VID_KIND_3=t.VID_KIND_3 - ,V_CD_3=t.V_CD_3 - ,NAME_3=t.NAME_3 - ,DSP_ODR_3=t.DSP_ODR_3 - ,VID_KIND_4=t.VID_KIND_4 - ,V_CD_4=t.V_CD_4 - ,NAME_4=t.NAME_4 - ,DSP_ODR_4=t.DSP_ODR_4 - ,VID_KIND_5=t.VID_KIND_5 - ,V_CD_5=t.V_CD_5 - ,NAME_5=t.NAME_5 - ,DSP_ODR_5=t.DSP_ODR_5 - ,VID_KIND_6=t.VID_KIND_6 - ,V_CD_6=t.V_CD_6 - ,NAME_6=t.NAME_6 - ,DSP_ODR_6=t.DSP_ODR_6 - ,VID_KIND_7=t.VID_KIND_7 - ,V_CD_7=t.V_CD_7 - ,NAME_7=t.NAME_7 - ,DSP_ODR_7=t.DSP_ODR_7 - ,VID_KIND_8=t.VID_KIND_8 - ,V_CD_8=t.V_CD_8 - ,NAME_8=t.NAME_8 - ,DSP_ODR_8=t.DSP_ODR_8 - ,VID_KIND_9=t.VID_KIND_9 - ,V_CD_9=t.V_CD_9 - ,NAME_9=t.NAME_9 - ,DSP_ODR_9=t.DSP_ODR_9 - ,VID_KIND_10=t.VID_KIND_10 - ,V_CD_10=t.V_CD_10 - ,NAME_10=t.NAME_10 - ,DSP_ODR_10=t.DSP_ODR_10 - ,V_WHS_CD=t.V_WHS_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + vid_kind_1=t.vid_kind_1 + ,v_cd_1=t.v_cd_1 + ,name_1=t.name_1 + ,dsp_odr_1=t.dsp_odr_1 + ,vid_kind_2=t.vid_kind_2 + ,v_cd_2=t.v_cd_2 + ,name_2=t.name_2 + ,dsp_odr_2=t.dsp_odr_2 + ,vid_kind_3=t.vid_kind_3 + ,v_cd_3=t.v_cd_3 + ,name_3=t.name_3 + ,dsp_odr_3=t.dsp_odr_3 + ,vid_kind_4=t.vid_kind_4 + ,v_cd_4=t.v_cd_4 + ,name_4=t.name_4 + ,dsp_odr_4=t.dsp_odr_4 + ,vid_kind_5=t.vid_kind_5 + ,v_cd_5=t.v_cd_5 + ,name_5=t.name_5 + ,dsp_odr_5=t.dsp_odr_5 + ,vid_kind_6=t.vid_kind_6 + ,v_cd_6=t.v_cd_6 + ,name_6=t.name_6 + ,dsp_odr_6=t.dsp_odr_6 + ,vid_kind_7=t.vid_kind_7 + ,v_cd_7=t.v_cd_7 + ,name_7=t.name_7 + ,dsp_odr_7=t.dsp_odr_7 + ,vid_kind_8=t.vid_kind_8 + ,v_cd_8=t.v_cd_8 + ,name_8=t.name_8 + ,dsp_odr_8=t.dsp_odr_8 + ,vid_kind_9=t.vid_kind_9 + ,v_cd_9=t.v_cd_9 + ,name_9=t.name_9 + ,dsp_odr_9=t.dsp_odr_9 + ,vid_kind_10=t.vid_kind_10 + ,v_cd_10=t.v_cd_10 + ,name_10=t.name_10 + ,dsp_odr_10=t.dsp_odr_10 + 
,v_whs_cd=t.v_whs_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -582,46 +582,46 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.org_cnv_mst_v ( - WHS_CD - ,WHS_SUB_CD - ,ORG_CD - ,SUB_NUM - ,V_ORG_CD - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + whs_cd + ,whs_sub_cd + ,org_cd + ,sub_num + ,v_org_cd + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.WHS_CD - ,t.WHS_SUB_CD - ,t.ORG_CD - ,t.SUB_NUM - ,t.V_ORG_CD - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.whs_cd + ,t.whs_sub_cd + ,t.org_cd + ,t.sub_num + ,t.v_org_cd + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.org_cnv_mst_v AS t ON DUPLICATE KEY UPDATE - WHS_CD=t.WHS_CD - ,WHS_SUB_CD=t.WHS_SUB_CD - ,ORG_CD=t.ORG_CD - ,SUB_NUM=t.SUB_NUM - ,V_ORG_CD=t.V_ORG_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,org_cd=t.org_cd + ,sub_num=t.sub_num + ,v_org_cd=t.v_org_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -635,40 +635,40 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.tran_kbn_mst_v ( - V_TRAN_CD - ,SUB_NUM - ,NAME - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_tran_cd + ,sub_num + ,name + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_TRAN_CD - ,t.SUB_NUM - ,t.NAME - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_tran_cd + ,t.sub_num + ,t.name + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.tran_kbn_mst_v AS t ON DUPLICATE KEY UPDATE - V_TRAN_CD=t.V_TRAN_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_tran_cd=t.v_tran_cd + ,sub_num=t.sub_num + ,name=t.name + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -682,85 +682,85 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.fcl_mst_v ( - V_INST_CD - ,SUB_NUM - ,START_DATE - ,END_DATE - ,CLOSED_DT - ,FCL_NAME - ,FCL_KN_NAME - ,FCL_ABB_NAME - ,FCL_ABB_KN_NAME - ,MKR_CD - ,JSK_PROC_KBN - ,FMT_ADDR - ,FMT_KN_ADDR - ,POSTAL_CD - ,PRFT_CD - ,PRFT_NAME - ,CITY_NAME - ,ADDR_LINE_1 - ,TEL_NUM - ,ADMIN_KBN - ,FCL_TYPE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_inst_cd + ,sub_num + ,start_date + ,end_date + ,closed_dt + ,fcl_name + ,fcl_kn_name + ,fcl_abb_name + ,fcl_abb_kn_name + ,mkr_cd + ,jsk_proc_kbn + ,fmt_addr + ,fmt_kn_addr + ,postal_cd + ,prft_cd + ,prft_name + ,city_name + ,addr_line_1 + ,tel_num + ,admin_kbn + ,fcl_type + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_INST_CD - ,t.SUB_NUM - 
,t.START_DATE - ,t.END_DATE - ,t.CLOSED_DT - ,t.FCL_NAME - ,t.FCL_KN_NAME - ,t.FCL_ABB_NAME - ,t.FCL_ABB_KN_NAME - ,t.MKR_CD - ,t.JSK_PROC_KBN - ,t.FMT_ADDR - ,t.FMT_KN_ADDR - ,t.POSTAL_CD - ,t.PRFT_CD - ,t.PRFT_NAME - ,t.CITY_NAME - ,t.ADDR_LINE_1 - ,t.TEL_NUM - ,t.ADMIN_KBN - ,t.FCL_TYPE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_inst_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.closed_dt + ,t.fcl_name + ,t.fcl_kn_name + ,t.fcl_abb_name + ,t.fcl_abb_kn_name + ,t.mkr_cd + ,t.jsk_proc_kbn + ,t.fmt_addr + ,t.fmt_kn_addr + ,t.postal_cd + ,t.prft_cd + ,t.prft_name + ,t.city_name + ,t.addr_line_1 + ,t.tel_num + ,t.admin_kbn + ,t.fcl_type + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.fcl_mst_v AS t ON DUPLICATE KEY UPDATE - V_INST_CD=t.V_INST_CD - ,SUB_NUM=t.SUB_NUM - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,CLOSED_DT=t.CLOSED_DT - ,FCL_NAME=t.FCL_NAME - ,FCL_KN_NAME=t.FCL_KN_NAME - ,FCL_ABB_NAME=t.FCL_ABB_NAME - ,FCL_ABB_KN_NAME=t.FCL_ABB_KN_NAME - ,MKR_CD=t.MKR_CD - ,JSK_PROC_KBN=t.JSK_PROC_KBN - ,FMT_ADDR=t.FMT_ADDR - ,FMT_KN_ADDR=t.FMT_KN_ADDR - ,POSTAL_CD=t.POSTAL_CD - ,PRFT_CD=t.PRFT_CD - ,PRFT_NAME=t.PRFT_NAME - ,CITY_NAME=t.CITY_NAME - ,ADDR_LINE_1=t.ADDR_LINE_1 - ,TEL_NUM=t.TEL_NUM - ,ADMIN_KBN=t.ADMIN_KBN - ,FCL_TYPE=t.FCL_TYPE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_inst_cd=t.v_inst_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + ,end_date=t.end_date + ,closed_dt=t.closed_dt + ,fcl_name=t.fcl_name + ,fcl_kn_name=t.fcl_kn_name + ,fcl_abb_name=t.fcl_abb_name + ,fcl_abb_kn_name=t.fcl_abb_kn_name + ,mkr_cd=t.mkr_cd + ,jsk_proc_kbn=t.jsk_proc_kbn + ,fmt_addr=t.fmt_addr + ,fmt_kn_addr=t.fmt_kn_addr + ,postal_cd=t.postal_cd + ,prft_cd=t.prft_cd + ,prft_name=t.prft_name + ,city_name=t.city_name + ,addr_line_1=t.addr_line_1 + ,tel_num=t.tel_num + ,admin_kbn=t.admin_kbn + ,fcl_type=t.fcl_type + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -774,97 +774,97 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.phm_prd_mst_v ( - PRD_CD - ,SUB_NUM - ,PRD_NAME - ,PRD_E_NAME - ,MKR_CD - ,MKR_INF_1 - ,MKR_INF_2 - ,PHM_ITM_CD - ,ITM_NAME - ,ITM_ABB_NAME - ,FORM_CD - ,FORM_NAME - ,VOL_CD - ,VOL_NAME - ,CONT_CD - ,CONT_NAME - ,PKG_CD - ,PKG_NAME - ,CNV_NUM - ,JSK_START_DT - ,PRD_SALE_KBN - ,JSK_PROC_KBN - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + prd_cd + ,sub_num + ,prd_name + ,prd_e_name + ,mkr_cd + ,mkr_inf_1 + ,mkr_inf_2 + ,phm_itm_cd + ,itm_name + ,itm_abb_name + ,form_cd + ,form_name + ,vol_cd + ,vol_name + ,cont_cd + ,cont_name + ,pkg_cd + ,pkg_name + ,cnv_num + ,jsk_start_dt + ,prd_sale_kbn + ,jsk_proc_kbn + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.PRD_CD - ,t.SUB_NUM - ,t.PRD_NAME - ,t.PRD_E_NAME - ,t.MKR_CD - ,t.MKR_INF_1 - ,t.MKR_INF_2 - ,t.PHM_ITM_CD - ,t.ITM_NAME - ,t.ITM_ABB_NAME - ,t.FORM_CD - ,t.FORM_NAME - ,t.VOL_CD - ,t.VOL_NAME - ,t.CONT_CD - ,t.CONT_NAME - ,t.PKG_CD - ,t.PKG_NAME - ,t.CNV_NUM - ,nullif(t.JSK_START_DT, 0) - ,t.PRD_SALE_KBN - ,t.JSK_PROC_KBN - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.prd_cd + ,t.sub_num + ,t.prd_name + ,t.prd_e_name + ,t.mkr_cd + ,t.mkr_inf_1 + ,t.mkr_inf_2 + ,t.phm_itm_cd + ,t.itm_name + ,t.itm_abb_name + ,t.form_cd + ,t.form_name + ,t.vol_cd + ,t.vol_name + ,t.cont_cd + ,t.cont_name + 
,t.pkg_cd + ,t.pkg_name + ,t.cnv_num + ,nullif(t.jsk_start_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.prd_sale_kbn + ,t.jsk_proc_kbn + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.phm_prd_mst_v AS t ON DUPLICATE KEY UPDATE - PRD_CD=t.PRD_CD - ,SUB_NUM=t.SUB_NUM - ,PRD_NAME=t.PRD_NAME - ,PRD_E_NAME=t.PRD_E_NAME - ,MKR_CD=t.MKR_CD - ,MKR_INF_1=t.MKR_INF_1 - ,MKR_INF_2=t.MKR_INF_2 - ,PHM_ITM_CD=t.PHM_ITM_CD - ,ITM_NAME=t.ITM_NAME - ,ITM_ABB_NAME=t.ITM_ABB_NAME - ,FORM_CD=t.FORM_CD - ,FORM_NAME=t.FORM_NAME - ,VOL_CD=t.VOL_CD - ,VOL_NAME=t.VOL_NAME - ,CONT_CD=t.CONT_CD - ,CONT_NAME=t.CONT_NAME - ,PKG_CD=t.PKG_CD - ,PKG_NAME=t.PKG_NAME - ,CNV_NUM=t.CNV_NUM - ,JSK_START_DT=nullif(t.JSK_START_DT, 0) - ,PRD_SALE_KBN=t.PRD_SALE_KBN - ,JSK_PROC_KBN=t.JSK_PROC_KBN - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + prd_cd=t.prd_cd + ,sub_num=t.sub_num + ,prd_name=t.prd_name + ,prd_e_name=t.prd_e_name + ,mkr_cd=t.mkr_cd + ,mkr_inf_1=t.mkr_inf_1 + ,mkr_inf_2=t.mkr_inf_2 + ,phm_itm_cd=t.phm_itm_cd + ,itm_name=t.itm_name + ,itm_abb_name=t.itm_abb_name + ,form_cd=t.form_cd + ,form_name=t.form_name + ,vol_cd=t.vol_cd + ,vol_name=t.vol_name + ,cont_cd=t.cont_cd + ,cont_name=t.cont_name + ,pkg_cd=t.pkg_cd + ,pkg_name=t.pkg_name + ,cnv_num=t.cnv_num + ,jsk_start_dt=nullif(t.jsk_start_dt, 0) + ,prd_sale_kbn=t.prd_sale_kbn + ,jsk_proc_kbn=t.jsk_proc_kbn + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -878,43 +878,43 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.phm_price_mst_v ( - PHM_PRD_CD - ,PHM_PRICE_KIND - ,SUB_NUM - ,PRICE - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + phm_prd_cd + ,phm_price_kind + ,sub_num + ,price + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.PHM_PRD_CD - ,t.PHM_PRICE_KIND - ,t.SUB_NUM - ,t.PRICE - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.phm_prd_cd + ,t.phm_price_kind + ,t.sub_num + ,t.price + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.phm_price_mst_v AS t ON DUPLICATE KEY UPDATE - PHM_PRD_CD=t.PHM_PRD_CD - ,PHM_PRICE_KIND=t.PHM_PRICE_KIND - ,SUB_NUM=t.SUB_NUM - ,PRICE=t.PRICE - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + phm_prd_cd=t.phm_prd_cd + ,phm_price_kind=t.phm_price_kind + ,sub_num=t.sub_num + ,price=t.price + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -928,25 +928,25 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.vop_hco_merge_v ( - V_INST_CD - ,V_INST_CD_MERG - ,APPLY_DT - ,MERGE_REASON - ,DWH_UPD_DT + v_inst_cd + ,v_inst_cd_merg + ,apply_dt + ,merge_reason + ,dwh_upd_dt ) SELECT - t.V_INST_CD - ,t.V_INST_CD_MERG - ,t.APPLY_DT - ,t.MERGE_REASON + t.v_inst_cd + ,t.v_inst_cd_merg + ,t.apply_dt + ,t.merge_reason ,SYSDATE() FROM org05.vop_hco_merge_v AS t ON DUPLICATE KEY 
UPDATE - V_INST_CD=t.V_INST_CD - ,V_INST_CD_MERG=t.V_INST_CD_MERG - ,APPLY_DT=t.APPLY_DT - ,MERGE_REASON=t.MERGE_REASON - ,DWH_UPD_DT=SYSDATE() + v_inst_cd=t.v_inst_cd + ,v_inst_cd_merg=t.v_inst_cd_merg + ,apply_dt=t.apply_dt + ,merge_reason=t.merge_reason + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -960,64 +960,64 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whs_customer_mst_v ( - WHS_CD - ,WHS_SUB_CD - ,CUSTOMER_CD - ,SUB_NUM - ,START_DATE - ,END_DATE - ,WHS_ORG_CD - ,SRC_ORG_CD - ,NAME - ,KN_NAME - ,ADDR - ,KN_ADDR - ,POSTAL_CD - ,TEL_NUM - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + whs_cd + ,whs_sub_cd + ,customer_cd + ,sub_num + ,start_date + ,end_date + ,whs_org_cd + ,src_org_cd + ,name + ,kn_name + ,addr + ,kn_addr + ,postal_cd + ,tel_num + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.WHS_CD - ,t.WHS_SUB_CD - ,t.CUSTOMER_CD - ,t.SUB_NUM - ,t.START_DATE - ,t.END_DATE - ,t.WHS_ORG_CD - ,t.SRC_ORG_CD - ,t.NAME - ,t.KN_NAME - ,t.ADDR - ,t.KN_ADDR - ,t.POSTAL_CD - ,t.TEL_NUM - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.whs_cd + ,t.whs_sub_cd + ,t.customer_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.whs_org_cd + ,t.src_org_cd + ,t.name + ,t.kn_name + ,t.addr + ,t.kn_addr + ,t.postal_cd + ,t.tel_num + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.whs_customer_mst_v AS t ON DUPLICATE KEY UPDATE - WHS_CD=t.WHS_CD - ,WHS_SUB_CD=t.WHS_SUB_CD - ,CUSTOMER_CD=t.CUSTOMER_CD - ,SUB_NUM=t.SUB_NUM - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,WHS_ORG_CD=t.WHS_ORG_CD - ,SRC_ORG_CD=t.SRC_ORG_CD - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ADDR=t.ADDR - ,KN_ADDR=t.KN_ADDR - ,POSTAL_CD=t.POSTAL_CD - ,TEL_NUM=t.TEL_NUM - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,customer_cd=t.customer_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + ,end_date=t.end_date + ,whs_org_cd=t.whs_org_cd + ,src_org_cd=t.src_org_cd + ,name=t.name + ,kn_name=t.kn_name + ,addr=t.addr + ,kn_addr=t.kn_addr + ,postal_cd=t.postal_cd + ,tel_num=t.tel_num + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1031,37 +1031,37 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.mdb_cnv_mst_v ( - HCO_VID_V - ,SUB_NUM - ,MDB_CD - ,RELIABILITY - ,START_DATE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + hco_vid_v + ,sub_num + ,mdb_cd + ,reliability + ,start_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.HCO_VID_V - ,t.SUB_NUM - ,t.MDB_CD - ,t.RELIABILITY - ,t.START_DATE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.hco_vid_v + ,t.sub_num + ,t.mdb_cd + ,t.reliability + ,t.start_date + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.mdb_cnv_mst_v AS t ON DUPLICATE KEY UPDATE - HCO_VID_V=t.HCO_VID_V - ,SUB_NUM=t.SUB_NUM - ,MDB_CD=t.MDB_CD - ,RELIABILITY=t.RELIABILITY - ,START_DATE=t.START_DATE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + hco_vid_v=t.hco_vid_v + ,sub_num=t.sub_num + ,mdb_cd=t.mdb_cd + ,reliability=t.reliability + ,start_date=t.start_date + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1075,100 +1075,100 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whole_stock ( - REC_DATA - 
,REC_WHS_CD - ,REC_WHS_SUB_CD - ,REC_STO_PLACE - ,REC_STOCK_YMD - ,REC_COMM_CD - ,REC_QTY - ,REC_STOCK_NO_SIGN - ,REC_JAN_CD - ,FREE_ITEM - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,EXEC_DT - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_sto_place + ,rec_stock_ymd + ,rec_comm_cd + ,rec_qty + ,rec_stock_no_sign + ,rec_jan_cd + ,free_item + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,exec_dt + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_STO_PLACE - ,t.REC_STOCK_YMD - ,t.REC_COMM_CD - ,t.REC_QTY - ,t.REC_STOCK_NO_SIGN - ,t.REC_JAN_CD - ,t.FREE_ITEM - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.EXEC_DT - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - ,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.REC_STS_KBN - ,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_sto_place + ,t.rec_stock_ymd + ,t.rec_comm_cd + ,t.rec_qty + ,t.rec_stock_no_sign + ,t.rec_jan_cd + ,t.free_item + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.exec_dt + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.whole_stock AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_STO_PLACE=t.REC_STO_PLACE - ,REC_STOCK_YMD=t.REC_STOCK_YMD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_QTY=t.REC_QTY - ,REC_STOCK_NO_SIGN=t.REC_STOCK_NO_SIGN - ,REC_JAN_CD=t.REC_JAN_CD - ,FREE_ITEM=t.FREE_ITEM - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,EXEC_DT=t.EXEC_DT - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - ,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_sto_place=t.rec_sto_place + ,rec_stock_ymd=t.rec_stock_ymd + ,rec_comm_cd=t.rec_comm_cd + ,rec_qty=t.rec_qty + ,rec_stock_no_sign=t.rec_stock_no_sign + ,rec_jan_cd=t.rec_jan_cd + ,free_item=t.free_item + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,exec_dt=t.exec_dt + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1182,247 +1182,247 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.bio_sales ( - REC_DATA - ,REC_WHS_CD - ,REC_WHS_SUB_CD - 
,REC_WHS_ORG_CD - ,REC_CUST_CD - ,REC_COMM_CD - ,REC_TRAN_KBN - ,REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK - ,REC_URAG_NUM - ,REC_COMM_NAME - ,REC_NONYU_FCL_NAME - ,REC_NONYU_FCL_ADDR - ,REC_LOT_NUM1 - ,REC_QTY1 - ,REC_LOT_NUM2 - ,REC_QTY2 - ,REC_LOT_NUM3 - ,REC_QTY3 - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,HSDN_YMD - ,EXEC_DT - ,V_TRAN_CD - ,TRAN_KBN_NAME - ,WHS_ORG_CD - ,V_WHSORG_CD - ,WHS_ORG_NAME - ,WHS_ORG_KN - ,V_WHS_CD - ,WHS_NAME - ,NONYU_FCL_CD - ,V_INST_CD - ,V_INST_NAME - ,V_INST_KN - ,V_INST_ADDR - ,COMM_CD - ,PRODUCT_NAME - ,HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN - ,FCL_EXIS_KBN - ,QTY1 - ,QTY2 - ,QTY3 - ,SLIP_ORG_KBN - ,BEF_SLIP_MGT_NUM - ,WHS_REP_COMM_NAME - ,WHS_REP_NONYU_FCL_NAME - ,WHS_REP_NONYU_FCL_ADDR - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,ERR_FLG11 - ,ERR_FLG12 - ,ERR_FLG13 - ,ERR_FLG14 - ,ERR_FLG15 - ,ERR_FLG16 - ,ERR_FLG17 - ,ERR_FLG18 - ,ERR_FLG19 - ,ERR_FLG20 - ,KJYO_YM - ,TKSNBK_KBN - ,FCL_EXEC_KBN - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_comm_name + ,rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr + ,rec_lot_num1 + ,rec_qty1 + ,rec_lot_num2 + ,rec_qty2 + ,rec_lot_num3 + ,rec_qty3 + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + ,v_inst_cd + ,v_inst_name + ,v_inst_kn + ,v_inst_addr + ,comm_cd + ,product_name + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,qty1 + ,qty2 + ,qty3 + ,slip_org_kbn + ,bef_slip_mgt_num + ,whs_rep_comm_name + ,whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + ,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + ,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_WHS_ORG_CD - ,t.REC_CUST_CD - ,t.REC_COMM_CD - ,t.REC_TRAN_KBN - ,t.REV_HSDNYMD_WRK - ,t.REV_HSDNYMD_SRK - ,t.REC_URAG_NUM - ,t.REC_COMM_NAME - ,t.REC_NONYU_FCL_NAME - ,t.REC_NONYU_FCL_ADDR - ,t.REC_LOT_NUM1 - ,t.REC_QTY1 - ,t.REC_LOT_NUM2 - ,t.REC_QTY2 - ,t.REC_LOT_NUM3 - ,t.REC_QTY3 - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.HSDN_YMD - ,t.EXEC_DT - ,t.V_TRAN_CD - ,t.TRAN_KBN_NAME - ,t.WHS_ORG_CD - ,t.V_WHSORG_CD - ,t.WHS_ORG_NAME - ,t.WHS_ORG_KN - ,t.V_WHS_CD - ,t.WHS_NAME - ,t.NONYU_FCL_CD - ,t.V_INST_CD - ,t.V_INST_NAME - ,t.V_INST_KN - ,t.V_INST_ADDR - ,t.COMM_CD - ,t.PRODUCT_NAME - ,t.HTDNYMD_ERR_KBN - ,t.PRD_EXIS_KBN - ,t.FCL_EXIS_KBN - ,t.QTY1 - ,t.QTY2 - ,t.QTY3 - ,t.SLIP_ORG_KBN - ,t.BEF_SLIP_MGT_NUM - ,t.WHS_REP_COMM_NAME - ,t.WHS_REP_NONYU_FCL_NAME - ,t.WHS_REP_NONYU_FCL_ADDR - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - ,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.ERR_FLG11 - ,t.ERR_FLG12 - ,t.ERR_FLG13 - ,t.ERR_FLG14 - ,t.ERR_FLG15 - ,t.ERR_FLG16 - ,t.ERR_FLG17 - ,t.ERR_FLG18 - ,t.ERR_FLG19 - ,t.ERR_FLG20 - ,t.KJYO_YM - ,t.TKSNBK_KBN - ,t.FCL_EXEC_KBN - ,t.REC_STS_KBN - ,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd 
+ ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.rec_nonyu_fcl_addr + ,t.rec_lot_num1 + ,t.rec_qty1 + ,t.rec_lot_num2 + ,t.rec_qty2 + ,t.rec_lot_num3 + ,t.rec_qty3 + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_name + ,t.v_inst_kn + ,t.v_inst_addr + ,t.comm_cd + ,t.product_name + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.qty1 + ,t.qty2 + ,t.qty3 + ,t.slip_org_kbn + ,t.bef_slip_mgt_num + ,t.whs_rep_comm_name + ,t.whs_rep_nonyu_fcl_name + ,t.whs_rep_nonyu_fcl_addr + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.bio_sales AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD - ,REC_CUST_CD=t.REC_CUST_CD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_TRAN_KBN=t.REC_TRAN_KBN - ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK - ,REC_URAG_NUM=t.REC_URAG_NUM - ,REC_COMM_NAME=t.REC_COMM_NAME - ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME - ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR - ,REC_LOT_NUM1=t.REC_LOT_NUM1 - ,REC_QTY1=t.REC_QTY1 - ,REC_LOT_NUM2=t.REC_LOT_NUM2 - ,REC_QTY2=t.REC_QTY2 - ,REC_LOT_NUM3=t.REC_LOT_NUM3 - ,REC_QTY3=t.REC_QTY3 - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,HSDN_YMD=t.HSDN_YMD - ,EXEC_DT=t.EXEC_DT - ,V_TRAN_CD=t.V_TRAN_CD - ,TRAN_KBN_NAME=t.TRAN_KBN_NAME - ,WHS_ORG_CD=t.WHS_ORG_CD - ,V_WHSORG_CD=t.V_WHSORG_CD - ,WHS_ORG_NAME=t.WHS_ORG_NAME - ,WHS_ORG_KN=t.WHS_ORG_KN - ,V_WHS_CD=t.V_WHS_CD - ,WHS_NAME=t.WHS_NAME - ,NONYU_FCL_CD=t.NONYU_FCL_CD - ,V_INST_CD=t.V_INST_CD - ,V_INST_NAME=t.V_INST_NAME - ,V_INST_KN=t.V_INST_KN - ,V_INST_ADDR=t.V_INST_ADDR - ,COMM_CD=t.COMM_CD - ,PRODUCT_NAME=t.PRODUCT_NAME - ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN=t.PRD_EXIS_KBN - ,FCL_EXIS_KBN=t.FCL_EXIS_KBN - ,QTY1=t.QTY1 - ,QTY2=t.QTY2 - ,QTY3=t.QTY3 - ,SLIP_ORG_KBN=t.SLIP_ORG_KBN - ,BEF_SLIP_MGT_NUM=t.BEF_SLIP_MGT_NUM - ,WHS_REP_COMM_NAME=t.WHS_REP_COMM_NAME - ,WHS_REP_NONYU_FCL_NAME=t.WHS_REP_NONYU_FCL_NAME - ,WHS_REP_NONYU_FCL_ADDR=t.WHS_REP_NONYU_FCL_ADDR - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - ,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,ERR_FLG11=t.ERR_FLG11 - ,ERR_FLG12=t.ERR_FLG12 - ,ERR_FLG13=t.ERR_FLG13 - ,ERR_FLG14=t.ERR_FLG14 - ,ERR_FLG15=t.ERR_FLG15 - ,ERR_FLG16=t.ERR_FLG16 - ,ERR_FLG17=t.ERR_FLG17 - ,ERR_FLG18=t.ERR_FLG18 - ,ERR_FLG19=t.ERR_FLG19 - ,ERR_FLG20=t.ERR_FLG20 - ,KJYO_YM=t.KJYO_YM - ,TKSNBK_KBN=t.TKSNBK_KBN - ,FCL_EXEC_KBN=t.FCL_EXEC_KBN - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + 
,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + ,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_lot_num1=t.rec_lot_num1 + ,rec_qty1=t.rec_qty1 + ,rec_lot_num2=t.rec_lot_num2 + ,rec_qty2=t.rec_qty2 + ,rec_lot_num3=t.rec_lot_num3 + ,rec_qty3=t.rec_qty3 + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + ,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_name=t.v_inst_name + ,v_inst_kn=t.v_inst_kn + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,product_name=t.product_name + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,qty1=t.qty1 + ,qty2=t.qty2 + ,qty3=t.qty3 + ,slip_org_kbn=t.slip_org_kbn + ,bef_slip_mgt_num=t.bef_slip_mgt_num + ,whs_rep_comm_name=t.whs_rep_comm_name + ,whs_rep_nonyu_fcl_name=t.whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr=t.whs_rep_nonyu_fcl_addr + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,err_flg11=t.err_flg11 + ,err_flg12=t.err_flg12 + ,err_flg13=t.err_flg13 + ,err_flg14=t.err_flg14 + ,err_flg15=t.err_flg15 + ,err_flg16=t.err_flg16 + ,err_flg17=t.err_flg17 + ,err_flg18=t.err_flg18 + ,err_flg19=t.err_flg19 + ,err_flg20=t.err_flg20 + ,kjyo_ym=t.kjyo_ym + ,tksnbk_kbn=t.tksnbk_kbn + ,fcl_exec_kbn=t.fcl_exec_kbn + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1436,71 +1436,59 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.lot_num_mst ( - SER_NUM - ,LOT_NUM - ,EXPR_DT - ,FRST_MOV_DT - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + ser_num + ,lot_num + ,expr_dt + ,frst_mov_dt + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.SER_NUM - ,t.LOT_NUM - ,t.EXPR_DT - ,t.FRST_MOV_DT - ,t.INS_DT - ,t.INS_USR + t.ser_num + ,t.lot_num + ,t.expr_dt + ,t.frst_mov_dt + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.lot_num_mst AS t ON DUPLICATE KEY UPDATE - SER_NUM=t.SER_NUM - ,LOT_NUM=t.LOT_NUM - ,EXPR_DT=t.EXPR_DT - ,FRST_MOV_DT=t.FRST_MOV_DT - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + ser_num=t.ser_num + ,lot_num=t.lot_num + ,expr_dt=t.expr_dt + ,frst_mov_dt=t.frst_mov_dt + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, } - def get_data_name(self, condkey: str) -> str: + def _get_interface_property(self, condkey: str, property_name: str) -> str: ret = None if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_DATA_NAME) + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(property_name) return ret + def get_data_name(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_DATA_NAME) + def get_file_prefix(self, condkey: str) 
-> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_PREFIX) - return ret + return self._get_interface_property(condkey, self._KEY_FILE_PREFIX) def get_file_suffix(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_SUFFIX) - return ret + return self._get_interface_property(condkey, self._KEY_FILE_SUFFIX) def get_org_table(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_ORG_TABLE) - return ret + return self._get_interface_property(condkey, self._KEY_ORG_TABLE) def get_src_table(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) - return ret + return self._get_interface_property(condkey, self._KEY_SRC_TABLE) def get_upsert_sql(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_UPSERT_SQL) - return ret + return self._get_interface_property(condkey, self._KEY_UPSERT_SQL) def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None From 5a32b27e26983cea93cac2ee5c7216ff8840791a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 12 May 2023 12:27:29 +0900 Subject: [PATCH 014/103] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0(=E5=8F=97=E9=A0=98?= =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E8=A7=A3=E5=87=8D=E3=81=A8?= =?UTF-8?q?=E3=83=90=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97=E9=80=80?= =?UTF-8?q?=E9=81=BF=E3=82=82=E3=82=8C)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_importer.py | 21 +++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 30 +++++++++---------- 2 files changed, 36 insertions(+), 15 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 719e3e62..f2da3ed9 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -48,6 +48,17 @@ def exec(): logger.debug(f'データベース登録失敗 {e}') raise e + # V実消化データ受領ファイルバックアップ退避 + logger.debug('V実消化データ受領ファイルバックアップ退避:開始') + try: + # 取込が完了したS3バケットにある受領ファイルをバックアップ用S3バケットに移動する + _backup_received_files() + + except BatchOperationException as e: + logger.debug('V実消化データ受領ファイルのバックアップ退避が失敗しました') + raise e + logger.debug('V実消化データ受領ファイルバックアップ退避:終了') + logger.debug('exec done') @@ -160,6 +171,8 @@ def _import_file_to_db(): local_file_path = vjsk_recv_bucket.download_data_file(file_name) logger.debug(f"download s3 file done : {file_name}") + # TODO: 受領ファイルはtar.gzなので、ローカルストレージ上で解凍する + # データファイル名に該当する辞書アクセス用のキーを取得する key = vjsk_mapper.get_condkey_by_s3_file_path(file_name) @@ -237,3 +250,11 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret + + +def _backup_received_files(): + logger.debug("_backup_received_files start") + + # TODO 受領バケットのファイル → バックアップバケット + + logger.debug("_backup_received_files done") diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 612309d2..4c374a2b 
100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -29,7 +29,7 @@ class VjskReceiveFileMapper: CONDKEY_SLIP_DATA: { _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -298,7 +298,7 @@ class VjskReceiveFileMapper: CONDKEY_HLD_MST: { _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -351,7 +351,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_MST: { _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -419,7 +419,7 @@ class VjskReceiveFileMapper: CONDKEY_MKR_ORG_HORIZON: { _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -577,7 +577,7 @@ class VjskReceiveFileMapper: CONDKEY_ORG_CNV_MST: { _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -630,7 +630,7 @@ class VjskReceiveFileMapper: CONDKEY_TRAN_KBN_MST: { _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -677,7 +677,7 @@ class VjskReceiveFileMapper: CONDKEY_FCL_MST: { _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -769,7 +769,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRD_MST: { _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -873,7 +873,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRICE_MST: { _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -923,7 +923,7 @@ class VjskReceiveFileMapper: CONDKEY_VOP_HCO_MERGE: { _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -955,7 +955,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_CUSTOMER_MST: { _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1026,7 +1026,7 @@ 
class VjskReceiveFileMapper: CONDKEY_MDB_CONV_MST: { _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1070,7 +1070,7 @@ class VjskReceiveFileMapper: CONDKEY_STOCK_SLIP_DATA: { _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1177,7 +1177,7 @@ class VjskReceiveFileMapper: CONDKEY_BIO_SLIP_DATA: { _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1431,7 +1431,7 @@ class VjskReceiveFileMapper: CONDKEY_LOT_NUM_MST: { _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ From 2f1b42705a5f374bbb3551de70a036e098f114fa Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Sat, 13 May 2023 01:06:11 +0900 Subject: [PATCH 015/103] =?UTF-8?q?=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E3=81=AFtar.gz=E5=BD=A2=E5=BC=8F=E3=81=AA?= =?UTF-8?q?=E3=81=AE=E3=81=A7=E8=A7=A3=E5=87=8D=E3=81=97=E3=81=A6=E3=81=8B?= =?UTF-8?q?=E3=82=89tsv=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E3=82=92LOAD?= =?UTF-8?q?=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E5=AE=9F=E8=A3=85=E6=BC=8F?= =?UTF-8?q?=E3=82=8C=E3=82=92=E8=A3=9C=E5=AE=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 13 +++++++++++++ .../src/batch/vjsk/vjsk_importer.py | 7 +++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 62111409..2e98eaac 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,5 +1,7 @@ import io +import os import os.path as path +import tarfile import tempfile import boto3 @@ -127,3 +129,14 @@ class VjskReceiveBucket(S3Bucket): self._s3_client.download_file(self._bucket_name, data_filename, f) f.seek(0) return temporary_file_path + + def unzip_data_file(self, filename: str): + ret = [] + with tarfile.open(filename) as tar: + temp_dir = os.path.dirname(filename) + tar.extractall(path=temp_dir) + extracted_files = tar.getnames() + for extracted_file in extracted_files: + file = os.path.join(temp_dir, extracted_file) + ret.append(file) + return ret diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index f2da3ed9..2737ffca 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -171,14 +171,17 @@ def _import_file_to_db(): local_file_path = vjsk_recv_bucket.download_data_file(file_name) logger.debug(f"download s3 file done : {file_name}") - # TODO: 受領ファイルはtar.gzなので、ローカルストレージ上で解凍する + # ローカルストレージにdownloadした受領ファイル(tar.gz)を解凍する + unzip_file_path = vjsk_recv_bucket.unzip_data_file(local_file_path) + logger.debug(f"unzip done : {unzip_file_path}") # データファイル名に該当する辞書アクセス用のキーを取得する key = 
vjsk_mapper.get_condkey_by_s3_file_path(file_name) # 想定されたデータファイルであれば辞書登録する if key is not None: - target_dict[key] = {"condkey": key, "src_file_path": local_file_path} + # ※受領ファイル(tar.gz)の書庫構成はtsvファイルが1つだけの前提 + target_dict[key] = {"condkey": key, "src_file_path": unzip_file_path[0]} logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) From 2cb6cb5d996411f99e9b8a398acb5873089fb87e Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 15 May 2023 14:50:48 +0900 Subject: [PATCH 016/103] =?UTF-8?q?=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E3=81=AELOAD=E5=AE=8C=E4=BA=86=E5=BE=8C?= =?UTF-8?q?=E3=80=81=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=82=92=E3=83=90=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97?= =?UTF-8?q?=E3=83=90=E3=82=B1=E3=83=83=E3=83=88=E3=81=AB=E7=A7=BB=E5=8B=95?= =?UTF-8?q?=E3=81=99=E3=82=8B=E5=AE=9F=E8=A3=85=E6=BC=8F=E3=82=8C=E3=81=BB?= =?UTF-8?q?=E8=A3=9C=E5=AE=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 1 + ecs/jskult-batch-daily/src/aws/s3.py | 14 +++++++++++ .../src/batch/vjsk/vjsk_importer.py | 24 ++++++------------- .../src/system_var/environment.py | 1 + 4 files changed, 23 insertions(+), 17 deletions(-) diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index d95322fb..2c1cb5a7 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -8,6 +8,7 @@ ULTMARC_DATA_BUCKET=**************** ULTMARC_DATA_FOLDER=recv JSKULT_BACKUP_BUCKET=**************** ULTMARC_BACKUP_FOLDER=ultmarc +VJSK_BACKUP_FOLDER=vjsk JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2e98eaac..0df364d4 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -112,6 +112,10 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER + + class VjskReceiveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV @@ -140,3 +144,13 @@ class VjskReceiveBucket(S3Bucket): file = os.path.join(temp_dir, extracted_file) ret.append(file) return ret + + def backup_dat_file(self, target_files: list, datetime_key: str): + jskult_backup_bucket = VjskBackupBucket() + for target_file in target_files: + backup_from_file_path = target_file.get("filename") + backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") + backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' + self._s3_client.copy(self._bucket_name, backup_from_file_path, + jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.delete_file(self._bucket_name, backup_from_file_path) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 2737ffca..23fc8e5d 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -30,9 +30,10 @@ def exec(): # V実消化データファイル受領チェック logger.debug('V実消化データファイル受領チェック:開始') + received_s3_files = [] try: # S3バケットにある受領済のV実消化データファイルの存在チェックをする - _check_received_files() + 
received_s3_files = _check_received_files() except BatchOperationException as e: logger.debug('受領したV実消化データファイルに未受領もものがあります') @@ -43,7 +44,7 @@ def exec(): logger.debug('V実消化データ取込:開始') try: # S3バケットにある受領済のV実消化データファイルをデータベースに登録する - _import_file_to_db() + _import_file_to_db(received_s3_files) except Exception as e: logger.debug(f'データベース登録失敗 {e}') raise e @@ -52,7 +53,7 @@ def exec(): logger.debug('V実消化データ受領ファイルバックアップ退避:開始') try: # 取込が完了したS3バケットにある受領ファイルをバックアップ用S3バケットに移動する - _backup_received_files() + vjsk_recv_bucket.backup_dat_file(received_s3_files, batch_context.syor_date) except BatchOperationException as e: logger.debug('V実消化データ受領ファイルのバックアップ退避が失敗しました') @@ -80,7 +81,7 @@ def _check_if_file_exists(src_list: list, condkey: str) -> bool: return ret -def _check_received_files(): +def _check_received_files() -> list: """V実消化連携データファイル受領確認処理""" logger.debug('_check_received_files start') @@ -151,16 +152,13 @@ def _check_received_files(): logger.debug('_check_received_files done') - return + return received_s3_files -def _import_file_to_db(): +def _import_file_to_db(received_s3_files: list): """V実消化連携データ取込処理""" logger.debug('_import_file_to_db start') - # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得 - received_s3_files = vjsk_recv_bucket.get_s3_file_list() - # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する target_dict = {} for s3_file_path in received_s3_files: @@ -253,11 +251,3 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret - - -def _backup_received_files(): - logger.debug("_backup_received_files start") - - # TODO 受領バケットのファイル → バックアップバケット - - logger.debug("_backup_received_files done") diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index 6a2fca0b..5973a181 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -12,6 +12,7 @@ ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] From 7e69b86f0a5a6824d2759c60d9b20077930d3ca7 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 15 May 2023 17:26:11 +0900 Subject: [PATCH 017/103] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0=E3=80=80=E5=8F=97=E9=A0=98?= =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E3=81=AE=E6=8B=A1=E5=BC=B5?= =?UTF-8?q?=E5=AD=90=E3=81=AF=20.gz=20=E3=81=A0=E3=81=91=E3=81=AB=E3=81=AA?= =?UTF-8?q?=E3=81=A3=E3=81=A6=E3=81=84=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 4c374a2b..8b372dc2 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -29,7 
+29,7 @@ class VjskReceiveFileMapper: CONDKEY_SLIP_DATA: { _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -298,7 +298,7 @@ class VjskReceiveFileMapper: CONDKEY_HLD_MST: { _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -351,7 +351,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_MST: { _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -419,7 +419,7 @@ class VjskReceiveFileMapper: CONDKEY_MKR_ORG_HORIZON: { _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -577,7 +577,7 @@ class VjskReceiveFileMapper: CONDKEY_ORG_CNV_MST: { _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -630,7 +630,7 @@ class VjskReceiveFileMapper: CONDKEY_TRAN_KBN_MST: { _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -677,7 +677,7 @@ class VjskReceiveFileMapper: CONDKEY_FCL_MST: { _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -769,7 +769,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRD_MST: { _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -873,7 +873,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRICE_MST: { _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -923,7 +923,7 @@ class VjskReceiveFileMapper: CONDKEY_VOP_HCO_MERGE: { _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -955,7 +955,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_CUSTOMER_MST: { _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1026,7 +1026,7 @@ class VjskReceiveFileMapper: CONDKEY_MDB_CONV_MST: { _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + 
_KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1070,7 +1070,7 @@ class VjskReceiveFileMapper: CONDKEY_STOCK_SLIP_DATA: { _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1177,7 +1177,7 @@ class VjskReceiveFileMapper: CONDKEY_BIO_SLIP_DATA: { _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1431,7 +1431,7 @@ class VjskReceiveFileMapper: CONDKEY_LOT_NUM_MST: { _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ From a9b0b000567fc6b6c824cc626bda5f58565ca5e9 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 20:20:19 +0900 Subject: [PATCH 018/103] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=81=AE=E3=82=B5=E3=83=B3=E3=83=97=E3=83=AB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py | 0 .../tests/batch/vjsk/vjsk_file_check/__init__.py | 0 .../batch/vjsk/vjsk_file_check/test_vjsk_file_check.py | 6 ++++++ .../tests/batch/vjsk/vjsk_load/__init__.py | 0 .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 6 ++++++ 5 files changed, 12 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py new file mode 100644 index 00000000..e9f88d81 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -0,0 +1,6 @@ +def test1(): + pass + + +def test2(): + pass diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py new file mode 100644 index 00000000..e9f88d81 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -0,0 +1,6 @@ +def test1(): + pass + + +def test2(): + pass From 647d8836ff8012ae8c2169080e206ccc883597c7 Mon Sep 17 00:00:00 2001 From: 
"x.azuma.m@nds-tyo.co.jp" Date: Tue, 16 May 2023 22:30:09 +0900 Subject: [PATCH 019/103] =?UTF-8?q?=E5=AE=9F=E6=B6=88=E5=8C=96=E3=83=95?= =?UTF-8?q?=E3=82=A1=E3=82=A4=E3=83=AB=E5=8F=97=E9=A0=98=E3=83=81=E3=82=A7?= =?UTF-8?q?=E3=83=83=E3=82=AF=E3=81=AE=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E3=82=B3=E3=83=BC=E3=83=89=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/Pipfile | 1 + ecs/jskult-batch-daily/Pipfile.lock | 292 ++-- .../batch/vjsk/vjsk_file_check/conftest.py | 1226 +++++++++++++++++ .../vjsk_file_check/test_vjsk_file_check.py | 269 +++- ecs/jskult-batch-daily/tests/conftest.py | 10 - 5 files changed, 1666 insertions(+), 132 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py diff --git a/ecs/jskult-batch-daily/Pipfile b/ecs/jskult-batch-daily/Pipfile index a5d5dddd..1e6adf91 100644 --- a/ecs/jskult-batch-daily/Pipfile +++ b/ecs/jskult-batch-daily/Pipfile @@ -18,6 +18,7 @@ autopep8 = "*" flake8 = "*" pytest = "*" pytest-cov = "*" +boto3 = "*" [requires] python_version = "3.9" diff --git a/ecs/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-daily/Pipfile.lock index 519c60a0..10b5f555 100644 --- a/ecs/jskult-batch-daily/Pipfile.lock +++ b/ecs/jskult-batch-daily/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9bce8f43bcad5d6ae8e5a558b8ade00a83f6e1671993e91b0a883fffa6b95df9" + "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" }, "pipfile-spec": 6, "requires": { @@ -18,19 +18,19 @@ "default": { "boto3": { "hashes": [ - "sha256:816a198a6cc4f283af6b21439d85be6dbe4b73c2232dd906c6bafb4fece28d19", - "sha256:9de90a2c0b853f84436b032b28947fc8a765dc462573a8d543b13f16c6579b40" + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" ], "index": "pypi", - "version": "==1.26.107" + "version": "==1.26.134" }, "botocore": { "hashes": [ - "sha256:ee1e43e6cd0864cc6811ba3f05123647612ee3f07a286a4c94f5885aa86d6922", - "sha256:f63942b4b7248c0b3d6ecbc2852cf0787c23ace2a91a012f7ee0b3ae3eb08f4f" + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" ], "markers": "python_version >= '3.7'", - "version": "==1.29.107" + "version": "==1.29.134" }, "greenlet": { "hashes": [ @@ -124,11 +124,11 @@ }, "s3transfer": { "hashes": [ - "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd", - "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947" + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" ], "markers": "python_version >= '3.7'", - "version": "==0.6.0" + "version": "==0.6.1" }, "six": { "hashes": [ @@ -140,50 +140,50 @@ }, "sqlalchemy": { "hashes": [ - "sha256:07950fc82f844a2de67ddb4e535f29b65652b4d95e8b847823ce66a6d540a41d", - "sha256:0a865b5ec4ba24f57c33b633b728e43fde77b968911a6046443f581b25d29dd9", - "sha256:0b49f1f71d7a44329a43d3edd38cc5ee4c058dfef4487498393d16172007954b", - "sha256:13f984a190d249769a050634b248aef8991acc035e849d02b634ea006c028fa8", - "sha256:1b69666e25cc03c602d9d3d460e1281810109e6546739187044fc256c67941ef", - "sha256:1d06e119cf79a3d80ab069f064a07152eb9ba541d084bdaee728d8a6f03fd03d", - "sha256:246712af9fc761d6c13f4f065470982e175d902e77aa4218c9cb9fc9ff565a0c", - 
"sha256:34eb96c1de91d8f31e988302243357bef3f7785e1b728c7d4b98bd0c117dafeb", - "sha256:4c3020afb144572c7bfcba9d7cce57ad42bff6e6115dffcfe2d4ae6d444a214f", - "sha256:4f759eccb66e6d495fb622eb7f4ac146ae674d829942ec18b7f5a35ddf029597", - "sha256:68ed381bc340b4a3d373dbfec1a8b971f6350139590c4ca3cb722fdb50035777", - "sha256:6b72dccc5864ea95c93e0a9c4e397708917fb450f96737b4a8395d009f90b868", - "sha256:6e84ab63d25d8564d7a8c05dc080659931a459ee27f6ed1cf4c91f292d184038", - "sha256:734805708632e3965c2c40081f9a59263c29ffa27cba9b02d4d92dfd57ba869f", - "sha256:78612edf4ba50d407d0eb3a64e9ec76e6efc2b5d9a5c63415d53e540266a230a", - "sha256:7e472e9627882f2d75b87ff91c5a2bc45b31a226efc7cc0a054a94fffef85862", - "sha256:865392a50a721445156809c1a6d6ab6437be70c1c2599f591a8849ed95d3c693", - "sha256:8d118e233f416d713aac715e2c1101e17f91e696ff315fc9efbc75b70d11e740", - "sha256:8d3ece5960b3e821e43a4927cc851b6e84a431976d3ffe02aadb96519044807e", - "sha256:93c78d42c14aa9a9e0866eacd5b48df40a50d0e2790ee377af7910d224afddcf", - "sha256:95719215e3ec7337b9f57c3c2eda0e6a7619be194a5166c07c1e599f6afc20fa", - "sha256:9838bd247ee42eb74193d865e48dd62eb50e45e3fdceb0fdef3351133ee53dcf", - "sha256:aa5c270ece17c0c0e0a38f2530c16b20ea05d8b794e46c79171a86b93b758891", - "sha256:ac6a0311fb21a99855953f84c43fcff4bdca27a2ffcc4f4d806b26b54b5cddc9", - "sha256:ad5363a1c65fde7b7466769d4261126d07d872fc2e816487ae6cec93da604b6b", - "sha256:b3e5864eba71a3718236a120547e52c8da2ccb57cc96cecd0480106a0c799c92", - "sha256:bbda1da8d541904ba262825a833c9f619e93cb3fd1156be0a5e43cd54d588dcd", - "sha256:c6e27189ff9aebfb2c02fd252c629ea58657e7a5ff1a321b7fc9c2bf6dc0b5f3", - "sha256:c8239ce63a90007bce479adf5460d48c1adae4b933d8e39a4eafecfc084e503c", - "sha256:d209594e68bec103ad5243ecac1b40bf5770c9ebf482df7abf175748a34f4853", - "sha256:d5327f54a9c39e7871fc532639616f3777304364a0bb9b89d6033ad34ef6c5f8", - "sha256:db4bd1c4792da753f914ff0b688086b9a8fd78bb9bc5ae8b6d2e65f176b81eb9", - "sha256:e4780be0f19e5894c17f75fc8de2fe1ae233ab37827125239ceb593c6f6bd1e2", - "sha256:e4a019f723b6c1e6b3781be00fb9e0844bc6156f9951c836ff60787cc3938d76", - "sha256:e62c4e762d6fd2901692a093f208a6a6575b930e9458ad58c2a7f080dd6132da", - "sha256:e730603cae5747bc6d6dece98b45a57d647ed553c8d5ecef602697b1c1501cf2", - "sha256:ebc4eeb1737a5a9bdb0c24f4c982319fa6edd23cdee27180978c29cbb026f2bd", - "sha256:ee2946042cc7851842d7a086a92b9b7b494cbe8c3e7e4627e27bc912d3a7655e", - "sha256:f005245e1cb9b8ca53df73ee85e029ac43155e062405015e49ec6187a2e3fb44", - "sha256:f49c5d3c070a72ecb96df703966c9678dda0d4cb2e2736f88d15f5e1203b4159", - "sha256:f61ab84956dc628c8dfe9d105b6aec38afb96adae3e5e7da6085b583ff6ea789" + "sha256:0aa2cbde85a6eab9263ab480f19e8882d022d30ebcdc14d69e6a8d7c07b0a871", + "sha256:0d6979c9707f8b82366ba34b38b5a6fe32f75766b2e901f9820e271e95384070", + "sha256:0eb14a386a5b610305bec6639b35540b47f408b0a59f75999199aed5b3d40079", + "sha256:2424a84f131901fbb20a99844d47b38b517174c6e964c8efb15ea6bb9ced8c2b", + "sha256:2ad9688debf1f0ae9c6e0706a4e2d33b1a01281317cee9bd1d7eef8020c5baac", + "sha256:2f0a355264af0952570f18457102984e1f79510f856e5e0ae652e63316d1ca23", + "sha256:31f72bb300eed7bfdb373c7c046121d84fa0ae6f383089db9505ff553ac27cef", + "sha256:375b7ba88f261dbd79d044f20cbcd919d88befb63f26af9d084614f10cdf97a6", + "sha256:37de4010f53f452e94e5ed6684480432cfe6a7a8914307ef819cd028b05b98d5", + "sha256:49c138856035cb97f0053e5e57ba90ec936b28a0b8b0020d44965c7b0c0bf03a", + "sha256:4f9832815257969b3ca9bf0501351e4c02c8d60cbd3ec9f9070d5b0f8852900e", + "sha256:566a0ac347cf4632f551e7b28bbd0d215af82e6ffaa2556f565a3b6b51dc3f81", + 
"sha256:6777673d346071451bf7cccf8d0499024f1bd6a835fc90b4fe7af50373d92ce6", + "sha256:72746ec17a7d9c5acf2c57a6e6190ceba3dad7127cd85bb17f24e90acc0e8e3f", + "sha256:755f653d693f9b8f4286d987aec0d4279821bf8d179a9de8e8a5c685e77e57d6", + "sha256:7612a7366a0855a04430363fb4ab392dc6818aaece0b2e325ff30ee77af9b21f", + "sha256:7ad24c85f2a1caf0cd1ae8c2fdb668777a51a02246d9039420f94bd7dbfd37ed", + "sha256:881cc388dded44ae6e17a1666364b98bd76bcdc71b869014ae725f06ba298e0e", + "sha256:8d97b37b4e60073c38bcf94e289e3be09ef9be870de88d163f16e08f2b9ded1a", + "sha256:9119795d2405eb23bf7e6707e228fe38124df029494c1b3576459aa3202ea432", + "sha256:9136d596111c742d061c0f99bab95c5370016c4101a32e72c2b634ad5e0757e6", + "sha256:9ad883ac4f5225999747f0849643c4d0ec809d9ffe0ddc81a81dd3e68d0af463", + "sha256:a25b4c4fdd633501233924f873e6f6cd8970732859ecfe4ecfb60635881f70be", + "sha256:a30e4db983faa5145e00ef6eaf894a2d503b3221dbf40a595f3011930d3d0bac", + "sha256:a5e9e78332a5d841422b88b8c490dfd7f761e64b3430249b66c05d02f72ceab0", + "sha256:b4e08e3831671008888bad5d160d757ef35ce34dbb73b78c3998d16aa1334c97", + "sha256:bf1aae95e80acea02a0a622e1c12d3fefc52ffd0fe7bda70a30d070373fbb6c3", + "sha256:c61b89803a87a3b2a394089a7dadb79a6c64c89f2e8930cc187fec43b319f8d2", + "sha256:cdf80359b641185ae7e580afb9f88cf560298f309a38182972091165bfe1225d", + "sha256:d93ebbff3dcf05274843ad8cf650b48ee634626e752c5d73614e5ec9df45f0ce", + "sha256:db24d2738add6db19d66ca820479d2f8f96d3f5a13c223f27fa28dd2f268a4bd", + "sha256:e0d20f27edfd6f35b388da2bdcd7769e4ffa374fef8994980ced26eb287e033a", + "sha256:e2f3b5236079bc3e318a92bab2cc3f669cc32127075ab03ff61cacbae1c392b8", + "sha256:e481e54db8cec1457ee7c05f6d2329e3298a304a70d3b5e2e82e77170850b385", + "sha256:e5e5dc300a0ca8755ada1569f5caccfcdca28607dfb98b86a54996b288a8ebd3", + "sha256:ec2f525273528425ed2f51861b7b88955160cb95dddb17af0914077040aff4a5", + "sha256:f234ba3bb339ad17803009c8251f5ee65dcf283a380817fe486823b08b26383d", + "sha256:f463598f9e51ccc04f0fe08500f9a0c3251a7086765350be418598b753b5561d", + "sha256:f717944aee40e9f48776cf85b523bb376aa2d9255a268d6d643c57ab387e7264", + "sha256:fd0febae872a4042da44e972c070f0fd49a85a0a7727ab6b85425f74348be14e", + "sha256:fec56c7d1b6a22c8f01557de3975d962ee40270b81b60d1cfdadf2a105d10e84" ], "index": "pypi", - "version": "==2.0.9" + "version": "==2.0.13" }, "tenacity": { "hashes": [ @@ -211,14 +211,6 @@ } }, "develop": { - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, "autopep8": { "hashes": [ "sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1", @@ -227,65 +219,89 @@ "index": "pypi", "version": "==2.0.2" }, + "boto3": { + "hashes": [ + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" + ], + "index": "pypi", + "version": "==1.26.134" + }, + "botocore": { + "hashes": [ + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" + ], + "markers": "python_version >= '3.7'", + "version": "==1.29.134" + }, + "colorama": { + "hashes": [ + "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.6" + }, "coverage": { 
"extras": [ "toml" ], "hashes": [ - "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", - "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", - "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", - "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", - "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", - "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", - "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", - "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", - "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", - "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", - "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", - "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", - "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", - "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", - "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", - "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", - "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", - "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", - "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", - "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", - "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", - "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", - "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", - "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", - "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", - "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", - "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", - "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", - "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", - "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", - "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", - "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", - "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", - "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", - "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", - "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", - "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", - "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", - "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", - "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", - "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", - "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", - "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", - "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", - "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", - 
"sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", - "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", - "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", - "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", - "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", - "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" + "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3", + "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a", + "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813", + "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0", + "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a", + "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd", + "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139", + "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b", + "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252", + "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790", + "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045", + "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce", + "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200", + "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718", + "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b", + "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f", + "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5", + "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade", + "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5", + "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a", + "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8", + "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33", + "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e", + "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c", + "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3", + "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969", + "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068", + "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2", + "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771", + "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed", + "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212", + "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614", + "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88", + "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3", + "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c", + "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84", + "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11", + "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1", + "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1", + "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e", + 
"sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1", + "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd", + "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47", + "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a", + "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c", + "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31", + "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5", + "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6", + "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303", + "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5", + "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47" ], "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.2.5" }, "exceptiongroup": { "hashes": [ @@ -311,6 +327,14 @@ "markers": "python_version >= '3.7'", "version": "==2.0.0" }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, "mccabe": { "hashes": [ "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", @@ -321,11 +345,11 @@ }, "packaging": { "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" ], "markers": "python_version >= '3.7'", - "version": "==23.0" + "version": "==23.1" }, "pluggy": { "hashes": [ @@ -353,11 +377,11 @@ }, "pytest": { "hashes": [ - "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e", - "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4" + "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", + "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" ], "index": "pypi", - "version": "==7.2.2" + "version": "==7.3.1" }, "pytest-cov": { "hashes": [ @@ -367,6 +391,30 @@ "index": "pypi", "version": "==4.0.0" }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "s3transfer": { + "hashes": [ + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, "tomli": { "hashes": [ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", @@ -374,6 +422,14 @@ ], "markers": "python_version < '3.11'", "version": "==2.0.1" + }, + "urllib3": { + "hashes": [ + "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", + 
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.15" } } } diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py new file mode 100644 index 00000000..c341b2e8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -0,0 +1,1226 @@ +"""vjsk_file_cheak用テストフィクスチャoverride""" + + +import os + +import boto3 +import pytest + + +@pytest.fixture +def s3_client(): + conn = boto3.client('s3') + yield conn + + +@pytest.fixture +def bucket_name(): + return os.environ["JSKULT_DATA_BUCKET"] + + +@pytest.fixture +def receive_folder(): + return os.environ["JSKULT_DATA_FOLDER_RECV"] + + +@pytest.fixture +def init_check_received_files_ok1(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ok2(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng01(s3_client, bucket_name, receive_folder): + # setup + + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng02(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng03(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng04(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng05(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng06(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # 
Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng07(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng08(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng09(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng10(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng11(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng12(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng13(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng14(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng15(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + 
 s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/hld_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/fcl_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_price_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/bio_slip_data_00000000000000.gz')
+    # s3_client.delete_object(Bucket=bucket_name,
+    #                         Key=f'{receive_folder}/lot_num_mst_00000000000000.gz')
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py
index e9f88d81..44ece095 100644
--- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py
+++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py
@@ -1,6 +1,267 @@
-def test1():
-    pass
+import pytest
+
+from src.batch.common.batch_context import BatchContext
+from src.batch.vjsk.vjsk_importer import _check_received_files
+from src.error.exceptions import BatchOperationException
 
-def test2():
-    pass
+def test_check_received_files_ok1(init_check_received_files_ok1):
+    """
+    観点
+        正常系 : 卸在庫データ取込対象日
+    期待値
+        例外が発生しない
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    received_s3_files = _check_received_files()
+
+    assert received_s3_files is not None
+
+
+def test_check_received_files_ok2(init_check_received_files_ok2):
+    """
+    観点
+        正常系 : 卸在庫データ取込対象日以外
+    期待値
+        例外が発生しない
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = False
+
+    received_s3_files = _check_received_files()
+    # with pytest.raises(BatchOperationException):
+    #     received_s3_files = _check_received_files()
+
+    assert received_s3_files is not None
+
+
+def test_check_received_files_ng01(init_check_received_files_ng01):
+    """
+    観点
+        異常系 : 卸在庫データファイルが欠落
+    期待値
+        例外が発生する
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸在庫データファイルがありません") > 0
+
+
+def test_check_received_files_ng02(init_check_received_files_ng02):
+    """
+    観点
+        異常系 : 卸販売データファイルが欠落
+    期待値
+        例外が発生する
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸販売データファイルがありません") > 0
+
+
+def 
test_check_received_files_ng03(init_check_received_files_ng03): + """ + 観点 + 異常系 : 卸組織変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸組織変換マスタファイルがありません") > 0 + + +def test_check_received_files_ng04(init_check_received_files_ng04): + """ + 観点 + 異常系 : 施設統合マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設統合マスタファイルがありません") > 0 + + +def test_check_received_files_ng05(init_check_received_files_ng05): + """ + 観点 + 異常系 : 卸マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸マスタファイルがありません") > 0 + + +def test_check_received_files_ng06(init_check_received_files_ng06): + """ + 観点 + 異常系 : 卸ホールディングスマスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸ホールディングスマスタファイルがありません") > 0 + + +def test_check_received_files_ng07(init_check_received_files_ng07): + """異常系 : 施設マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設マスタファイルがありません") > 0 + + +def test_check_received_files_ng08(init_check_received_files_ng08): + """ + 観点 + 異常系 : メーカー卸組織展開表ファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("メーカー卸組織展開表ファイルがありません") > 0 + + +def test_check_received_files_ng09(init_check_received_files_ng09): + """異常系 : 取引区分マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("取引区分マスタファイルがありません") > 0 + + +def test_check_received_files_ng10(init_check_received_files_ng10): + """ + 観点 + 異常系 : 製品マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製品マスタファイルがありません") > 0 + + +def test_check_received_files_ng11(init_check_received_files_ng11): + """ + 観点 + 異常系 : 製品価格マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製品価格マスタファイルがありません") > 0 + + +def test_check_received_files_ng12(init_check_received_files_ng12): + """ + 観点 + 異常系 : 卸得意先情報マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert 
str(e.value).startswith("卸得意先情報マスタファイルがありません") > 0 + + +def test_check_received_files_ng13(init_check_received_files_ng13): + """ + 観点 + 異常系 : MDBコード変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("MDBコード変換マスタファイルがありません") > 0 + + +def test_check_received_files_ng14(init_check_received_files_ng14): + """ + 観点 + 異常系 : 生物由来データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("生物由来データファイルがありません") > 0 + + +def test_check_received_files_ng15(init_check_received_files_ng15): + """ + 観点 + 異常系 : 製造ロット番号マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製造ロット番号マスタファイルがありません") > 0 diff --git a/ecs/jskult-batch-daily/tests/conftest.py b/ecs/jskult-batch-daily/tests/conftest.py index a03a8638..d2afff68 100644 --- a/ecs/jskult-batch-daily/tests/conftest.py +++ b/ecs/jskult-batch-daily/tests/conftest.py @@ -1,11 +1 @@ """共通テストフィクスチャ""" - -import pytest - -from src.db.database import Database - - -@pytest.fixture -def database() -> Database: - """データベース接続モジュールを作成""" - return Database.get_instance() From 4af4a6ca60c99c983e35c39c69a67ba1084ab0ac Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 11:46:15 +0900 Subject: [PATCH 020/103] =?UTF-8?q?=E3=83=A1=E3=83=A2=E6=9B=B8=E3=81=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/batch/vjsk/vjsk_file_check/conftest.py | 1 + .../tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index c341b2e8..cfab500f 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -23,6 +23,7 @@ def receive_folder(): return os.environ["JSKULT_DATA_FOLDER_RECV"] +# TODO 共通fixtureにして15個固定でput/delete、各個別fixtureで15個から引き算でdeleteする @pytest.fixture def init_check_received_files_ok1(s3_client, bucket_name, receive_folder): # setup diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 44ece095..43062be1 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -19,6 +19,10 @@ def test_check_received_files_ok1(init_check_received_files_ok1): assert received_s3_files is not None + # target_path = path.dirname(__file__) + # target_file = "xxxxxxxxxxxx000000000000.gz" + # s3_client.upload_file() + def test_check_received_files_ok2(init_check_received_files_ok2): """ From 7a7d5597407b24d946be9183dce687311963ef67 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 12:03:51 +0900 Subject: [PATCH 021/103] =?UTF-8?q?=E9=96=93=E9=81=95=E3=81=A3=E3=81=A6?= 
=?UTF-8?q?=E6=B6=88=E3=81=97=E3=81=A6=E3=81=9F=E3=81=AE=E3=81=A7=E6=88=BB?= =?UTF-8?q?=E3=81=97?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/tests/conftest.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/conftest.py b/ecs/jskult-batch-daily/tests/conftest.py index d2afff68..a03a8638 100644 --- a/ecs/jskult-batch-daily/tests/conftest.py +++ b/ecs/jskult-batch-daily/tests/conftest.py @@ -1 +1,11 @@ """共通テストフィクスチャ""" + +import pytest + +from src.db.database import Database + + +@pytest.fixture +def database() -> Database: + """データベース接続モジュールを作成""" + return Database.get_instance() From c1c9fd68b53178b31a52bc56adf0d807925ea1e6 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 19:24:45 +0900 Subject: [PATCH 022/103] =?UTF-8?q?load=E5=87=A6=E7=90=86=E3=81=AE?= =?UTF-8?q?=E3=82=AB=E3=83=90=E3=83=AC=E3=83=83=E3=82=B8=E7=A2=BA=E8=AA=8D?= =?UTF-8?q?=E5=88=86=E3=82=92=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_file_check/conftest.py | 2 +- .../tests/batch/vjsk/vjsk_load/conftest.py | 41 +++++ .../batch/vjsk/vjsk_load/test_vjsk_load.py | 146 +++++++++++++++++- .../testdata/bio_slip_data_202304270000.gz | Bin 0 -> 2868 bytes .../batch/vjsk/vjsk_load/testdata/dummy.gz | Bin 0 -> 107 bytes .../testdata/fcl_mst_202304270000.gz | Bin 0 -> 2009 bytes .../testdata/hld_mst_202304270000.gz | Bin 0 -> 500 bytes .../testdata/lot_num_mst_202304270000.gz | Bin 0 -> 303 bytes .../testdata/mdb_conv_mst_202304270000.gz | Bin 0 -> 426 bytes .../testdata/mkr_org_horizon_202304270000.gz | Bin 0 -> 729 bytes .../testdata/org_cnv_mst_202304270000.gz | Bin 0 -> 402 bytes .../testdata/phm_prd_mst_202304270000.gz | Bin 0 -> 1134 bytes .../testdata/phm_price_mst_202304270000.gz | Bin 0 -> 413 bytes .../testdata/slip_data_202304270000.gz | Bin 0 -> 3134 bytes .../testdata/stock_slip_data_202304270000.gz | Bin 0 -> 899 bytes .../testdata/tran_kbn_mst_202304270000.gz | Bin 0 -> 419 bytes .../testdata/vop_hco_merge_202304270000.gz | Bin 0 -> 286 bytes .../testdata/whs_customer_mst_202304270000.gz | Bin 0 -> 1307 bytes .../testdata/whs_mst_202304270000.gz | Bin 0 -> 499 bytes 19 files changed, 184 insertions(+), 5 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index cfab500f..0778241d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -1,4 +1,4 @@ -"""vjsk_file_cheak用テストフィクスチャoverride""" +"""vjsk_file_check用テストフィクスチャoverride""" import os diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py new file mode 100644 index 00000000..ea29eb63 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py @@ -0,0 +1,41 @@ +"""vjsk_load用テストフィクスチャoverride""" +import os + +import boto3 +import pytest + +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper + + +@pytest.fixture +def s3_client(): + conn = boto3.client('s3') + yield conn + + +@pytest.fixture +def bucket_name(): + return os.environ["JSKULT_DATA_BUCKET"] + + +@pytest.fixture +def receive_folder(): + return os.environ["JSKULT_DATA_FOLDER_RECV"] + + +@pytest.fixture +def mapper(): + return VjskReceiveFileMapper() + +# @pytest.fixture +# def init_Load_ok(s3_client, bucket_name, receive_folder): +# # setup + +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + +# # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index e9f88d81..43afd62c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1,6 +1,144 @@ -def test1(): - pass +from os import path + +import pytest + +from src.batch.common.batch_context import BatchContext +# from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager +from src.batch.vjsk.vjsk_importer import (_check_received_files, + _import_file_to_db) +from src.db.database import Database -def test2(): - pass +class TestImportFileToDb: + db: Database + batch_context: BatchContext + test_file_path: str + + @pytest.fixture(autouse=True, scope='function') + def pre_test(self, database: Database): + """テスト実行前後処理""" + # setup + self.test_file_path = path.join(path.dirname(__file__), "testdata") + + self.batch_context = BatchContext.get_instance() + + self.db = database + self.db.connect() + # self.db.begin() + + # testing + yield + + # teardown + # self.db.rollback() + self.db.disconnect() + + def test_import_file_to_db_ok(self, s3_client, bucket_name, 
receive_folder, mapper): + """ + 観点 + 正常系 : すべての受領データをデータベースに登録できる + 期待値 + 例外が発生しない + """ + # setup + self.batch_context.is_vjsk_stock_import_day = True + + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz", + "dummy.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") + + # assertion + received_s3_files = _check_received_files() + _import_file_to_db(received_s3_files) + + # self.db.connect() + + # # 検証 (卸在庫データファイル) + # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) + # result = self.db.execute(f"select * from {table_name_org}") + # assert result.rowcount == 10 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 10 + + # # 検証 (卸販売データ) + # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + # result = self.db.execute(f"select * from {table_name_org}") + # assert result.rowcount == 10 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 10 + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + # def test_load_stock_slip_data_ok(self, mapper): + # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + + # # setup + # self.batch_context.is_vjsk_stock_import_day = True + # self.db.execute(f"truncate table {table_name_src}") + + # # assertion (insert) 
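+ # # (補足・仮の想定) VjskDataLoadManager.load は {"condkey": ..., "src_file_path": ...} 形式の dict を受け取り、
+ # # condkey に対応する org/src テーブルへ TSV を取り込む想定(マネージャ本体はインポートごとコメントアウト中)。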
+ # target_dict = { + # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304280000.tsv") + # } + # VjskDataLoadManager.load(target_dict) + + # result = self.db.execute(f"select * from {table_name_org}") + # assert result.rowcount == 4 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 4 + + # # assertion (update) + # target_dict = { + # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304290000.tsv") + # } + # VjskDataLoadManager.load(target_dict) + + # result_org = self.db.execute(f"select * from {table_name_org}") + # assert result_org.rowcount == 4 + # result_src1 = self.db.execute(f"select * from {table_name_src}") + # assert result_src1.rowcount == 6 + + # # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..36d1af4593f1b83a34e64c586b11a47eb6cae724 GIT binary patch literal 2868 zcmV-43(NE$iwFn=tX*UT0AguxUvq3}a9?C$bYWjIFfubRG%`0ZFfcGKbYXG;?VDLs z6G0GwebryF<(m(&X7*m+#H+k5Ewy|~q2di%Nxa@9@iv~Ih=TVm9z?}RlJ&nBHZf28 z7wqZTD+z?GiGa!!gr44+*-dxPx4pB&?t@38@goP1MfY|lI-?>HB_fL&fwD9aKe3=K zg-}8?OCsrf6Z5~T~MEQ_iv6DotAilAE{kOebPj>i+7u?1vC&NT;%o%_Bx*0l%S zzjz^vClADVf#)o?YF0qbFmB zYcz3Bb3E3$KiYFNI9yM+Q|sx8A6~uZNVOum4>X;7_r@IEk)w%d&+%^RR6Dy9wEBqX z9}%ZKB01veZuVW`okzO(4B69}fI7~YeFs6=)9qHf_a~g|K6;W31`_*r&Inznn2}lE z6KEA~BiBBBm@3nrV3yvDHUCg`yTd(hJFgLMz|6u`U>i?>V_e`JU9&|`K*e{PZ}9^O z?whVy%<BKAy{=R?U&@m0WSf|bIu6@yfwVaENbsh6O?Q48*`^q|x zvy0nzWIy#q(HA9Olzmb0Mb#HIU(|ik2%LfhWEzmgfGh=MIUp+mSq;cqK-L3t;JhL` z@8LtIk3uA|kU1RRwEJ+C2fFqI=2$%8`r}|v93svUj>lt*7cE{XE25zikTbZC=ICVcYDr3twzA1v2|Ad=&}V zxJIBmgvp9s_-+?I*@bcF%>?rnbV4~}7shOJ&^FK8<~0}$+F;8u9QX>JEom3B&iM;@ z!8XsZ!-X82RL~Sj2vs_k@92O#l3OLL)DA(gOsxd7f+gXTF!$Hk6Ewa_q2gK)n_}#s zolzj3=vW=3)(i~;v|?-M-d=OVfUd=T4L3Rn^^Lie8yxb?h6AjF-yQ3=t&eo9-5gyP z-MDJ&iuFGz7suZWZ^)xe)z3ey{{zy20q~Woz z!`A0(*42Sh{|9S)05muXFKliXlTV7Nx5d=$VrrzAN)?k|N{`-H`Rfz;d*x?lZJekY zT!3Q0A1c8$DtMJvI#(V{mOkB{7$1i|*8Qi&-o9e*`C{*tVzLhgDxNu08hBnB8mU~# zm)}1w51gwEK7o$R+2&ol@E#-J8oD{pBZ>)y5pb(p^@9LDp`*|sK=_C_`Y`*tlthTG zh(h`K1MB%kFiCkR?HDGTbcFOczqrbodv^ZjkBhhL{4M;v8Jr$dg}zYw^ve2p*1Gge z5Wu?K`yuq#F5ke-P5B>*8I%1wArbr`ZT{sUQ#HIOMTi_B5?tRhSzLybJbpT!=<2S* z?i$L9q-mg$iD*bfl>^w7B#|ls?6OGJoM~*Hi?E4X_-vbL=7_==Gmd#1e1YbA2Ms#7Q3~nXgwpWn#P~=hRp3Sf zLQmu_1Fh|R9|tpj!cTwotnp`rUHItD388%N{Y3T(aN3&=f&dd@?RJ4~S+%K()Z$-3 z>K#A_5q^kDWC%FTFqB||tb#GrYEnyb2v`_oYFO}_Z!acmxrpn=0=oA$4;6Q`H1l@h z3(wAA*lJB)K<$ui4j?GZKK$v$o9uPqgBDQ)ZM86cS+m+$5g;?pG14NZgBE5Q-^ft$ zY}%01jvvBtx}oV|sNc6fbpBHgfT3L<_!{np*`u_Ml&0Y*y~C%pY%sa};<}Z2Tpk&j z$e#IqM7zP`=h3<@$q-;LqsioXbmi^cO7aEIp!r2uqod_(SDK@AZ3gYN@iOT8$$1G5 zPl{!o0+QG**fGXXnhTIYmj+({yz~M1Tplzly?v!GLoTB4;Ag*`za5+~p%875NfAxS zU%}{g9gH9nG2olSD7pqK89t*GQBrl|&t=o#x}^JDbk*WqW%DjUIlOmAT!!-*6hz#7qXLUeemqe?8qf*p#%lZ%e?ljY@uZk@7kz#=7z+X(`%oVtsQik!&Ejf>5`oZGCtt0o zjOWVz4}rDT^=GBb6Uc(?{D8Mq(e6Z;{soX-5BV89Ov59Cs2ZvQ?o>1?2O+Czps9i^ zB_U?M)Ho6DLqA`_J81#s{+em;H1!^U&f(1e21D~F^q2#tUhRe_s?Vf)jV^)XjTJ+{uhtz5WTJ~LA8 zABQWm-Va&V9^&FfpPj#33vWt7!CRU1KYz2_Re|D)Auk0_#OC=$|JH}oLAbzK*OTkO~&aGCSU z312NFUBC2t@1ERA`;|_$2Ml47<*%u{*AYmB;o!ajJk4->_TRMIE`@jg8^mC|OxmLURiX!L zs!a#$XLl-h`rH-no1D}&69#V}AN}c2_`s(_a6cZMVn3ezZ!)^|ZP1~~FmqUNw&~6^ zou}Ox&Hd1pjnSd4Khs=xd@zT&EF+h-c`nZZF1NTz`fq?TrJBNahRLF+QMRo^(Ihzt 
zWtlB{{E@^N1Hs*Bo92E+c^G?UptHLjeiTo0!{4`CzfTtrpW^|-b|AR_LNO-D5gLJ4 z&^T6>p9zNJk8U*s1^c10AFc-d{}8_4PTsmY!Dbs2+KBKff1rlDBxHh(UTb55-GZ;6 z`&@AsT#DndIRtzKfv<-FJ_{0mONn8CJdwW)dyHGyPr`%D=R8E0LNyisUl<_4PX6vi z>Ct0{^Y;std-pl{Vdq@=`6cVwOK1StZ*Zu5ezf%Et9K)Cmrp)`r|a(p-2LdDp3ewGX9H`Nnka<;OX0$$w)$i zmRc_C3X|KNp1$Y4Z*#iuLgr#Nmk~_p%n&xiIF`#^J@6X`zfPR+50Z1AV6sA3U7Baxyb;353*1E^{ge zvVY)|BKj^|%Jw(ufs1&YAIQ*m_DrrtM2i>C52`cIWzT0W4_@laoIg2$>FJBP%!yN{ z28op0quJ$4*_?Xg^5E$f)jYcIZ0@?EHCPdoMnQbx4)E!~sJ%&F)PRk_sGzQ#PlKAYr z84gD7Hl{1}l@EgTY!5qjg2@bY1GHLRAxsvB0<<zumqcx)14z&jj)QBH4J; z#C{XZQa>)a1Z^m+mQx*Za?R325(r3LK4Lr(-GYRb?x>h!387nt<`cV~!`56uOk$=*>i4)^c~! z+7`XKhPSzC>l)XL;|ZkFy3!?Lz@U?uG5uNC%uXA^Ff4A<;exX+u4X%?p~Jb-yw^@JdP+m5%qnEvo8LY)40G_DVr%9r7wd>ik0mM1B4}a@_W-uaGX1 zaU$~jGLCE2^Bfq_b|)L0B9rj_cu17eP2nWnIWN#}?z zrQAvD4ikoD8j1EyFjKTE9cdNGwY{JRPu<47kziw~zO?D*S5;+ijIN;jsFhcO#nr~l z@Xp)|oS?F%s>?r9*0->krsFD9)K1E~Bh4z0O{#k$y{^!4U+CyjY>k3pO(oWLa4M5R zp18j6|G?A{1-Px&!<{v*Gb^Pu1S=}WWhyE?pRO8i?;MHxkHZuM{jw8VZ_nty) z@Rs_*Bxq`CCZaSKHwnXWztw8#BNv`mv7d@|P0G1<2Tz+hnw?-T6)bCC5(TD7AeeT+ z!el5L+;JQ_N+@3t9SsMo3ESp^8T&U}eSTzTZX#;3zRFIj_`)yVj$4%WMKZ(#3YSe4 z(XBSB73p)KHwAm`Ze6pfo?t33Tih|3VS~jt zwaSC2)=eg=-0WVRkDGJ~CCg0`_zQj342?VZBv^N9dSVcud*Wca4uf(l>U&t4?AO`) zmSOA2^p4T1+JfiK>@qdb1=T&1-VtqXz5Ki}^gb@Hk!)Ke_kHR+pQt_KM<$@{&P;zZ^I9OcXDc(a}jg^dNe2t-Sd))?QER~#t z^RB9)8s81IL{J}tP}K!~py??8#}gQ(Ib^uSP(in&#p3<}GYkEI&P?Xe;aoU-H3j%* z4Yw^DotxG3e>OjV>H137ny)Hcf7cajg03|a?HOVhzg;(`yqaZc7#_5I5qh6Rt-V9EBfG!0A-x rV_=~7kBu3Qa2$CRQuUtcZHeQ+|M7qPAOFYyksRLv3aT>602}}ShH~>@ literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..efedd921426c1148d8eee36540901f014c0e1cc0 GIT binary patch literal 500 zcmVgiwFoqtX*UT0BCGvUu|=AUotQjGz?i?+QbkJ`-ZXd z>#rprSO=G}7|B#%zyv7fpmUnadGL<`1i(2DV4dANMU(`HQXEo35azlcQQdw5!5%V` z>G?E(y?oaJ=KJHTH5bKG(2H9~EjjCoWUL3tNWTpevAWAax*in2RVw0C5Bo{H9>j7Y z#_?2-rv09fsT9L*qCZ_Ea@{bStU8ZdNdMVL7m91ScvJa}%0E^9sPZe7-KqRVWzQ;m zUtTW&wTmW|J*fOiWv_p=s{B@E_qy9`SuQP}=QbeyIzjjh;0(5_D=F3wE(!FN@i-Tw1kYs|D9uu+@UMEX2h^yewwRa%t5~i))t0k zbluRpHblo@1cSj`;N`iz1`!M>{k+=TY;a7+R#Iw; zgo<#8YK>}*dd0ZZ={XGFZsXl!PQw|Ep{i7)CJkZMSZYc&I-{vsm1?StreRyEG2>hf z`qHEVF5VlP(WJXqs;M&?29;`>Ni!cfBUKb?2pJ8d{#hdy78Vx%8=o133Tprg001U# Bhw1c3*9CbYC(sGBYqVGB+?VFfcB3VR8WNl3Pl{ zP!NXms;ek@ugvXSH*>I!V6Ycy(t>OA!6mr`=k%_&L5dWS{BX$3obb(0@=vQ#8qa5| zG;P{MKny|-RCjH&DtRGcH zFQ2Pi+?Lh>QE(mrF_^$;^cM91u%HG_a;=AQ2rS^Pr6KST!T>oM8!?=kZTSq~5s|DB zZz%)Te8r;>9Qf*C&Ce`!Lxdr^0lEk_8v4YSsAHCMeBALR1oxc}Cs=%Hw&!D}i5K+N z?ESMP(zO@*=P2PEAGdtA;^7DWGpm!&4A!v|@`m^Rl`XpE68|O=L^kL6xZ^w4m%98P zSlxWj&Kgp_Kgv(g|IQcXN5knOyK{Wp@{w(jPX7m1Ctn~~1S5jhANxlExy(OtBr0!< UUcGwt>h*v10l?2|!vG8b0P#W2IRF3v literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..835d236007b3aa749ac63f3d9bf9ef9681cf26f4 GIT binary patch literal 729 zcmV;~0w(<*iwFoetX*UT0Bvh>UvF|}UubV~X?kyNUotQZe$^pE)mmp?sM}$KnVXEROs?X=IDIxoL(NmN2(5<-?TI z!WCb^b9yWsb-v(unaO!>`q2KMrRRT6yPV8e>CAm=DV<3?Os6aa5C$qT0IfS&JEx4H z00h7=3?6Zm(GBc!u>uh(d@sL;^%2#395SVgX_isv%T4R0XJtPz#~Pp(a30 zgn9^d4s`+QA~Zs1aA*k75MeZgQ4XU5jEc|yxa*dp9_g+-`mru#7JyP82Up|w3rW^rYM$khO zsDMpSlNLl6epAq#2kJb8a}dTbh-oI%IM|2gpb^th3`f7q2*fPNq_EQ~zwve(e(9vs 
zu69q?#jyNz#n5CEuXkAPIiE?T=pVKGdev_}_RmYwPQ^|t4x=s^NN4FeI3j!dVrV5C zE1cA5(|0g7+D_GVp1ApaH-GFp$G@HVC^#`)D7@w;H-GNt+pgod&T~>|`7iU5&y-Ip zCL=EjELHqT=GSVSi-H(w$!E$Zl@X}2p2m^B#PERl&4#y8`{f&iKH!63N%c?asv!+q z6N76E>8MW!XyQQvz0yZ=u_MXVSpi`OdoO!-a82xLD=wl_)ffHXGf>#BIIlFyWnH6w~FS+Zow`p@bs LCB`z%03HAUu3ciK literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..89eb4a65c860810433383f7aec3b85d0c884d501 GIT binary patch literal 402 zcmV;D0d4*tiwFoatX*UT0B>?dERpN z)P{Uvvrac}r@UmG0`ehrT#lHq&;DD+|)V}rP=_b5cfu>xDx9z#)G578HhAG>6CTS3_-?8Dub{HI%;&W w>2V-6a75>DBy1Ukzd@%u681Qd890*Xa6}BMMvWRZYScya39mPJQ~(SB0D^V7aR2}S literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..169f6761a19b359dec7013fb730d8675dbeffc6d GIT binary patch literal 1134 zcmV-!1d;n6iwFoWtX*UT0B~q+UvP3{Uu|=AUotQh=`FRcs%&hsNhW@l)oQPXAgYlLGNlrcs@iT`*I#^IGXJRJ}2$Kh4-`Sgts zCvFpB($+r0>9+iOFoNGkgBjGyqf69(69o^W;C$4-69rGv14B$;t-lq|=fGz+_x zNs&3!kp@g8&zYTOEKRe7Svk|l35Gf+O`SAz2xB<`I&*}{u>V_3HWgEt%mpxU$d}2O z64UEAycUPY;=ytpUWy0L02%po`O*8Ar#?Qt8;57&@H+A%;8xmBSglae;F#l3i)Q80 zpGCpRD0mwM52F5EV2}g=`7)y9a?!(RavM&l3?@QPAY}+q#+FPOa?m-mF`@}ZbBd@86~%N!HFWI!)FTBeGolG+Af5R@cu<7JR=Z3(@{CSqH)VLd~+7WV17B zmgVTM3oy=19TzwPx};y;i3e{1F@bzPEFaZfQxT;*O8G6CsjzL`IC0q{5HA2O`hruk zrh0W_ert)8NxodmNIj98U$;+VW(;(c43)t!n+bZdtVUBQ;KM}Oe0|hfsE=~^ENj2y zLvtw(gLnY7ctRHGxp9cLi3rCCGMiIN=8}9i9wc4wtgO%*HQPkPLVa7QQmvPXyoERS z9T&8({TQ2k_qLfp!-z?0Y#wCUg@_5pvO{7gvE~NLdRq@(%FTUIa=ZF{Zq|s~IaoFB z>HcyQ+?NENf~(|IE7XhCV!cqPB;3TeT+q7s=b3iiwFoRtX*UT0B~q+UvP41V`X1$b97%aFfubRG%`0ZFfcGKbYXG;?NUo_ zgD?!L^M6*`8bK@_B* zlY&Xn#E4dqm1;_bx-Np$S;eqs*f0zfgug1|j?zLtRU50&(966PnzUl9)Yn*{p_O?j z^gky>=yOShn-wCHx1}Pr1(>$-SA{y2T&qmqJC&0P4ZX?oGx?QjMS`%xKSipuwcQ01)Yh`hg%2vrqg;^Ye0hc6`RdwR662!sl!G)6P8dwQAq`t|oszZoYkcV3CLwIo|&x~3bNsXH2MuO<_H zO*PvzrfH7jG-kMxG#@4xb zdutzefpaf$GxqJ*iCAk}D3Q2&u{fhQeo1At0)nAbZ+Ev!CF3nUv960fnCy#vooMU% zs=F=rZM>@_CjmKBt)L5 zh_|=LI+N|;ld%ps+tSt+FU`Bsn@E=CCEG8Htc&d(c)_vnyW2upd!jw8y zT&}A}mJ13|0e-WT1$P`6RK7zZDM5?%K|+kPuw*NIywJem`k0RpCWFD^*+mIaqYXx# z@U+2Q>Ly= zLz$*BEoIuubd>2TGaBkb3)w7W>ml0+*=ESLLbe^UosjK@>`=ct@3-rl?|T6b?oC~Z z4=;8V0oUHjlfsu1NeN?TPXe4#7*`YVb1$EJ6Bt|L{}{6wJ8wp4)0k^Gh7k#Epeu&P z48zu~C}T|H>4+J%4NK!T-*osHaEdlthjislH0fuTn*H2}pL^)%Qhsj0&#wE~te@-m zb3ggnWk0*==O+B@-+p!t8olriR+5&kUHITj_JyCj;b${YA6vN=s=NYw+4^0~dG6;P z@Fs*}xt42{RL3sIpcu46wG0D_;K>I%mV5}AkwZ5C8Bm2i?j*YEqyU%_0g7NV^eO@y z8wpGSJ+BNKwkz=lQ$mi3S^x{p90yydcDm3)2dCGvxOA`tqzI=tFNZ_dIri_t?~C_7 zeLwbI?7~~0yzxFgdeJFa3>%<4JsKL#z+^D{Oco5g_vHD``VHwYaHM%*%9>{DDENx#~F7-9rfsW0nVOoxDM@`-3 z*ToUc&iUC@9LF@c3t>vWLO_;*p24#@s3Jzcg1cnrOWh^A5V#9;3>V8U`M2@H>~Mbh zr=3iO?$R5ZDoi}uyZ`HUDn&Uv>+{~sLSb?cGHIZi;m%paOHbxk*7o}Eh>Gh2`RTdB zM8@B|1AqSx-S#(cho>0L4nMo7=12A!&5uZa`Kh;f-5Z|4$;;Q(gs)d7d^b$^y6L(N!ml~u^X3TbV#1e@HwgbEEuRkI8%W&^!hc~2-whGIm}H9a zb&FY=8KQjM)lEk~)0_|9r7^f8*q?WWdt1x|L@RKm48LSem(^LbPUOvi8;I;!qk zj$Q}OzYaBVf6veTfUV@9LM29$d~drjvzb#=HTw?~DbCr1KWYGS;MxL8Vaz!Kf6%PO z2`8@$=8-dX!*`xs`q9?{C zn^Yl!cs?VX_bXbF-1C#Oz{R^OGu~*wH$TZQFqNXa2{Hphw_PN3Cqe}Z(`hn*(7nYe zFSm@GehdF5PfX}IljJlKQ%Eto;W)Y#euy_YdT0A7zFJ0yE-Ev+=7ty@3Pm;3Z7_N* z8J$C4@)R#!Q9<5d^pmxII*iWqnA2eNv&ZOeh|p;Uz2J}N6qYd=kotKh{1!YQYPqbO 
z&^3V0arTMBJMhz^wk?A4j;+@tEOQ+v?ifzopRw8o_|ZXx4iCDnWgFm;kWI@-TwHFV zH>}W-9@JN889B;U9Fmd9s*?iTYCh2IrhlkoWKyy9o)m;trH6ccNRHAc_zrSyJ22`Y6nD`Nvlx9`gXCeHMj=&~udjKX z+#u|++%&m+xJB^@nH!=qNIv6olB~egV$IAxx@bt0)ky(#Ric4IAVmXjAkf%-PE@%xABya%;;`JP zpL+x@V_LFO1kBIniiZ}oICB3W4WyuIz+m#R@)|nqX7FeO#V8Zi0z+AjF5hgeK>x%s z#W|ZLFF0{d42CJkXx5vZgLwwPlkp;nxH>67r$m74f$<_S9FnxSA(1CjC29pWGn<8hOX+-hq#@ZDF;)Lq^i~++?@j9fRGRO8+dc-J%3E3SwTMZ(s)u~Yhw5Aobtr5 za6~Q3wRI3DOE;t_r)AJ+jVIOeZmz^Z0yUsA_*C*ts>FJv+S2>FSD6}aaVZ>F17r8k zEg*BaJ8vlkx^loNZ{`m!D(Ojpi^jK^4{$_)Ih`g0G=^R}!)j#jr!`QTU>$<{$>KSz zrXT1{FaaZlsfC^O5fGbT?%|Ys_wxw*_^l6xV2By4j?k_aK01~jmB%wJHS`+7WB|%} zDJ~W~#zZx@!eRuXCkET+#WMOYtQ4bxQIEw4)c+Zaj{Dgys78ZY@oSe?V0fj6u44Fy zKVq7CxDm-oH{gQg=N0`jwEd7850>Pt*=RNnjfsR_NJyO3Nda{=FC=IXik2f1aBfPw zrpuAzq70Q!A7tp{bX%uesIj2n_eIqOwa|Tnm57Noyv#Lkd9^S%x3iwA`UQn>KBXz} zdcxw~!ysa&Rm8REGosuu+A7i|H9^I-jFb&P{oI(bp;R zOH_k})+x+%3|*_RPBB2U9kV==CDjAp@xW17r!OAal^`H{D6P@G@y#;$D_?Z*ajY;- z4_O!pS;Y(4P#-pug-zZzgO42DBYosp9Lth8tCIriYGPSp5W=eyi8q=uQA6>l8fB`vdgWqcAoC5xcMufI5_C(m*xyky2l(2JnD7 z?2gRtjt;|{aH$IuxW9LDb&zkl<*`&7N~GazFDs6r3|3`jFhhC(4T5gg;p?VD9qs?B z43;D(vsf7d@N4;w%@508y6m6%hO8;Umpg z;Jul|j*Ch{9FiXLq}XpJW555r-)s)Tc6HYa8udMI%MX2z5ypvNE&;m~2K%`Q7@>r? zuA3v)(s%gNz;Ig$u?#xE^@b zt${+&-ApQAz<7HNzgp{+?Et#j?EhbFH=9XozZn{DAKUGEt+p4`cUrxw8-6Qkx!tMN zyn0w`+H&uWf#0avfm!wOO=h5O*P*i2>=&CmA-otLYae*6F4or$u}dL(osPHN*rAEb z5|<~gNL-n?PU5yaIOE??2G?OC#r|C#(bx*5TTHVsod*h#Ibz7@vI{KiapISZD>N)u25|iZ>c!X|1 zYzPNef*CMD;cnxC6Nd)hMn}gM@HRmiW?|tJ^Slbbjn&H9LS_BR(wevGm6xx=z25>C zp2HB^#Wltxsx+5=JtM+&>mPoMFA%;!MV^sGK^G-rtz4_DFPAMGCJ_#E(r~C<5J^u7 z)zNp;>-#!-r`55JK5F$?t4BKeoTNe>Y4ujCH$obm6jBA%;xt zK8OcFxoJ?bfd0~#ZAOhCCOatXjl*XG%I;B=jst3hZDBYlvqxDCnLv#oCOas6))6xS z#l`qDYJ_cJI4CYAGowZjlO2>qP;wTaI4zn3m+XU5ug!Y@)hzrEz&9zmV~@K!`erhB zTq1DlMRT=!0*%Il1i`_#@zandD`zA0kkz<4yq- zEwMpR61E1G{(e#qO-hd7YDcfh)?M)N8hwH0cI=wZM_2w84ung?iG!Z;%`_PS*@2)| Zr}+PXt?|0HD!Db zFo%8>zg61--L9NL*6j|e=&C0M_cJ)j;3R_w8JuMB9s~UhBpFCDFvviXfjx%sGX%*H zBtrxlf@FvuL;4w#WJr=BgA7SBq+{?0x9fBMUMzl94$^5@IAYgTa7ts{UkjyF-SnPNFem#*F!! 
Nd;xuB%U}Qu004LWxEBBb literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..58fbc63dc9c6a6172a51e020e1c64a029b77860f GIT binary patch literal 286 zcmV+(0pb21iwFoAtX*UT0CsP1Uua`*Uu|V_XJub9FfubRG%`0ZFfcG*E_7jX0PWI2 zYQr!P1yF69Qv@HNjHIz;okp0(w1u`dwrl7ay6r`}Z5O>vAJ(k%t}D#u_b=Q1|MYKN0k?F%1%8`e9 zKSw!vDgS>y*I(b#owWgNqS}DAplwuJ&2Ca$jw8bYC(sGBYqVGB+?VFfcB3VR8WN znB8*HKorNh)u&**(hHpZ$OrMx(FbtG2Vm58)KSp^)ETb|ZMA5pMJd!OSU=kFV``_g z+R`>j-$h75ul5x@*(C|I3IS#uFSUnbmGRtbmsoO@l@hoI+GxX zP$Ur*K|7yGKZ=ar5JpH<)d&);`x{QmvJ%0PL==fiR8e6*l@uisMUj8dVDHyb5%f>) z^&d92e{hCB17l}S@lN{Tbzyiq(+uSTOV~d$j!HmMF`!TC9`>?cQ{g`tqv+(1lQE1l7&4a2{ z&pB_NIumQo!XDFKF@34;je9O=8t&>Y$k=Db4l{P))=_Sl%$(yVbiT|@7~u=giuFc` z2EUKXDuO6<9jeKSq=5@H&$~2+q!^Z>7+;X-1uXq(>=+?g3PFhQ1ICxAgk>4aU;+}vgZ!w`#U8ROLKvl^UN@~S>WT<_T*q% zxd%eADhNUt=ggKkydu-feb+V=!tNEj0F8B6Ya3jgkTy7qsY2;!;Nqdd5e|+JOpky& z%odybSrLI&F>CEqLrP{pa8uCf91)=y1v~JgSqY<~_VOFM@Xyj>6DC?bhJB5ceT{?)b+k~^aA;7(1w|wx zG_qE+=06Pq=ZIfCS*eTM9)^njdevTB^~(=0XD#%--OyAZzzs3aM?y4kYa0lJP8$e3 z$O=ZI0Z|75BKg{9SMrBT1<*TZ#dHghPwTC32UdQiRbI1dkRi61zQOdB5H2af_RcZ0 z2omUT7hJ-fvB`Pmz*l^{e)Y=bUh%X1(c0P&r1+;Oa#UY5!;&%=cxsyI5IFES^#g}` zfrH|ThDYIq5e?1BRX7R_u5zrna4O3%0@(UGA$*P!xv1)wiM^_bm9Ng~J1F=(Z7X(E zfrY;77=q9wj0P)}y9z-Hl^_zZFjghMpiosZlFDH;SV0k}Tp@2)*5K7^xJ2aphHp{l zF*PM#y~3s-U+g%Vf{33XUb;d9`jgm2DT-0;xZ1h@aTzU&aG2MvKS5XW=X zr;y&2oy|w7FSDpARJ3X7rWL%o0YysnAXV^CZ0*Hr4=REm#Xij1YEQm`n{0@sjaY*Q z5Bal6W@l%1f9y=Qa+Kt$O)$bL(r_9HE!ni9{Gt#bluoA=BsKX}F(3sr4I7%OX|5kY z2SZsvN(?Gfd!?kHSkC%~dVlJtD#Y!AH8VArqUBX$6@+2s#cRVN-k0WVX>6|-W!#Es zmSli$+1X7mQEZmmG-Z%`voaM?2$WVrgP^W+puM3`a7v;?i4rA>RUa~aF$@3>004!C=xqQ1 literal 0 HcmV?d00001 From f7957af13bfd71de522bc69fb7bbb0d059c4cc93 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 18 May 2023 16:22:02 +0900 Subject: [PATCH 023/103] =?UTF-8?q?=E6=83=B3=E5=AE=9A=E5=A4=96=E3=83=95?= =?UTF-8?q?=E3=82=A1=E3=82=A4=E3=83=AB=E3=81=AE=E5=8F=97=E9=A0=98=E7=A2=BA?= =?UTF-8?q?=E8=AA=8D=E3=83=81=E3=82=A7=E3=83=83=E3=82=AF=E3=82=92=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 想定ファイル数、卸在庫データ取込対象日の場合は15、そうでない場合は14 --- .../src/batch/vjsk/vjsk_importer.py | 5 +++++ .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 13 ++++++++++--- .../tests/batch/vjsk/vjsk_load/testdata/dummy.gz | Bin 107 -> 0 bytes 3 files changed, 15 insertions(+), 3 deletions(-) delete mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 23fc8e5d..694f93ac 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -150,6 +150,11 @@ def _check_received_files() -> list: if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}') + # 想定外ファイルの受領確認 (想定ファイル数、卸在庫データ取込対象日の場合は15、そうでない場合は14) + naturally_count = 15 if batch_context.is_vjsk_stock_import_day else 14 + if len(received_s3_files) > naturally_count: + raise BatchOperationException(f'想定数を超える受領ファイルがあります ファイル一覧:{received_s3_files}') + logger.debug('_check_received_files done') return received_s3_files diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 43afd62c..8dac0688 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -2,6 +2,7 @@ from os import path import pytest +from src.aws.s3 import VjskReceiveBucket from src.batch.common.batch_context import BatchContext # from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, @@ -40,9 +41,16 @@ class TestImportFileToDb: 期待値 例外が発生しない """ - # setup + # setup - 卸在庫データ取込対象日 self.batch_context.is_vjsk_stock_import_day = True + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload test_files = [ "stock_slip_data_202304270000.gz", "slip_data_202304270000.gz", @@ -58,8 +66,7 @@ class TestImportFileToDb: "whs_customer_mst_202304270000.gz", "mdb_conv_mst_202304270000.gz", "bio_slip_data_202304270000.gz", - "lot_num_mst_202304270000.gz", - "dummy.gz" + "lot_num_mst_202304270000.gz" ] for test_file in test_files: file_name = path.join(self.test_file_path, test_file) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz deleted file mode 100644 index fb34f9372aaf78634f82e936651d484182b6cc88..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 107 zcmV-x0F?h9iwFqlUu0wi0AzJ-ZFw$qVR8WN(LoA;Fc1U4JjEACmbUJD1h2j57WDU` zg1*2~=*}gVKn__-&c|rG;Xf2oRl&!XuLOxP2$N1KxY`mh5EYco<`t~6)u8)-IOm+} N%?T+)6;uES0074AF75yT From bb1c545a44511640a3b908b054f81859298a204c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 18 May 2023 16:35:05 +0900 Subject: [PATCH 024/103] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=82=B1?= =?UTF-8?q?=E3=83=BC=E3=82=B9=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_file_check/conftest.py | 150 ++++++++++++++++++ .../vjsk_file_check/test_vjsk_file_check.py | 32 ++++ 2 files changed, 182 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index 0778241d..c315147a 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -1225,3 +1225,153 @@ def init_check_received_files_ng15(s3_client, bucket_name, receive_folder): Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') # s3_client.delete_object(Bucket=bucket_name, # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng16(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng17(s3_client, bucket_name, receive_folder): + # setup + + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz') diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 43062be1..99b31d68 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -269,3 +269,35 @@ def test_check_received_files_ng15(init_check_received_files_ng15): _check_received_files() assert str(e.value).startswith("製造ロット番号マスタファイルがありません") > 0 + + +def test_check_received_files_ng16(init_check_received_files_ng16): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("想定数を超える受領ファイルがあります") > 0 + + +def test_check_received_files_ng17(init_check_received_files_ng17): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = False + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("想定数を超える受領ファイルがあります") > 0 From 94016f513d9eb9004f9883dbf3db52fac76a8a81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 18 May 2023 18:04:16 +0900 Subject: [PATCH 025/103] =?UTF-8?q?feat:=20DCF=E6=96=BD=E8=A8=AD=E7=B5=B1?= =?UTF-8?q?=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF=E6=97=A5=E6=AC=A1=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E3=83=90=E3=83=83=E3=83=81(=E9=80=94=E4=B8=AD?= =?UTF-8?q?=E3=81=BE=E3=81=A7)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Integrate_dcf_inst_merge.py | 547 ++++++++++++++++++ 1 file changed, 547 insertions(+) create mode 100644 ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py new file mode 100644 index 00000000..fa6ce77a --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py @@ -0,0 +1,547 @@ +from datetime import datetime, timedelta +from sqlalchemy import CursorResult +from src.batch.batch_functions import logging_sql +from src.batch.common.batch_context import BatchContext +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.time.elapsed_time import ElapsedTime + +logger = get_logger('DCF施設統合マスタ日次更新バッチ') +batch_context = BatchContext.get_instance() + + +def exec(): + db = Database.get_instance() + try: + db.connect() + db.begin() + # + enabled_dst_inst_merge_records = _laundering_enabled_dct_inst_merge(db) + # + _laundering_disabled_dct_inst_merge(db) + # + if len(enabled_dst_inst_merge_records) > 0: + logger.info('') + _add_emp_chg_inst(db, enabled_dst_inst_merge_records) + logger.info('') + _add_ult_ident_presc(db, enabled_dst_inst_merge_records) + db.commit() + # + logger.info('') + except Exception as e: + db.rollback() + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _laundering_enabled_dct_inst_merge(db: Database) -> list[dict]: + # データ取得(無効フラグが『0(有効)』) + valid_dst_inst_merge_records = _select_dct_inst_merge(db, 0, True) + # 移行先DCF施設コードの更新(無効フラグが『0(有効)』) + 
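+ # 対象は hdke_tbl の処理日と同じ適用月(tekiyo_month)かつ enabled_flg='Y' の統合レコード。
+ # 続く洗い替えでは、過去分の dcf_inst_cd_new が旧DCF施設コードを指している行を新コード(重複時相手先コード)へ付け替える。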
_update_dcf_inst_merge(db, 0) + # DCF施設統合マスタの過去分の洗い替え + if len(valid_dst_inst_merge_records) == 0: + return + for row in valid_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + + return valid_dst_inst_merge_records + + +def _laundering_disabled_dct_inst_merge(db: Database): + # データ取得(無効フラグが『1(無効)』) + disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1, False) + # 移行先DCF施設コードの更新(無効フラグが『1(無効)』) + _update_dcf_inst_merge(db, 1) + # DCF施設統合マスタの過去分の洗い替え + if len(disabled_dst_inst_merge_records) == 0: + return + for row in disabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + + +def _add_ult_ident_presc(db: Database, valid_dst_inst_merge_records: list[dict]): + # + for data_inst_cnt, row in enumerate(valid_dst_inst_merge_records, start=1): + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + + ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for data_cnt, ult_row in enumerate(ult_ident_presc_records, start=1): + logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') + # 処方元コード=重複時相手先コードが発生した場合 + if ult_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(ult_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + set_start_date = _date_time_to_str(set_start_date) + is_delete_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_row): + _delete_ult_ident_presc(db, set_start_date, ult_row) + is_delete_duplicate_key = True + else: + logger.info('納入先処方元マスタの重複予定データなし') + _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_row) + + if _str_to_date_time(ult_row['end_date']) < start_date: + _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + continue + if not is_delete_duplicate_key: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_row) + if start_date > last_end_date: + _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + + +def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + DELETE FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :set_start_date + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 'ratio': ult_row['ratio'], + 'start_date': start_date + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + + +def _add_emp_chg_inst(db: Database, valid_dst_inst_merge_records: list[dict]): + # + for row in valid_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for emp_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(emp_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + + _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), emp_row) + + if start_date < tekiyo_month_first_day: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], 
_date_time_to_str(last_end_date), emp_row) + if start_date <= last_end_date: + continue + _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], emp_row['start_date']) + + # if start_date >= tekiyo_month_first_day: + # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) + # continue + # last_end_date = tekiyo_month_first_day - timedelta(days=1) + # _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], last_end_date, emp_row) + # if start_date > last_end_date: + # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) + + +def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET + enabled_flg = 'N', + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND start_date = :start_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date} + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, emp_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date= SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND emp_cd = :emp_cd + AND bu_cd = :bu_cd + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'dcf_inst_cd': dcf_inst_cd, + 'ta_cd': emp_row['ta_cd'], + 'emp_cd': emp_row['emp_cd'], + 'bu_cd': emp_row['bu_cd'], + 'start_date': emp_row['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, emp_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.emp_chg_inst( + inst_cd, + ta_cd, + emp_cd, + bu_cd, + start_date, + end_date, + main_chg_flg, + enabled_flg, + creater, + create_date, + updater, + update_date + ) + VALUES( + :dup_opp_cd, + :ta_cd, + :emp_cd, + :bu_cd, + :start_date, + :end_date, + :main_chg_flg, + 'Y', + CURRENT_USER(), + SYSDATE(), + CURRENT_USER(), + SYSDATE() + ) + """ + params = { + 'dup_opp_cd': dup_opp_cd, + 'ta_cd': emp_row['ta_cd'], + 'emp_cd': emp_row['emp_cd'], + 'bu_cd': emp_row['bu_cd'], + 'start_date': set_start_date, + 'end_date': emp_row['end_date'], + 'main_chg_flg': emp_row['main_chg_flg'] if emp_row['main_chg_flg'] is None else None + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: bool): + # + try: + sql = """ + SELECT + dim.dcf_inst_cd, + dim.dup_opp_cd, + dim.tekiyo_month + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg =: muko_flg + AND dim.enabled_flg = 'Y' + AND dim.dcf_inst_cd_new <= >: is_null_dcf_inst_cd_new + """ + params = { + 'muko_flg': muko_flg, + 'is_null_dcf_inst_cd_new': 
None + } + dst_inst_merge_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + + return dst_inst_merge_records + + +def _update_dcf_inst_merge(db: Database, muko_flg: int): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.dcf_inst_merge AS updim + INNER JOIN( + SELECT + dim.dcf_inst_cd AS base_dcf_inst_cd, + dim.dup_opp_cd AS base_dup_opp_cd, + dim.tekiyo_month AS base_tekiyo_month, + dim.muko_flg AS base_muko_flg, + dim.enabled_flg AS base_enabled_flg + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month=DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg= :muko_flg + AND dim.enabled_flg='Y' + AND dim.dcf_inst_cd_new IS {$dcfInstCdNew}NULL + ) AS bf_dim + SET + updim.dcf_inst_cd_new = {column}, + updim.updater = CURRENT_USER(), + updim.update_date = SYSDATE() + WHERE + updim.dcf_inst_cd = base_dcf_inst_cd + AND updim.dup_opp_cd = base_dup_opp_cd + AND updim.tekiyo_month = base_tekiyo_month + AND updim.muko_flg =base_muko_flg + AND updim.enabled_flg =base_enabled_flg + """ + params = { + 'muko_flg': muko_flg + } + res = db.execute(sql.format( + column='base_dup_opp_cd' if muko_flg == 1 else 'NULL' + ), params) + logging_sql(logger, sql) + logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.dcf_inst_merge + SET + dcf_inst_cd_new = :dcf_inst_cd_new_after, + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + dcf_inst_cd_new = :dcf_inst_cd_new_before + AND enabled_flg = 'Y' + AND muko_flg = 0 + """ + params = {'dcf_inst_cd_new_after': dcf_inst_cd_new_after, 'dcf_inst_cd_new_before': dcf_inst_cd_new_before} + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_record: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.ult_ident_presc + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date= SYSDATE() + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'ta_cd': ult_ident_presc_record['ta_cd'], + 'ult_ident_cd': ult_ident_presc_record['ult_ident_cd'], + 'ratio': ult_ident_presc_record['ratio'], + 'start_date': ult_ident_presc_record['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'終了日 > 開始月のため適用終了日を更新, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ult_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.ult_ident_presc( + ta_cd, + ult_ident_cd, + ratio, + start_date, + presc_cd, + end_date, + creater, + create_date, + update_date, + updater + ) + VALUES( + :ta_cd, + :ult_ident_cd, + :ratio, + :start_date, + :presc_cd, + :end_date, + CURRENT_USER(), + SYSDATE(), + SYSDATE(), + CURRENT_USER() + ) + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 
'ratio': ult_row['ratio'], + 'start_date': set_Start_Date, + 'presc_cd': dup_opp_cd, + 'end_date': ult_row['end_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'納入先処方元マスタに追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('納入先処方元マスタに追加に失敗') + raise e + + +def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: + # + try: + sql = """ + SELECT + eci.inst_cd, + eci.ta_cd, + eci.emp_cd, + eci.bu_cd, + eci.start_date, + eci.end_date, + eci.main_chg_flg, + eci.enabled_flg, + (SELECT COUNT(eciopp.inst_cd) FROM src05.emp_chg_inst AS eciopp WHERE eciopp.inst_cd = :dup_opp_cd) AS opp_count + FROM + src05.emp_chg_inst AS eci + WHERE + eci.inst_cd = :dcf_inst_cd + AND eci.enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < eci.end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + emp_chg_inst_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return emp_chg_inst_records + + +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: + # + try: + sql = """ + SELECT + uip.ta_cd, + uip.ult_ident_cd, + uip.ratio, + uip.start_date, + uip.end_date, + (SELECT COUNT(uipopp.ta_cd) FROM ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd) AS opp_count + FROM + src05.ult_ident_presc AS uip + WHERE + uip.presc_cd = '{$dcfInstCd}' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + ult_ident_presc_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return ult_ident_presc_records + + +def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: CursorResult): + # + try: + sql = """ + SELECT + COUNT(ta_cd) AS cnt + FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :set_Start_Date + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 'ratio': ult_row['ratio'], + 'start_date': set_Start_Date + } + result = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return result[0]['cnt'] + + +def _get_first_day_of_month(month_day: str): + return datetime.datetime.strptime(month_day, '%Y%m01') + + +def _str_to_date_time(str_date_time: str): + return datetime.datetime.strptime(str_date_time, '%Y%m%d') + + +def _date_time_to_str(date_time: datetime): + return date_time.strptime('%Y%m%d') From 75db3d2251968850ce4d1834d83ed39bef0d4461 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 19 May 2023 16:37:03 +0900 Subject: [PATCH 026/103] =?UTF-8?q?feat:=20DCF=E6=96=BD=E8=A8=AD=E7=B5=B1?= =?UTF-8?q?=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF=E6=97=A5=E6=AC=A1=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E3=83=90=E3=83=83=E3=83=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Integrate_dcf_inst_merge.py | 358 ++++++++++-------- 1 file changed, 193 insertions(+), 165 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py index fa6ce77a..c37a3a30 100644 --- 
a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py @@ -7,8 +7,8 @@ from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.time.elapsed_time import ElapsedTime -logger = get_logger('DCF施設統合マスタ日次更新バッチ') batch_context = BatchContext.get_instance() +logger = get_logger('DCF施設統合マスタ日次更新バッチ') def exec(): @@ -16,19 +16,17 @@ def exec(): try: db.connect() db.begin() - # - enabled_dst_inst_merge_records = _laundering_enabled_dct_inst_merge(db) - # - _laundering_disabled_dct_inst_merge(db) - # + logger.debug('DCF施設統合マスタ日次更新バッチ処理開始') + # DCF施設統合マスタ移行先コードのセット(無効フラグが『0(有効)』) + enabled_dst_inst_merge_records = _set_enabled_dct_inst_merge(db) + # DCF施設統合マスタ移行先コードのセット(無効フラグが『1(無効)』) + _set_disabled_dct_inst_merge(db) + # DCF施設統合マスタに無効フラグが『0(有効)』データが存在する場合 if len(enabled_dst_inst_merge_records) > 0: - logger.info('') _add_emp_chg_inst(db, enabled_dst_inst_merge_records) - logger.info('') _add_ult_ident_presc(db, enabled_dst_inst_merge_records) db.commit() - # - logger.info('') + logger.info('DCF施設統合マスタ日次更新バッチ処理終了') except Exception as e: db.rollback() raise BatchOperationException(e) @@ -36,68 +34,101 @@ def exec(): db.disconnect() -def _laundering_enabled_dct_inst_merge(db: Database) -> list[dict]: +def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: # データ取得(無効フラグが『0(有効)』) - valid_dst_inst_merge_records = _select_dct_inst_merge(db, 0, True) + enabled_dst_inst_merge_records = _select_dct_inst_merge(db, 0) # 移行先DCF施設コードの更新(無効フラグが『0(有効)』) - _update_dcf_inst_merge(db, 0) - # DCF施設統合マスタの過去分の洗い替え - if len(valid_dst_inst_merge_records) == 0: - return - for row in valid_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + if _update_dcf_inst_merge(db, 0) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in enabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) - return valid_dst_inst_merge_records + return enabled_dst_inst_merge_records -def _laundering_disabled_dct_inst_merge(db: Database): +def _set_disabled_dct_inst_merge(db: Database): # データ取得(無効フラグが『1(無効)』) - disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1, False) + disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1) # 移行先DCF施設コードの更新(無効フラグが『1(無効)』) - _update_dcf_inst_merge(db, 1) - # DCF施設統合マスタの過去分の洗い替え - if len(disabled_dst_inst_merge_records) == 0: - return - for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + if _update_dcf_inst_merge(db, 1) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in disabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) -def _add_ult_ident_presc(db: Database, valid_dst_inst_merge_records: list[dict]): - # - for data_inst_cnt, row in enumerate(valid_dst_inst_merge_records, start=1): +def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 納入先処方元マスタの追加 + logger.info('納入先処方元マスタの登録 開始') + for data_inst_cnt, row in enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) - for data_cnt, ult_row in enumerate(ult_ident_presc_records, start=1): + for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): 
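+ # 1移行施設・1レコードごとの流れ:
+ # ・重複時相手先コードが既に処方元(presc_cd)として登録済みなら、この施設の処理を打ち切る
+ # ・それ以外は適用開始日を適用開始月の初日以降にクランプし、新コード(重複時相手先コード)の行を追加する
+ # ・旧行は、適用終了日の更新または物理削除で整理する
+ # 例(クランプ): 適用開始月の初日が 20230401 のとき、start_date='20230215' は '20230401'、'20230510' はそのまま。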
logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 - if ult_row['opp_count'] > 0: + if ult_ident_presc_row['opp_count'] > 0: break - start_date = _str_to_date_time(ult_row['start_date']) + start_date = _str_to_date_time(ult_ident_presc_row['start_date']) set_start_date = start_date \ if start_date > tekiyo_month_first_day else tekiyo_month_first_day set_start_date = _date_time_to_str(set_start_date) - is_delete_duplicate_key = False - if _count_duplicate_ult_ident_presc(db, set_start_date, ult_row): - _delete_ult_ident_presc(db, set_start_date, ult_row) - is_delete_duplicate_key = True + is_exists_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: + _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, + '納入先処方元マスタの重複予定データの削除') + is_exists_duplicate_key = True else: logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_row) + _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_ident_presc_row) - if _str_to_date_time(ult_row['end_date']) < start_date: - _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '開始月>適用開始日のため物理削除') continue - if not is_delete_duplicate_key: + if not is_exists_duplicate_key: last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_row) - if start_date > last_end_date: - _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + if start_date > last_end_date: + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '適用終了日更新後 開始日>終了日のため物理削除') + + logger.info('納入先処方元マスタの登録 終了') -def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult): - # +def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 従業員担当施設マスタの登録 + logger.info('従業員担当施設マスタの登録 開始') + for row in enabled_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for emp_chg_inst_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_chg_inst_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(emp_chg_inst_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + + _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), + emp_chg_inst_row) + + if start_date < tekiyo_month_first_day: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), + emp_chg_inst_row) + if start_date <= last_end_date: + continue + _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) + + logger.info('従業員担当施設マスタの登録 終了') + + +def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: CursorResult, + log_message: str): + # ult_ident_prescのDelete try: elapsed_time = ElapsedTime() sql = """ @@ -107,56 +138,24 @@ def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd AND ratio = 
:ratio - AND start_date = :set_start_date + AND start_date = :start_date """ params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], 'start_date': start_date } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info(f'{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug(f'{log_message} 失敗') raise e -def _add_emp_chg_inst(db: Database, valid_dst_inst_merge_records: list[dict]): - # - for row in valid_dst_inst_merge_records: - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) - for emp_row in emp_chg_inst_records: - # 重複時相手先コードが存在したかのチェック - if emp_row['opp_count'] > 0: - break - - start_date = _str_to_date_time(emp_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day - - _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), emp_row) - - if start_date < tekiyo_month_first_day: - last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_row) - if start_date <= last_end_date: - continue - _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], emp_row['start_date']) - - # if start_date >= tekiyo_month_first_day: - # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) - # continue - # last_end_date = tekiyo_month_first_day - timedelta(days=1) - # _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], last_end_date, emp_row) - # if start_date > last_end_date: - # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) - - def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): - # + # emp_chg_instを更新 try: elapsed_time = ElapsedTime() sql = """ @@ -174,14 +173,15 @@ def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, params = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date} res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'従業員担当施設マスタのYorNフラグ更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタのYorNフラグ更新に失敗') raise e -def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, emp_row: CursorResult): - # +def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, + emp_chg_inst_row: CursorResult): + # emp_chg_instを更新 try: elapsed_time = ElapsedTime() sql = """ @@ -200,21 +200,22 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: params = { 'end_date': last_end_date, 'dcf_inst_cd': dcf_inst_cd, - 'ta_cd': emp_row['ta_cd'], - 'emp_cd': emp_row['emp_cd'], - 'bu_cd': emp_row['bu_cd'], - 'start_date': emp_row['start_date'] + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 'bu_cd': emp_chg_inst_row['bu_cd'], + 'start_date': emp_chg_inst_row['start_date'] } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + 
logger.info(f'従業員担当施設マスタの適用終了日更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの適用終了日更新 失敗') raise e -def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, emp_row: CursorResult): - # +def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, + emp_chg_inst_row: CursorResult): + # emp_chg_instにInsert try: elapsed_time = ElapsedTime() sql = """ @@ -250,23 +251,24 @@ def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, emp """ params = { 'dup_opp_cd': dup_opp_cd, - 'ta_cd': emp_row['ta_cd'], - 'emp_cd': emp_row['emp_cd'], - 'bu_cd': emp_row['bu_cd'], + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 'bu_cd': emp_chg_inst_row['bu_cd'], 'start_date': set_start_date, - 'end_date': emp_row['end_date'], - 'main_chg_flg': emp_row['main_chg_flg'] if emp_row['main_chg_flg'] is None else None + 'end_date': emp_chg_inst_row['end_date'], + 'main_chg_flg': None + if emp_chg_inst_row['main_chg_flg'] is None else emp_chg_inst_row['main_chg_flg'] } res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの追加に失敗') raise e -def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: bool): - # +def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: + # dcf_inst_mergeからSelect try: sql = """ SELECT @@ -279,25 +281,26 @@ def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: src05.hdke_tbl AS ht ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE - dim.muko_flg =: muko_flg + dim.muko_flg = :muko_flg AND dim.enabled_flg = 'Y' - AND dim.dcf_inst_cd_new <= >: is_null_dcf_inst_cd_new - """ + AND dim.dcf_inst_cd_new IS {not_null}NULL + """.format( + not_null='' if muko_flg == 0 else 'NOT ' + ) params = { - 'muko_flg': muko_flg, - 'is_null_dcf_inst_cd_new': None + 'muko_flg': muko_flg } dst_inst_merge_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('DCF施設統合マスタの取得に成功') except Exception as e: - logger.debug('') + logger.debug('DCF施設統合マスタの取得に失敗') raise e return dst_inst_merge_records -def _update_dcf_inst_merge(db: Database, muko_flg: int): +def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # try: elapsed_time = ElapsedTime() @@ -319,7 +322,7 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int): WHERE dim.muko_flg= :muko_flg AND dim.enabled_flg='Y' - AND dim.dcf_inst_cd_new IS {$dcfInstCdNew}NULL + AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim SET updim.dcf_inst_cd_new = {column}, @@ -329,24 +332,27 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int): updim.dcf_inst_cd = base_dcf_inst_cd AND updim.dup_opp_cd = base_dup_opp_cd AND updim.tekiyo_month = base_tekiyo_month - AND updim.muko_flg =base_muko_flg - AND updim.enabled_flg =base_enabled_flg - """ + AND updim.muko_flg = base_muko_flg + AND updim.enabled_flg = base_enabled_flg + """.format( + not_null='' if muko_flg == 0 else 'NOT ', + column='base_dup_opp_cd' if muko_flg == 0 else 'NULL' + ) params = { 'muko_flg': muko_flg } - res = db.execute(sql.format( - column='base_dup_opp_cd' if muko_flg == 1 else 'NULL' - ), params) + res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + 
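# A hedged sketch of the SQL-building pattern in _select_dct_inst_merge /
# _update_dcf_inst_merge above: structural fragments that cannot be bound as
# parameters (the NULL / NOT NULL test, the value assigned to dcf_inst_cd_new)
# are spliced in with str.format(), while data values remain named bind
# parameters. The Database wrapper is assumed; a plain function returning
# (sql, params) stands in for it here, and the simplified statement is
# illustrative rather than the patch's exact SQL.
def build_dcf_inst_merge_update(muko_flg: int) -> tuple[str, dict]:
    sql = """
        UPDATE src05.dcf_inst_merge
        SET dcf_inst_cd_new = {column}
        WHERE muko_flg = :muko_flg
          AND enabled_flg = 'Y'
          AND dcf_inst_cd_new IS {not_null}NULL
    """.format(
        not_null='' if muko_flg == 0 else 'NOT ',
        column='dup_opp_cd' if muko_flg == 0 else 'NULL',
    )
    return sql, {'muko_flg': muko_flg}

# muko_flg == 0 targets rows whose new facility code IS NULL and fills it;
# muko_flg == 1 targets rows whose new facility code IS NOT NULL and clears it.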
logger.debug('DCF施設統合マスタの更新に失敗') raise e + return res.rowcount + def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): - # + # dcf_inst_mergeをUpdate try: elapsed_time = ElapsedTime() sql = """ @@ -361,17 +367,20 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c AND enabled_flg = 'Y' AND muko_flg = 0 """ - params = {'dcf_inst_cd_new_after': dcf_inst_cd_new_after, 'dcf_inst_cd_new_before': dcf_inst_cd_new_before} + params = { + 'dcf_inst_cd_new_after': dcf_inst_cd_new_after, + 'dcf_inst_cd_new_before': dcf_inst_cd_new_before + } res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('移行先DCF施設コードの更新に失敗') raise e -def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_record: CursorResult): - # +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: CursorResult): + # ult_ident_presc_endをUpdate try: elapsed_time = ElapsedTime() sql = """ @@ -388,21 +397,22 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident """ params = { 'end_date': last_end_date, - 'ta_cd': ult_ident_presc_record['ta_cd'], - 'ult_ident_cd': ult_ident_presc_record['ult_ident_cd'], - 'ratio': ult_ident_presc_record['ratio'], - 'start_date': ult_ident_presc_record['start_date'] + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': ult_ident_presc_row['start_date'] } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'終了日 > 開始月のため適用終了日を更新, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'終了日 > 開始月のため適用終了日を更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('終了日 > 開始月のため適用終了日を更新 失敗') raise e -def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ult_row: CursorResult): - # +def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, + ult_ident_presc_row: CursorResult): + # ult_ident_prescにInsert try: elapsed_time = ElapsedTime() sql = """ @@ -419,7 +429,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, update_date, updater ) - VALUES( + VALUES( :ta_cd, :ult_ident_cd, :ratio, @@ -433,23 +443,23 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ) """ params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], 'start_date': set_Start_Date, 'presc_cd': dup_opp_cd, - 'end_date': ult_row['end_date'] + 'end_date': ult_ident_presc_row['end_date'] } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'納入先処方元マスタに追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'納入先処方元マスタに追加 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('納入先処方元マスタに追加に失敗') + logger.debug('納入先処方元マスタに追加 失敗') raise e def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # + # emp_chg_instから取得 try: sql = """ SELECT @@ -461,7 +471,14 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis eci.end_date, eci.main_chg_flg, eci.enabled_flg, - (SELECT COUNT(eciopp.inst_cd) 
FROM src05.emp_chg_inst AS eciopp WHERE eciopp.inst_cd = :dup_opp_cd) AS opp_count + ( + SELECT + COUNT(eciopp.inst_cd) + FROM + src05.emp_chg_inst AS eciopp + WHERE + eciopp.inst_cd = :dup_opp_cd + ) AS opp_count FROM src05.emp_chg_inst AS eci WHERE @@ -472,15 +489,15 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} emp_chg_inst_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('従業員担当施設マスタの取得 成功') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの取得 失敗') raise e return emp_chg_inst_records def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # + # ult_ident_prescから取得 try: sql = """ SELECT @@ -489,25 +506,33 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> uip.ratio, uip.start_date, uip.end_date, - (SELECT COUNT(uipopp.ta_cd) FROM ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd) AS opp_count + ( + SELECT + COUNT(uipopp.ta_cd) + FROM + ult_ident_presc AS uipopp + WHERE + uipopp.presc_cd = :dup_opp_cd + ) AS opp_count FROM src05.ult_ident_presc AS uip WHERE - uip.presc_cd = '{$dcfInstCd}' + uip.presc_cd = :dcf_inst_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('納入先処方元マスタの取得 成功') except Exception as e: - logger.debug('') + logger.debug('納入先処方元マスタの取得 失敗') raise e return ult_ident_presc_records -def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: CursorResult): - # +def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str, + ult_ident_presc_row: CursorResult) -> int: + # ult_ident_prescの重複時相手先コードの件数取得 try: sql = """ SELECT @@ -518,30 +543,33 @@ def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd AND ratio = :ratio - AND start_date = :set_Start_Date + AND start_date = :start_date """ params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], - 'start_date': set_Start_Date + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': set_start_date } result = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('納入先処方元マスタの重複予定データの存在チェック 成功') except Exception as e: - logger.debug('') + logger.debug('納入先処方元マスタの重複予定データの存在チェック 失敗') raise e return result[0]['cnt'] -def _get_first_day_of_month(month_day: str): - return datetime.datetime.strptime(month_day, '%Y%m01') +def _get_first_day_of_month(year_month: str) -> datetime: + # year_monthの初日の日付を日付型に変換し返却する + return datetime.strptime(year_month + '01', '%Y%m%d') -def _str_to_date_time(str_date_time: str): - return datetime.datetime.strptime(str_date_time, '%Y%m%d') +def _str_to_date_time(str_date_time: str) -> datetime: + # str_date_timeを日付型に変換して返却する + return datetime.strptime(str_date_time, '%Y%m%d') -def _date_time_to_str(date_time: datetime): - return date_time.strptime('%Y%m%d') +def _date_time_to_str(date_time: datetime) -> str: + # date_timeをYmd型に変換して返却する + return date_time.strftime('%Y%m%d') From 3b369b8fc40b258792455aa647e4007f99708398 Mon Sep 17 00:00:00 2001 From: 
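# The three helpers above reduce to parsing with strptime and formatting with
# strftime (the earlier _date_time_to_str mistakenly called strptime on a
# datetime). A quick self-contained check of the corrected behaviour:
from datetime import datetime

assert datetime.strptime('202304' + '01', '%Y%m%d') == datetime(2023, 4, 1)  # _get_first_day_of_month
assert datetime.strptime('20230428', '%Y%m%d') == datetime(2023, 4, 28)      # _str_to_date_time
assert datetime(2023, 4, 28).strftime('%Y%m%d') == '20230428'                # _date_time_to_str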
=?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 19 May 2023 16:48:11 +0900 Subject: [PATCH 027/103] =?UTF-8?q?feat:=20=E8=B5=B7=E5=8B=95=E3=81=97?= =?UTF-8?q?=E3=81=AA=E3=81=84=E4=B8=8D=E5=85=B7=E5=90=88=E3=81=AE=E5=AF=BE?= =?UTF-8?q?=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/jobctrl_daily.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 1a82f226..71fbea90 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -7,6 +7,7 @@ from src.batch.batch_functions import ( update_batch_processing_flag_in_processing) from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile +from src.batch.dcf_inst_merge import Integrate_dcf_inst_merge from src.batch.laundering import create_dcf_inst_merge, mst_inst_laundering from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException @@ -114,6 +115,14 @@ def exec(): logger.exception(f'DCF施設統合マスタ作成エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS + try: + logger.info('DCF施設統合マスタ日次更新バッチ:起動') + Integrate_dcf_inst_merge.exec() + logger.info('DCF施設統合マスタ日次更新バッチ:終了') + except BatchOperationException as e: + logger.exception(f'DCF施設統合マスタ日次更新バッチエラー(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + # バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 logger.info('業務日付更新・バッチステータスリフレッシュ:起動') try: From 7da4be471c97a1eba6b3fe86e6342c8272ff9021 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 15:30:51 +0900 Subject: [PATCH 028/103] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...t_merge.py => integrate_dcf_inst_merge.py} | 87 ++++++++++--------- .../src/batch/laundering/sales_laundering.py | 3 + ecs/jskult-batch-daily/src/jobctrl_daily.py | 9 -- 3 files changed, 51 insertions(+), 48 deletions(-) rename ecs/jskult-batch-daily/src/batch/dcf_inst_merge/{Integrate_dcf_inst_merge.py => integrate_dcf_inst_merge.py} (83%) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py similarity index 83% rename from ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py rename to ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index c37a3a30..ffb2ba06 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -1,5 +1,4 @@ from datetime import datetime, timedelta -from sqlalchemy import CursorResult from src.batch.batch_functions import logging_sql from src.batch.common.batch_context import BatchContext from src.db.database import Database @@ -8,7 +7,7 @@ from src.logging.get_logger import get_logger from src.time.elapsed_time import ElapsedTime batch_context = BatchContext.get_instance() -logger = get_logger('DCF施設統合マスタ日次更新バッチ') +logger = get_logger('DCF施設統合マスタ日次更新') def exec(): @@ -16,7 +15,7 @@ def exec(): try: db.connect() db.begin() - logger.debug('DCF施設統合マスタ日次更新バッチ処理開始') + logger.debug('DCF施設統合マスタ日次更新処理開始') # DCF施設統合マスタ移行先コードのセット(無効フラグが『0(有効)』) enabled_dst_inst_merge_records = 
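# A hedged sketch of the transaction handling in exec() above: connect and begin,
# run the update steps, commit, and on any failure roll back and re-raise wrapped
# in BatchOperationException. Database and BatchOperationException are the classes
# imported by this module; the step callables below are placeholders.
def _run_in_transaction(db, steps, wrap_exception=Exception):
    try:
        db.connect()
        db.begin()
        results = [step(db) for step in steps]
        db.commit()
        return results
    except Exception as e:
        db.rollback()
        raise wrap_exception(e)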
_set_enabled_dct_inst_merge(db) # DCF施設統合マスタ移行先コードのセット(無効フラグが『1(無効)』) @@ -26,7 +25,7 @@ def exec(): _add_emp_chg_inst(db, enabled_dst_inst_merge_records) _add_ult_ident_presc(db, enabled_dst_inst_merge_records) db.commit() - logger.info('DCF施設統合マスタ日次更新バッチ処理終了') + logger.debug('DCF施設統合マスタ日次更新処理終了') except Exception as e: db.rollback() raise BatchOperationException(e) @@ -59,10 +58,11 @@ def _set_disabled_dct_inst_merge(db: Database): def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): # 納入先処方元マスタの追加 logger.info('納入先処方元マスタの登録 開始') - for data_inst_cnt, row in enumerate(enabled_dst_inst_merge_records, start=1): - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) + ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + enabled_merge_record['dup_opp_cd']) for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 @@ -80,13 +80,17 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict is_exists_duplicate_key = True else: logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_ident_presc_row) + _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) + # 適用終了日 < 適用開始日の場合 if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + # 対象レコードを物理削除する _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, '開始月>適用開始日のため物理削除') continue + # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) if start_date > last_end_date: @@ -99,9 +103,9 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): # 従業員担当施設マスタの登録 logger.info('従業員担当施設マスタの登録 開始') - for row in enabled_dst_inst_merge_records: - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for enabled_merge_record in enabled_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd']) for emp_chg_inst_row in emp_chg_inst_records: # 重複時相手先コードが存在したかのチェック if emp_chg_inst_row['opp_count'] > 0: @@ -111,22 +115,24 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): set_start_date = start_date \ if start_date > tekiyo_month_first_day else tekiyo_month_first_day - _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), + _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 if start_date < tekiyo_month_first_day: + # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) - 
_update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), + _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - if start_date <= last_end_date: - continue - _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], - emp_chg_inst_row['start_date']) + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) logger.info('従業員担当施設マスタの登録 終了') -def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: CursorResult, +def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: dict, log_message: str): # ult_ident_prescのDelete try: @@ -154,8 +160,8 @@ def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: raise e -def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): - # emp_chg_instを更新 +def _update_emp_chg_inst_disabled(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): + # emp_chg_instをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -180,8 +186,8 @@ def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, - emp_chg_inst_row: CursorResult): - # emp_chg_instを更新 + emp_chg_inst_row: dict): + # emp_chg_instをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -214,8 +220,8 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, - emp_chg_inst_row: CursorResult): - # emp_chg_instにInsert + emp_chg_inst_row: dict): + # emp_chg_instにINSERT try: elapsed_time = ElapsedTime() sql = """ @@ -268,7 +274,8 @@ def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: - # dcf_inst_mergeからSelect + # dcf_inst_mergeからSELECT + # muko_flgの値によって、SQLのWHERE条件を変更 try: sql = """ SELECT @@ -301,9 +308,11 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: - # - try: + # dcf_inst_mergeをUPDATE + # muko_flgの値によって、SQLのWHERE条件とSET句を変更 + try: elapsed_time = ElapsedTime() + log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' sql = """ UPDATE src05.dcf_inst_merge AS updim @@ -318,9 +327,9 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: src05.dcf_inst_merge AS dim INNER JOIN src05.hdke_tbl AS ht - ON dim.tekiyo_month=DATE_FORMAT(ht.syor_date, '%Y%m') + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE - dim.muko_flg= :muko_flg + dim.muko_flg = :muko_flg AND dim.enabled_flg='Y' AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim @@ -343,16 +352,16 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('DCF施設統合マスタの更新に失敗') + logger.debug(f'DCF施設統合マスタの{log_message} 失敗') raise e return res.rowcount def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): - # dcf_inst_mergeをUpdate + # dcf_inst_mergeをUPDATE try: 
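# A sketch of the execute-and-log pattern repeated in the functions above: run the
# statement, then log the affected row count together with the elapsed time, and on
# failure log at debug level before re-raising. ElapsedTime and logging_sql are this
# module's helpers (assumed); a lightweight timer stands in so the snippet is
# self-contained.
import logging
import time

def _execute_logged(db, logger: logging.Logger, sql: str, params: dict, label: str) -> int:
    start = time.perf_counter()
    try:
        res = db.execute(sql, params)
        logger.info(f'{label}: {res.rowcount} rows updated ({time.perf_counter() - start:.3f}s)')
        return res.rowcount
    except Exception:
        logger.debug(f'{label}: failed')
        raise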
elapsed_time = ElapsedTime() sql = """ @@ -379,8 +388,8 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c raise e -def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: CursorResult): - # ult_ident_presc_endをUpdate +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: dict): + # ult_ident_presc_endをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -411,8 +420,8 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, - ult_ident_presc_row: CursorResult): - # ult_ident_prescにInsert + ult_ident_presc_row: dict): + # ult_ident_prescにINSERT try: elapsed_time = ElapsedTime() sql = """ @@ -459,7 +468,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # emp_chg_instから取得 + # emp_chg_instからSELECT try: sql = """ SELECT @@ -497,7 +506,7 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # ult_ident_prescから取得 + # ult_ident_prescからSELECT try: sql = """ SELECT @@ -531,7 +540,7 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str, - ult_ident_presc_row: CursorResult) -> int: + ult_ident_presc_row: dict) -> int: # ult_ident_prescの重複時相手先コードの件数取得 try: sql = """ diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index f6d682b4..019f8c29 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -1,5 +1,6 @@ from src.batch.common.batch_context import BatchContext from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering +from src.batch.dcf_inst_merge import integrate_dcf_inst_merge from src.logging.get_logger import get_logger batch_context = BatchContext.get_instance() @@ -16,6 +17,8 @@ def exec(): return # 洗替用マスタ作成 create_inst_merge_for_laundering.exec() + # DCF施設統合マスタ日次更新 + integrate_dcf_inst_merge.exec() # 施設担当者洗替 emp_chg_inst_laundering.exec() # 納入先処方元マスタ洗替 diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 71fbea90..1a82f226 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -7,7 +7,6 @@ from src.batch.batch_functions import ( update_batch_processing_flag_in_processing) from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile -from src.batch.dcf_inst_merge import Integrate_dcf_inst_merge from src.batch.laundering import create_dcf_inst_merge, mst_inst_laundering from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException @@ -115,14 +114,6 @@ def exec(): logger.exception(f'DCF施設統合マスタ作成エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS - try: - logger.info('DCF施設統合マスタ日次更新バッチ:起動') - Integrate_dcf_inst_merge.exec() - logger.info('DCF施設統合マスタ日次更新バッチ:終了') - except BatchOperationException as e: - logger.exception(f'DCF施設統合マスタ日次更新バッチエラー(異常終了){e}') - return constants.BATCH_EXIT_CODE_SUCCESS - # 
バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 logger.info('業務日付更新・バッチステータスリフレッシュ:起動') try: From 5d16bceec95f4bd064d18246f55fc32dd5736e54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 16:22:05 +0900 Subject: [PATCH 029/103] =?UTF-8?q?feat:=20SQL=E4=BF=AE=E6=AD=A3=E3=80=81?= =?UTF-8?q?=E5=8D=98=E4=BD=93=E8=A9=A6=E9=A8=93=E6=9B=B8=E3=83=AC=E3=83=93?= =?UTF-8?q?=E3=83=A5=E3=83=BC=E6=99=82=E3=81=AE=E6=8C=87=E6=91=98=E3=81=AE?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index ffb2ba06..6e9bf5e4 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -90,12 +90,15 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict continue # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: - # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + # 適用開始日 > DCF施設統合マスタの適用月度の前月末日の場合 if start_date > last_end_date: + # 対象レコードを物理削除する _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, '適用終了日更新後 開始日>終了日のため物理削除') + continue + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) logger.info('納入先処方元マスタの登録 終了') @@ -195,7 +198,7 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: src05.emp_chg_inst SET end_date = :end_date, updater = CURRENT_USER(), - update_date= SYSDATE() + update_date = SYSDATE() WHERE inst_cd = :dcf_inst_cd AND ta_cd = :ta_cd @@ -310,9 +313,9 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # dcf_inst_mergeをUPDATE # muko_flgの値によって、SQLのWHERE条件とSET句を変更 - try: + try: elapsed_time = ElapsedTime() - log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' + log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' sql = """ UPDATE src05.dcf_inst_merge AS updim @@ -330,7 +333,7 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE dim.muko_flg = :muko_flg - AND dim.enabled_flg='Y' + AND dim.enabled_flg ='Y' AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim SET @@ -397,7 +400,7 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident src05.ult_ident_presc SET end_date = :end_date, updater = CURRENT_USER(), - update_date= SYSDATE() + update_date = SYSDATE() WHERE ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd @@ -519,7 +522,7 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> SELECT COUNT(uipopp.ta_cd) FROM - ult_ident_presc AS uipopp + src05.ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd ) AS opp_count From 0f8fb0026f88accd75207484ad115213360a99c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 18:16:09 +0900 Subject: [PATCH 030/103] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= 
=?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 6e9bf5e4..182b7232 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -278,7 +278,8 @@ def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: # dcf_inst_mergeからSELECT - # muko_flgの値によって、SQLのWHERE条件を変更 + # 無効フラグがOFFのときは、移行先DCF施設コードが設定されてないデータを抽出する。 + # ONのときは、移行先DCF施設コードが設定されているデータを抽出する。 try: sql = """ SELECT @@ -312,7 +313,10 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # dcf_inst_mergeをUPDATE - # muko_flgの値によって、SQLのWHERE条件とSET句を変更 + # 無効フラグがOFFのときは、 + # 移行先DCF施設コードが設定されていないデータを抽出し、移行先DCF施設コードに重複時相手先コードを上書きする + # 無効フラグがONのときは、 + # 移行先DCF施設コードが設定されているデータを抽出し、移行先DCF施設コードにNULLを上書きする。 try: elapsed_time = ElapsedTime() log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' From f3e772e0f34b8f824a2283cdd3ba7926cd622d4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 30 May 2023 16:25:48 +0900 Subject: [PATCH 031/103] =?UTF-8?q?feat:=20=E5=8D=98=E4=BD=93=E8=A9=A6?= =?UTF-8?q?=E9=A8=93=E5=AE=9F=E6=96=BD=E4=B8=AD=E3=81=AB=E7=99=BA=E8=A6=8B?= =?UTF-8?q?=E3=81=95=E3=82=8C=E3=81=9F=E3=83=87=E3=83=83=E3=83=88=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=81=AE=E5=89=8A=E9=99=A4=E3=80=81=E3=81=BB?= =?UTF-8?q?=E3=81=8B=E3=80=81=E3=83=87=E3=83=BC=E3=82=BF=E3=81=AE=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E6=9D=A1=E4=BB=B6=E3=81=AE=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dcf_inst_merge/integrate_dcf_inst_merge.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 182b7232..20ac5fcc 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -91,12 +91,6 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: last_end_date = tekiyo_month_first_day - timedelta(days=1) - # 適用開始日 > DCF施設統合マスタの適用月度の前月末日の場合 - if start_date > last_end_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '適用終了日更新後 開始日>終了日のため物理削除') - continue # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) @@ -121,14 +115,15 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) - # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 - if start_date < tekiyo_month_first_day: + # 適用終了日 ≧ 適用開始日の場合 + if _str_to_date_time(emp_chg_inst_row['end_date']) >= start_date: # 
DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - continue - # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + if last_end_date >= start_date: + continue + # DCF施設統合マスタの適用月度の前月末日 < 適用開始日、または適用終了日 < 適用開始日の場合、N(論理削除レコード)に設定する _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], emp_chg_inst_row['start_date']) From 9ae92789678fad933360267f7f675b50b5074348 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 1 Jun 2023 15:58:52 +0900 Subject: [PATCH 032/103] =?UTF-8?q?feat:=20=E3=83=AD=E3=82=B0=E5=87=BA?= =?UTF-8?q?=E5=8A=9B=E3=83=A1=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8=E3=82=92?= =?UTF-8?q?=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dcf_inst_merge/integrate_dcf_inst_merge.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 20ac5fcc..832cc20c 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -40,7 +40,7 @@ def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: if _update_dcf_inst_merge(db, 0) > 0: # DCF施設統合マスタの過去分の洗い替え for row in enabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], 0) return enabled_dst_inst_merge_records @@ -52,7 +52,7 @@ def _set_disabled_dct_inst_merge(db: Database): if _update_dcf_inst_merge(db, 1) > 0: # DCF施設統合マスタの過去分の洗い替え for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], 1) def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): @@ -356,15 +356,16 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: logging_sql(logger, sql) logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug(f'DCF施設統合マスタの{log_message} 失敗') + logger.debug(f'DCF施設統合マスタの有効データを{log_message} 失敗') raise e return res.rowcount -def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, muko_flg: int): # dcf_inst_mergeをUPDATE try: + log_message = '' if muko_flg == 0 else '戻し' elapsed_time = ElapsedTime() sql = """ UPDATE @@ -384,9 +385,9 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'移行先DCF施設コードの{log_message}更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('移行先DCF施設コードの更新に失敗') + logger.debug(f'移行先DCF施設コードの{log_message}更新に失敗') raise e From b9e4a73b941909476e503768647abc7fa57bd3d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 2 Jun 2023 09:51:16 +0900 Subject: [PATCH 033/103] =?UTF-8?q?feat:=20=E3=83=AD=E3=82=B0=E5=87=BA?= 
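# A plain-Python restatement of the revised emp_chg_inst branching above: when the
# current end date is on or after the start date, the record is closed at the end of
# the month before the merge month; if that shortened period would be empty, or if
# the end date was already before the start date, the record is logically deleted
# instead. Dates are 'YYYYMMDD' strings as elsewhere in this batch; the return
# labels are illustrative only.
from datetime import datetime, timedelta

def _emp_chg_inst_action(start_date: str, end_date: str, tekiyo_month: str) -> str:
    fmt = '%Y%m%d'
    start = datetime.strptime(start_date, fmt)
    end = datetime.strptime(end_date, fmt)
    month_first = datetime.strptime(tekiyo_month + '01', fmt)
    last_end = month_first - timedelta(days=1)
    if end >= start:
        if last_end >= start:
            return 'update_end_date'           # close at the previous month's last day
        return 'update_end_date_then_disable'  # shortened period is empty -> also disable
    return 'disable'                           # end date already before start date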
=?UTF-8?q?=E5=8A=9B=E9=83=A8=E5=88=86=E3=82=92=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 832cc20c..b4b6e4e7 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -40,7 +40,7 @@ def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: if _update_dcf_inst_merge(db, 0) > 0: # DCF施設統合マスタの過去分の洗い替え for row in enabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], 0) + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], '') return enabled_dst_inst_merge_records @@ -52,7 +52,7 @@ def _set_disabled_dct_inst_merge(db: Database): if _update_dcf_inst_merge(db, 1) > 0: # DCF施設統合マスタの過去分の洗い替え for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], 1) + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): @@ -362,10 +362,9 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: return res.rowcount -def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, muko_flg: int): +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, log_message: str): # dcf_inst_mergeをUPDATE try: - log_message = '' if muko_flg == 0 else '戻し' elapsed_time = ElapsedTime() sql = """ UPDATE From 9c8e3d1bac519a1c82ebbe9170ad20ff8e96dc72 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 18:15:11 +0900 Subject: [PATCH 034/103] =?UTF-8?q?=E5=8F=97=E9=A0=98=E6=99=82=E3=81=AE?= =?UTF-8?q?=E5=BD=A2=E5=BC=8F=E3=81=8B=E3=82=89S3download=E3=83=BB?= =?UTF-8?q?=E8=A7=A3=E5=87=8D=E3=83=BB=E7=99=BB=E9=8C=B2=E3=83=BB=E3=83=90?= =?UTF-8?q?=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97=E3=81=AE=E4=B8=80?= =?UTF-8?q?=E9=80=A3=E5=87=A6=E7=90=86=E3=82=92=E7=A2=BA=E8=AA=8D=E3=81=99?= =?UTF-8?q?=E3=82=8B=E7=94=A8=E3=81=AE=E3=83=86=E3=82=B9=E3=83=88=E3=83=87?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E3=82=92=E3=82=B5=E3=83=96=E3=83=95=E3=82=A9?= =?UTF-8?q?=E3=83=AB=E3=83=80=E3=81=AB=E7=A7=BB=E5=8B=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bio_slip_data_202304270000.gz | Bin .../fcl_mst_202304270000.gz | Bin .../hld_mst_202304270000.gz | Bin .../lot_num_mst_202304270000.gz | Bin .../mdb_conv_mst_202304270000.gz | Bin .../mkr_org_horizon_202304270000.gz | Bin .../org_cnv_mst_202304270000.gz | Bin .../phm_prd_mst_202304270000.gz | Bin .../phm_price_mst_202304270000.gz | Bin .../slip_data_202304270000.gz | Bin .../stock_slip_data_202304270000.gz | Bin .../tran_kbn_mst_202304270000.gz | Bin .../vop_hco_merge_202304270000.gz | Bin .../whs_customer_mst_202304270000.gz | Bin .../whs_mst_202304270000.gz | Bin 15 files changed, 0 insertions(+), 0 deletions(-) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/bio_slip_data_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/fcl_mst_202304270000.gz 
(100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/hld_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/lot_num_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/mdb_conv_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/mkr_org_horizon_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/org_cnv_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/phm_prd_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/phm_price_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/slip_data_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/stock_slip_data_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/tran_kbn_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/vop_hco_merge_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/whs_customer_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/whs_mst_202304270000.gz (100%) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz From 368fef1f5bc68de02f0042aa48112160e1073290 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 18:16:19 +0900 Subject: [PATCH 035/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E5=9C=A8=E5=BA=AB?= =?UTF-8?q?=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 97 +++++++++++++------ .../testdata/stock_slip_data_202304280000.tsv | 5 + .../testdata/stock_slip_data_202304290000.tsv | 5 + .../tests/testing_vjsk_utility.py | 91 +++++++++++++++++ 4 files changed, 166 insertions(+), 32 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/testing_vjsk_utility.py diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 8dac0688..e1d3571d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -4,22 +4,28 @@ import pytest from src.aws.s3 import VjskReceiveBucket from src.batch.common.batch_context import BatchContext +from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager # from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, _import_file_to_db) from src.db.database import Database +# from tests.testing_vjsk_utility import create_vjsk_assertion_dictionary +from tests.testing_vjsk_utility import (assert_table_results, + create_vjsk_assertion_list) class TestImportFileToDb: db: Database batch_context: BatchContext - test_file_path: str + test_file_path_import_all: str + test_file_path_load_individual: str @pytest.fixture(autouse=True, scope='function') def pre_test(self, database: Database): """テスト実行前後処理""" # setup - self.test_file_path = 
path.join(path.dirname(__file__), "testdata") + self.test_file_path_import_all = path.join(path.dirname(__file__), "testdata", "TestImportFileToDb") + self.test_file_path_load_individual = path.join(path.dirname(__file__), "testdata") self.batch_context = BatchContext.get_instance() @@ -69,7 +75,7 @@ class TestImportFileToDb: "lot_num_mst_202304270000.gz" ] for test_file in test_files: - file_name = path.join(self.test_file_path, test_file) + file_name = path.join(self.test_file_path_import_all, test_file) key = f"{receive_folder}/{test_file}" s3_client.upload_file(file_name, bucket_name, key) @@ -93,8 +99,6 @@ class TestImportFileToDb: received_s3_files = _check_received_files() _import_file_to_db(received_s3_files) - # self.db.connect() - # # 検証 (卸在庫データファイル) # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) @@ -116,36 +120,65 @@ class TestImportFileToDb: key = f"{receive_folder}/{test_file}" s3_client.delete_object(Bucket=bucket_name, Key=key) - # def test_load_stock_slip_data_ok(self, mapper): - # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + def test_load_01_stock_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # # setup - # self.batch_context.is_vjsk_stock_import_day = True - # self.db.execute(f"truncate table {table_name_src}") + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") - # # assertion (insert) - # target_dict = { - # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, - # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304280000.tsv") - # } - # VjskDataLoadManager.load(target_dict) + # assertion1 (insert 4row) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 4 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 4 + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "stock_slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) - # # assertion (update) - # target_dict = { - # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, - # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304290000.tsv") - # } - # VjskDataLoadManager.load(target_dict) + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) - # result_org = self.db.execute(f"select * from {table_name_org}") - # assert result_org.rowcount == 4 - # result_src1 = self.db.execute(f"select * from {table_name_src}") - # assert result_src1.rowcount == 6 + # assertion2 (update 2row +insert 2row) - # # teardown + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, 
"stock_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv new file mode 100644 index 00000000..cc6c3902 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"D463630101 23022849630021900003500000 セトロタイドチユウシヤヨウ0.25MG 1V" "363" "01" "01 " "230228" "496300219" "000035" "0" "0000" " セトロタイドチユウシヤヨウ0.25MG 1V" "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000059" "59" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001270000040000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "026 " "230228" "496300127" "000004" "0" "0000" "01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000060" "60" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv new file mode 100644 index 00000000..0b180c24 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" 
"err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001100000020000001ゴナールエフヒカチユウペン300 1トウ40 " "253" "01" "027 " "230228" "496300110" "000002" "0" "0000" "01ゴナールエフヒカチユウペン300 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000063" "63" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001270000110000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "027 " "230228" "496300127" "000011" "0" "0000" "01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000064" "64" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py new file mode 100644 index 00000000..1849e325 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -0,0 +1,91 @@ + + +import csv +from datetime import datetime + + +def create_vjsk_assertion_list(file_path: str) -> list: + """DB登録期待値リストを作成する + + Args: + file_path (str): DB登録期待値ファイル(tsvファイル)のパス + ※DB登録期待値ファイルの前提 + 受領データファイルと同じ + BOM付きtsv形式 + 一行目はカラム名になっているヘッダ行 + + Returns: + List(dict) DB登録期待値辞書リスト + """ + with open(file_path, encoding='utf_8_sig', newline='') as tsv_file: + header = tsv_file.readline().strip('\n').replace('"', '').split('\t') + reader = csv.DictReader(tsv_file, fieldnames=header, delimiter='\t') + rows = [r for r in reader] + + # DB抽出値と比較できるように、リテラル値をDB抽出値と同じデータフォーマットに変換 + for row in rows: + for k, v in row.items(): + converted_value = v + if v == 'NULL': + converted_value = None + if is_valid_date_format(v, '%Y/%m/%d') is True: # YYYY/MM/DD + converted_value = datetime.strptime(v, '%Y/%m/%d').date() + if is_valid_date_format(v, '%Y-%m-%d') is True: # YYYY-MM-DD + converted_value = datetime.strptime(v, '%Y-%m-%d').date() + if is_valid_date_format(v, '%Y/%m/%d %H:%M:%S') is True: # YYYY/MM/DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y/%m/%d %H:%M:%S') + if is_valid_date_format(v, '%Y-%m-%d %H:%M:%S') is True: # YYYY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y-%m-%d %H:%M:%S') + if is_valid_date_format(v, '%y-%m-%d %H:%M:%S') is True: # YY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%y-%m-%d %H:%M:%S') + + row[k] = converted_value + + return rows + + +def is_valid_date_format(date_str: str, date_format): + """日付文字列が、与えられたフォーマットにマッチするかを検査する + + Args: + date_str (str): 日付文字列 + date_format (str, optional): 日付のフォーマット + + Returns: + _type_: 正しい日付文字列の場合、True、それ以外はFalse + """ + try: + datetime.strptime(date_str, date_format) + return True + except ValueError: + return False + + +def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_name: list = None) -> None: + """テーブル同士の取得結果突き合わせ + + 
Args: + actual_rows (list[dict]): テスト結果の辞書リスト + expect_rows (list[dict]): 期待値の辞書リスト + ignore_col_name (list): 比較を無視するDBのカラム名. Default None. + """ + # 取得件数が一致すること + assert len(actual_rows) == len(expect_rows) + + line_number = 0 + # 1行ずつ調査 + for actual_row, expect_row in zip(actual_rows, expect_rows): + line_number += 1 + # 1カラムずつ調査 + for actual_col_name, expect_col_name in zip(actual_row, expect_row): + # テストメソッド側で個別に確認するものはスキップさせる + if ignore_col_name is not None and actual_col_name in ignore_col_name: + continue + else: + actual_value = actual_row[actual_col_name] + expect_value = expect_row[expect_col_name] + if isinstance(actual_value, (int)): + expect_value = int(expect_value) + if isinstance(actual_value, (float)): + expect_value = float(expect_value) + assert actual_value == expect_value, f'{line_number}行目:{actual_col_name}が、期待値と一致しませんでした' From 36c18835f1641ab9da05d15c198b626791558921 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 20:43:51 +0900 Subject: [PATCH 036/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E8=B2=A9=E5=A3=B2?= =?UTF-8?q?=E3=83=87=E3=83=BC=E3=82=BF=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=80=81=EF=BC=B6=E5=8D=B8=E7=B5=84=E7=B9=94=E5=A4=89=E6=8F=9B?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=80=81=EF=BC=B6=E6=96=BD=E8=A8=AD?= =?UTF-8?q?=E7=B5=B1=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 56 +++--- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 189 ++++++++++++++++++ .../testdata/org_cnv_mst_202304280000.tsv | 5 + .../testdata/org_cnv_mst_202304290000.tsv | 5 + .../testdata/slip_data_202304280000.tsv | 5 + .../testdata/slip_data_202304290000.tsv | 5 + .../testdata/vop_hco_merge_202304280000.tsv | 5 + .../testdata/vop_hco_merge_202304290000.tsv | 5 + .../tests/testing_vjsk_utility.py | 13 +- 9 files changed, 259 insertions(+), 29 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 8b372dc2..352e2f91 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -201,7 +201,7 @@ class VjskReceiveFileMapper: ,t.tksnbk_kbn ,t.fcl_exec_kbn ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.sales AS t @@ -287,7 +287,7 @@ class VjskReceiveFileMapper: ,tksnbk_kbn=t.tksnbk_kbn ,fcl_exec_kbn=t.fcl_exec_kbn ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dT=SYSDATE() ; @@ -326,7 +326,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 
受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.hld_mst_v AS t @@ -340,7 +340,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -389,7 +389,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.whs_mst_v AS t @@ -408,7 +408,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -517,7 +517,7 @@ class VjskReceiveFileMapper: ,t.start_date ,t.end_date ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.mkr_org_horizon_v AS t @@ -566,7 +566,7 @@ class VjskReceiveFileMapper: ,start_date=t.start_date ,end_date=t.end_date ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -605,7 +605,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.org_cnv_mst_v AS t @@ -619,7 +619,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -654,7 +654,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.tran_kbn_mst_v AS t @@ -666,7 +666,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -731,7 +731,7 @@ class VjskReceiveFileMapper: ,t.admin_kbn ,t.fcl_type ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.fcl_mst_v AS t @@ -758,7 +758,7 @@ class VjskReceiveFileMapper: ,admin_kbn=t.admin_kbn ,fcl_type=t.fcl_type ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -831,7 +831,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.phm_prd_mst_v AS t @@ -862,7 +862,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -899,7 +899,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.phm_price_mst_v AS t @@ -912,7 +912,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -995,7 +995,7 @@ class VjskReceiveFileMapper: ,t.postal_cd ,t.tel_num ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.whs_customer_mst_v AS t @@ -1015,7 +1015,7 @@ class 
VjskReceiveFileMapper: ,postal_cd=t.postal_cd ,tel_num=t.tel_num ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -1048,7 +1048,7 @@ class VjskReceiveFileMapper: ,t.reliability ,t.start_date ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.mdb_cnv_mst_v AS t @@ -1059,7 +1059,7 @@ class VjskReceiveFileMapper: ,reliability=t.reliability ,start_date=t.start_date ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -1134,7 +1134,7 @@ class VjskReceiveFileMapper: ,t.err_flg9 ,t.err_flg10 ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.whole_stock AS t @@ -1166,7 +1166,7 @@ class VjskReceiveFileMapper: ,err_flg9=t.err_flg9 ,err_flg10=t.err_flg10 ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; @@ -1339,7 +1339,7 @@ class VjskReceiveFileMapper: ,t.tksnbk_kbn ,t.fcl_exec_kbn ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.bio_sales AS t @@ -1420,7 +1420,7 @@ class VjskReceiveFileMapper: ,tksnbk_kbn=t.tksnbk_kbn ,fcl_exec_kbn=t.fcl_exec_kbn ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; @@ -1449,7 +1449,7 @@ class VjskReceiveFileMapper: ,t.lot_num ,t.expr_dt ,t.frst_mov_dt - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.lot_num_mst AS t @@ -1458,7 +1458,7 @@ class VjskReceiveFileMapper: ,lot_num=t.lot_num ,expr_dt=t.expr_dt ,frst_mov_dt=t.frst_mov_dt - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index e1d3571d..fc3cee21 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -182,3 +182,192 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_02_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": 
mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_03_org_cnv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_ORG_CNV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.org_cd = o.org_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_04_vop_hco_merge_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_VOP_HCO_MERGE) + table_name_src = mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE) + + # setup + 
self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.apply_dt = o.apply_dt)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv new file mode 100644 index 00000000..208d1a20 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"042" "01" "361007" "0" "300006657" "20230401" "99991231" "0" "0" "23-04-12 11:24:06" "23-04-12 11:24:06" +"042" "01" "381207" "0" "300006658" "20230401" "99991231" "0" "0" "23-04-12 11:24:27" "23-04-12 11:24:27" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" "300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv new file mode 100644 index 00000000..c91eafee --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" 
"300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" +"080" "00" "21807" "2" "300006649" "20230401" "99991231" "0" "0" "23-04-12 10:49:23" "23-04-12 10:49:23" +"080" "00" "25110" "0" "300005251" "20000101" "99991231" "0" "9" "21-03-11 14:59:47" "23-04-12 10:47:42" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv new file mode 100644 index 00000000..d9b11f17 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"recvdata" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D4420202011611A4 0183733 23030133625911102303 4963500230000020003110000000622000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362591" "000002" "00031100" "0000062200" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000022" "29" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "2" "31100" "62200" "31438" "62876" "35309" "70618" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4420202011611A4 0183733 23030133625921102303 4963500230000080003110000002488000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362592" "000008" "00031100" "0000248800" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000023" "30" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "31100" "248800" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" 
"0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv new file mode 100644 index 00000000..5299839a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"recvdata" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " 
"161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202041131A1 1409581 23030106357711102303 4963500230000070002966000002076200000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトウホクロウサイビヨウイン 00009818563センダイシアオバクダイノハラ " "202" "02" "041131A1 " "1409581 " "496350023" "110" "230301" "20230301" "0635771" "000007" "00029660" "0000207620" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トウホクロウサイビヨウイン " "9818563センダイシアオバクダイノハラ " "センダイシアオバクダイノハラ " "9818563" "" "000000" " " "20230227" "J" "VJSK_J_MERCK_2023022" "J2023022700000128" "135" "20230301" "202303142041" "110" "売上" "04113" "300000354" "北仙台支店" "" "200000016" "株式会社スズケン" "1409581 " "670232828063007745" "ドクリツギョウセイホウジンロウドウシャケンコウアンゼンキコウ トウホクロウサイビョウイン" "独立行政法人労働者健康安全機構 東北労災病院" "981-0911 宮城県仙台市青葉区台原4−3−21" "496350023" "アービタックス 注射剤 100mg 1VIAL" "7" "29660" "207620" "31438" "220066" "35309" "247163" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202141234B1 2607506 23030109289511102303 4963500230000100003036600003036600000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vヨコスカキヨウサイビヨウイン 00002380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "202" "02" "141234B1 " "2607506 " "496350023" "110" "230301" "20230301" "0928951" "000010" "00030366" "0000303660" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ヨコスカキヨウサイビヨウイン " "2380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "カナガワケンヨコスカシヨネガハマドオリ1-16 " "2380011" "" "000000" " " "20230228" "J" "VJSK_J_MERCK_2023022" "J2023022800000094" "101" "20230301" "202303142041" "110" "売上" "14123" "300000274" "磯子支店" "" "200000016" "株式会社スズケン" "2607506 " "670236609488110605" "コッカコウムインキョウサイクミアイレンゴウカイ ヨコスカキョウサイビョウイン" "国家公務員共済組合連合会 横須賀共済病院" "238-0011 神奈川県横須賀市米が浜通1−16" "496350023" "アービタックス 注射剤 100mg 1VIAL" "10" "30366" "303660" "31438" "314380" "35309" "353090" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" 
"0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv new file mode 100644 index 00000000..dca02347 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000001" "900000001" "20230509" "事由01" +"100000002" "900000002" "20230509" "事由02" +"100000003" "900000003" "20230509" "事由03" +"100000004" "900000004" "20230509" "事由04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv new file mode 100644 index 00000000..c99d9f76 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000003" "900000003" "20230509" "DAY-2事由03" +"100000004" "900000004" "20230509" "DAY-2事由04" +"100000005" "900000005" "20230509" "DAY-2事由05" +"100000006" "900000006" "20230509" "DAY-2事由06" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 1849e325..4aec424d 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -1,6 +1,7 @@ import csv +import decimal from datetime import datetime @@ -84,8 +85,18 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor else: actual_value = actual_row[actual_col_name] expect_value = expect_row[expect_col_name] + + # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする if isinstance(actual_value, (int)): expect_value = int(expect_value) if isinstance(actual_value, (float)): expect_value = float(expect_value) - assert actual_value == expect_value, f'{line_number}行目:{actual_col_name}が、期待値と一致しませんでした' + if isinstance(actual_value, (decimal.Decimal)): + expect_value = decimal.Decimal(expect_value) + # if type(actual_value) == datetime.date: + if type(actual_value).__name__ == "date": + if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD + expect_value = datetime.strptime(expect_value, '%Y%m%d').date() + + # 検証 + assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 4b21279d5f8b5f53cf2061f0bce125d07d794d74 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 23:48:52 +0900 Subject: [PATCH 037/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E3=83=9E=E3=82=B9?= =?UTF-8?q?=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/whs_mst_202304280000.tsv | 5 ++ .../testdata/whs_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py 
index fc3cee21..62d6245c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -371,3 +371,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_05_whs_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_whs_cd = o.v_whs_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv new file mode 100644 index 00000000..e9280e36 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000002" "0" "株式会社モロオ" "カナ01" "モロオ" "1110001" "住所01" "ジュウショ01" "00-0000-0001" "0" "20000101" "99991231" "20" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000005" "0" "岩渕薬品株式会社" "カナ02" "岩渕薬品" "1110002" "住所02" "ジュウショ02" "00-0000-0002" "0" "20000101" "99991231" "50" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000009" "0" "株式会社マルタケ" "カナ03" "マルタケ" "1110003" "住所03" "ジュウショ03" "00-0000-0003" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "カナ04" "ファイネス" "1110004" "住所04" "ジュウショ04" "00-0000-0004" "0" 
"20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv new file mode 100644 index 00000000..c16f188e --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000009" "0" "株式会社マルタケ" "" "マルタケ" "" "" "" "" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "" "ファイネス" "" "" "" "" "0" "20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000011" "0" "鍋林株式会社" "" "鍋林" "" "" "" "" "0" "20000101" "99991231" "110" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000012" "0" "岡野薬品株式会社" "" "岡野薬品" "" "" "" "" "0" "20000101" "99991231" "120" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" From fa0ff77851cba603fa753cde9754d6eeffaa566a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 23:57:08 +0900 Subject: [PATCH 038/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E3=83=9B=E3=83=BC?= =?UTF-8?q?=E3=83=AB=E3=83=87=E3=82=A3=E3=83=B3=E3=82=B0=E3=82=B9=E3=83=9E?= =?UTF-8?q?=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/hld_mst_202304280000.tsv | 5 ++ .../testdata/hld_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 62d6245c..1c200856 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -434,3 +434,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_06_hld_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_HLD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_HLD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + 
"condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_hld_cd = o.v_hld_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv new file mode 100644 index 00000000..2c4a8c1f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "卸ホールディングス名01-1" "オロシホールディングスメイ01-1" "卸名01-1" "20230101" "99991231" "1" "0" "23-05-09 12:00:01" "23-05-09 13:00:01" +"10001" "2" "卸ホールディングス名01-2" "オロシホールディングスメイ01-2" "卸名01-2" "20230102" "99991231" "1" "0" "23-05-09 12:00:02" "23-05-09 13:00:02" +"10001" "3" "卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230103" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230104" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv new file mode 100644 index 00000000..9728bb5f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" "卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230101" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230102" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" +"10001" "5" "卸ホールディングス名01-5" "オロシホールディングスメイ01-5" "卸名01-5" "20230103" "99991231" "1" "0" "23-05-09 12:00:05" "23-05-09 13:00:05" +"10002" "1" "卸ホールディングス名02-1" "オロシホールディングスメイ02-1" "卸名01-1" "20230104" "99991231" "1" "0" "23-05-09 12:00:06" "23-05-09 13:00:06" From bf7f76e3478446de552f98d244f5b8bee4cebf9c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Sat, 3 Jun 2023 01:00:47 +0900 Subject: [PATCH 039/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E6=96=BD=E8=A8=AD?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 67 +++++++++++++++++++ .../testdata/fcl_mst_202304280000.tsv | 5 ++ 
.../testdata/fcl_mst_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 17 +++-- 4 files changed, 88 insertions(+), 6 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 1c200856..73ad96de 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -497,3 +497,70 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_07_fcl_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_FCL_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_FCL_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv new file mode 100644 index 00000000..9615f392 --- /dev/null +++ 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229430760653825" "0" "20000101" "99991231" "" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "" "0" "700-0023 岡山県岡山市北区駅前町1−6−20" "オカヤマケン オカヤマシキタク エキマエチョウ1-6-20" "700-0023" "33" "岡山県" "岡山市北区" "駅前町1−6−20" "0862236468" "33101" "30" "1" "" "" +"670229435466662922" "0" "20000101" "99991231" "" "医療法人社団仁卓会 ほりかわ歯科クリニック" "イリョウホウジンシャダンジンタクカイ ホリカワシカクリニック" "ほりかわ歯科クリニック (医社)" "ホリカワシカクリニック (イシャ)" "" "0" "675-0101 兵庫県加古川市平岡町新在家1573−1−4F" "ヒョウゴケン カコガワシ ヒラオカチョウシンザイケ1573-1-4F" "675-0101" "28" "兵庫県" "加古川市" "平岡町新在家1573−1−4F" "0794244617" "28210" "30" "1" "" "" +"670229435785430019" "0" "20000101" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000101" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" "28201" "20" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv new file mode 100644 index 00000000..b4c05930 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229435785430019" "0" "20000202" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000202" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" "28201" "20" "1" "" "" +"670229463350395910" "0" "20000101" "99991231" "" "こうだ歯科" "コウダシカ" "こうだ歯科" "コウダシカ" "" "0" "770-0831 徳島県徳島市寺島本町西1−10" "トクシマケン トクシマシ テラシマホンチョウニシ1-10" "770-0831" "36" "徳島県" "徳島市" "寺島本町西1−10" "0886552625" "36201" "30" "1" "" "" +"670229489380246545" "0" "20020521" "99991231" "" "社会医療法人社団埼玉巨樹の会 狭山中央病院" "シャカイイリョウホウジンシャダンサイタマキョジュノカイ サヤマチュウオウビョウイン" "狭山中央病院 (社社)" "サヤマチュウオウビョウイン (シャシャ)" "" "0" "350-1306 埼玉県狭山市富士見2−19−35" "サイタマケン サヤマシ フジミ2-19-35" "350-1306" "11" "埼玉県" "狭山市" "富士見2−19−35" "0429597111" "11215" "10" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 4aec424d..a7982a05 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -62,7 +62,7 @@ def is_valid_date_format(date_str: str, date_format): return False -def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], 
ignore_col_name: list = None) -> None: +def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_names: list = None, force_cast_to_str_columns: list = []) -> None: """テーブル同士の取得結果突き合わせ Args: @@ -80,23 +80,28 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor # 1カラムずつ調査 for actual_col_name, expect_col_name in zip(actual_row, expect_row): # テストメソッド側で個別に確認するものはスキップさせる - if ignore_col_name is not None and actual_col_name in ignore_col_name: + if ignore_col_names is not None and actual_col_name in ignore_col_names: continue else: actual_value = actual_row[actual_col_name] expect_value = expect_row[expect_col_name] # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする - if isinstance(actual_value, (int)): + if actual_col_name in force_cast_to_str_columns: + if type(expect_value).__name__ == 'date': + expect_value = expect_value.strftime('%Y-%m-%d') + elif isinstance(actual_value, (int)): expect_value = int(expect_value) - if isinstance(actual_value, (float)): + elif isinstance(actual_value, (float)): expect_value = float(expect_value) - if isinstance(actual_value, (decimal.Decimal)): + elif isinstance(actual_value, (decimal.Decimal)): expect_value = decimal.Decimal(expect_value) # if type(actual_value) == datetime.date: - if type(actual_value).__name__ == "date": + elif type(actual_value).__name__ == "date": if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD expect_value = datetime.strptime(expect_value, '%Y%m%d').date() + elif actual_value is None and expect_value == "": + expect_value = None # 検証 assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 0b2c6fe1512cc451a1fb68012e19e17163ab8a48 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Sat, 3 Jun 2023 01:51:55 +0900 Subject: [PATCH 040/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E3=83=A1=E3=83=BC?= =?UTF-8?q?=E3=82=AB=E3=83=BC=E5=8D=B8=E7=B5=84=E7=B9=94=E5=B1=95=E9=96=8B?= =?UTF-8?q?=E8=A1=A8=E3=80=80=E2=80=BB=E6=A4=9C=E8=A8=BC=E4=B8=AD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/mkr_org_horizon_202304280000.tsv | 5 ++ .../testdata/mkr_org_horizon_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 2 +- 4 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 73ad96de..fcdd5682 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -564,3 +564,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_08_mkr_org_horizon_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MKR_ORG_HORIZON) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 
4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_cd_1 = o.v_cd_1 and s.v_cd_2 = o.v_cd_2)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv new file mode 100644 index 00000000..3755e224 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003138" "北関東甲信越営業本部" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" 
"その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"a" "300003144" "メディカル営業本部1" "1" "b" "300003202" "東海スタッフ医療2" "2" "c" "300003203" "東海スタッフ医療3" "3" "d" "300003204" "東海スタッフ医療4" "4" "e" "300003205" "東海スタッフ医療5" "5" "f" "300003206" "東海スタッフ医療6" "6" "g" "300003207" "東海スタッフ医療7" "7" "h" "300003208" "東海スタッフ医療8" "8" "i" "300003209" "東海スタッフ医療9" "9" "j" "300003210" "東海スタッフ医療10" "10" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv new file mode 100644 index 00000000..71f9d85b --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003144" "メディカル営業本部" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" "その他営業本部卸" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003143" "医薬営業統括本部" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "200000007" "20190401" "20190930" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index a7982a05..15813e5a 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -71,7 +71,7 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor ignore_col_name (list): 比較を無視するDBのカラム名. Default None. 
""" # 取得件数が一致すること - assert len(actual_rows) == len(expect_rows) + assert len(actual_rows) == len(expect_rows), f'レコード件数が一致しません。DBレコード数 : {len(actual_rows)} 期待値 : {len(expect_rows)}' line_number = 0 # 1行ずつ調査 From 993c0497e5f786605404e818da0bf6ef1b2faecb Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 15:28:25 +0900 Subject: [PATCH 041/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E3=83=A1=E3=83=BC?= =?UTF-8?q?=E3=82=AB=E3=83=BC=E5=8D=B8=E7=B5=84=E7=B9=94=E5=B1=95=E9=96=8B?= =?UTF-8?q?=E8=A1=A8=E3=80=80=E2=80=BBPK=E9=A0=85=E7=9B=AE=E3=81=8C?= =?UTF-8?q?=E3=81=AA=E3=81=84=E3=83=86=E3=83=BC=E3=83=96=E3=83=AB=E3=81=AF?= =?UTF-8?q?INSERT=E3=82=92=E6=9C=9F=E5=BE=85=E5=80=A4=E3=81=A8=E3=81=99?= =?UTF-8?q?=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index fcdd5682..ef3c973a 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1,3 +1,4 @@ +import time from os import path import pytest @@ -597,7 +598,12 @@ class TestImportFileToDb: # srcテーブル結果が期待値通りかを突合 assert_table_results(result_src, assert_list, ignore_columns) - # assertion2 (update 2row +insert 2row) + # assertion2 (update 0row +insert 4row) ※PK項目がないテーブルなのですべてinsertになる + + assetion1_done_dt = self.db.execute_select("select SYSDATE()")[0]["SYSDATE()"] + + # assertion2でinsertされたレコードをdwh_upd_dtで判断するため、assertion1からの実行間隔を明確に空けるためにスリープを挟む + time.sleep(3) # 処理実行 target_dict = { @@ -616,14 +622,14 @@ class TestImportFileToDb: assert_table_results(result_org, assert_list, ignore_columns) # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 result_src = self.db.execute_select( - f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_cd_1 = o.v_cd_1 and s.v_cd_2 = o.v_cd_2)") + f"select * from {table_name_src} where dwh_upd_dt > :dt_value", {"dt_value": assetion1_done_dt}) # 突合から除外する項目 ignore_columns = ['dwh_upd_dt'] # srcテーブル結果が期待値通りかを突合 assert_table_results(result_src, assert_list, ignore_columns) - # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + # srcテーブル結果のレコード件数 (insert 4row + update 0row + insert 4row = 8row) ※PK項目がないテーブルなのですべてinsertになる result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") - assert result_src_count[0]['count(*)'] == 6 + assert result_src_count[0]['count(*)'] == 8 # teardown From 8e186cc3f310035630e99f597df719d643258446 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 16:04:59 +0900 Subject: [PATCH 042/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E5=8F=96=E5=BC=95?= =?UTF-8?q?=E5=8C=BA=E5=88=86=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/tran_kbn_mst_202304280000.tsv | 5 ++ .../testdata/tran_kbn_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index ef3c973a..d530f3ce 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -633,3 +633,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 8 # teardown + + def test_load_08_tran_kbn_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_TRAN_KBN_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_tran_cd = o.v_tran_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv new file mode 100644 index 00000000..cb5d5ff8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "Veeva取引区分名01-1" "20230101" "99991231" "1" "0" "2023-05-09 12:00:01" "2023-05-09 13:00:01" +"10001" "2" "Veeva取引区分名01-2" "20230102" "99991231" "1" "0" "2023-05-09 12:00:02" "2023-05-09 13:00:02" +"10001" "3" "Veeva取引区分名01-3" "20230103" "99991231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" 
"Veeva取引区分名01-4" "20230104" "99991231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv new file mode 100644 index 00000000..0813b3df --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" "Veeva取引区分名01-3" "20230201" "20301231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" "Veeva取引区分名01-4" "20230202" "20301231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" +"10001" "5" "Veeva取引区分名01-5" "20230203" "20301231" "1" "0" "2023-05-09 12:00:05" "2023-05-09 13:00:05" +"10002" "1" "Veeva取引区分名02-1" "20230204" "20301231" "1" "0" "2023-05-09 12:00:06" "2023-05-09 13:00:06" From 800f3eba45a1c6d531e8c63e1db3aad2e7f51a9a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 16:50:32 +0900 Subject: [PATCH 043/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E8=A3=BD=E5=93=81?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 65 ++++++++++++++++++- .../testdata/phm_prd_mst_202304280000.tsv | 5 ++ .../testdata/phm_prd_mst_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 13 +++- 4 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index d530f3ce..47601d58 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -634,7 +634,7 @@ class TestImportFileToDb: # teardown - def test_load_08_tran_kbn_mst_ok(self, mapper): + def test_load_09_tran_kbn_mst_ok(self, mapper): table_name_org = mapper.get_org_table(mapper.CONDKEY_TRAN_KBN_MST) table_name_src = mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST) @@ -696,3 +696,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_10_phm_prd_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_prd_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 
突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_prd_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.prd_cd = o.prd_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv new file mode 100644 index 00000000..9c4d4d86 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496201110" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. (30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20080101" "20190930" "140" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496201127" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. 
(30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20070401" "20190930" "150" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv new file mode 100644 index 00000000..29824385 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "20230601" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "20230602" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" +"496300134" "2" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "F19B0608" "ゴナールエフ皮下注ペン900" "Gonalef Pen 900 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V019" "900IU" "P011" "1PEN" "1" "20230603" "0" "0" "20190501" "20190930" "110" "9" "19-04-23 16:38:36" "19-04-23 16:41:05" +"496301315" "0" "プロファシー 注射剤 5000IU 10VIAL" "" "F47706A0" "プロファシー 注 5000IU" "PROFASI M AMP. 
5000 IU (10)" "002" "プロファシー" "PF" "F002" "注射剤" "0000" "" "V020" "5000IU" "P005" "10VIAL" "10" "20230604" "0" "0" "20070401" "20190930" "160" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 15813e5a..441c5c9e 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -88,20 +88,31 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする if actual_col_name in force_cast_to_str_columns: + # DB項目(varchar)に日付型としてキャスト可能な値が期待値である場合、force_cast_to_str_columnsに基づいて強制的に文字列キャストする if type(expect_value).__name__ == 'date': expect_value = expect_value.strftime('%Y-%m-%d') elif isinstance(actual_value, (int)): + # DB項目(int)の場合、期待値もintにキャストする expect_value = int(expect_value) elif isinstance(actual_value, (float)): + # DB項目(float)の場合、期待値もfloatにキャストする expect_value = float(expect_value) elif isinstance(actual_value, (decimal.Decimal)): + # DB項目(decimal)の場合、期待値もdecimalにキャストする expect_value = decimal.Decimal(expect_value) - # if type(actual_value) == datetime.date: elif type(actual_value).__name__ == "date": + # DB項目(date)の場合、期待値("YYYYMMDD")もdateにキャストする if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD expect_value = datetime.strptime(expect_value, '%Y%m%d').date() elif actual_value is None and expect_value == "": + # DB項目値がNULLの場合、期待値が""であればNoneに置換する expect_value = None + elif actual_value == "0000-00-00" and expect_value == "": + # DB項目(date)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00"に置換する + expect_value = "0000-00-00" + elif actual_value == "0000-00-00 00:00:00" and expect_value == "": + # DB項目(datetime)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00 00:00:00"に置換する + expect_value = "0000-00-00 00:00:00" # 検証 assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 5ac63d6f4f34e30d5af701a79e9e039ff47c5c15 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 17:01:17 +0900 Subject: [PATCH 044/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E8=A3=BD=E5=93=81?= =?UTF-8?q?=E4=BE=A1=E6=A0=BC=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/phm_price_mst_202304280000.tsv | 5 ++ .../testdata/phm_price_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 47601d58..2fb187ae 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -759,3 +759,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_11_phm_price_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRICE_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST) + + 
# setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.phm_prd_cd = o.phm_prd_cd and s.phm_price_kind = o.phm_price_kind and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv new file mode 100644 index 00000000..b47ae0e6 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "01" "2" "10060.2" "20180401" "20190930" "10" "0" "18-03-07 09:33:37" "19-09-19 11:23:47" +"114430502" "01" "3" "10237.2" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv new file mode 100644 index 00000000..c5331854 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 
11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430601" "01" "2" "12362.4" "20180401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"114430601" "01" "3" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" From c33940eee1e4658af2283ebe9685a62c07a24814 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 19:48:07 +0900 Subject: [PATCH 045/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E5=8D=B8=E5=BE=97?= =?UTF-8?q?=E6=84=8F=E5=85=88=E6=83=85=E5=A0=B1=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../whs_customer_mst_202304280000.tsv | 5 ++ .../whs_customer_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 2fb187ae..ca63e86d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -822,3 +822,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_12_whs_customer_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.customer_cd 
= o.customer_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv new file mode 100644 index 00000000..0c23e674 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1002900000" "0" "20000101" "99991231" "11" "1131A2283316" "辻内科小児科医院               " "ツジナイカシヨウニカ イイン" "長崎県 佐世保市皆瀬町29                   " "ナガサキケン サセボシカイゼチヨウ 29" "8570144" "0956492319" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1005400000" "0" "20000101" "99991231" "12" "1211C3415515" "医療法人 愛恵会 佐世保愛恵病院       " "イリヨウホウジンアイケイカイサセボアイケイビ" "長崎県 佐世保市瀬戸越4丁目 2−15             " "ナガサキケン サセボシセトゴシ 4チヨウメ 2-15" "8570134" "0956493335" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所             " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院           " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv new file mode 100644 index 00000000..e5c22a27 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所2            " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院2          " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1008000000" "0" "20000101" "99991231" "11" "1131A2283316" "北原整形外科医院2              " "キタハラセイケイゲカ イイン" "長崎県 佐世保市瀬戸越町4丁目1298−1           " "ナガサキケン サセボシセトゴシチヨウ 1298-1" "8570135" "0956497773" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1009100000" "0" "20000101" "99991231" "11" "1121A2224212" "山口医院2                  " "ヤマグチイイン" "長崎県 佐世保市春日町29−14                " "ナガサキケン サセボシカスガチヨウ29-14" "8570011" "0956228610" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" From 1cbe72fbb03aac0d99004dfcc9eedabfe0de7ee3 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: 
Mon, 5 Jun 2023 22:50:20 +0900 Subject: [PATCH 046/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80MDB=E3=82=B3=E3=83=BC?= =?UTF-8?q?=E3=83=89=E5=A4=89=E6=8F=9B=E8=A1=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/mdb_conv_mst_202304280000.tsv | 5 ++ .../testdata/mdb_conv_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index ca63e86d..7d06b39c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -885,3 +885,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_13_mdb_conv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MDB_CONV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.hco_vid_v = o.hco_vid_v and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv new file mode 100644 index 00000000..f27cf3ce --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229780011959315" "1" "003410424" "0" "20020601" "0" "22-03-09 13:56:19" "22-03-09 13:56:19" +"670230081112654862" "0" "004101420" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:17:34" +"670230081112654862" "1" "004104997" "2" "20000101" "0" "22-03-09 14:17:33" "22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "3" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv new file mode 100644 index 00000000..fe0ab79c --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670230081112654862" "1" "004104997" "0" "20000101" "0" "22-03-09 14:17:33" "22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" +"670230100414841865" "1" "003636480" "0" "20000101" "0" "22-03-09 14:13:49" "22-03-09 14:13:49" +"670230330673742853" "0" "004804003" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 15:20:35" From 0849ae8365be9de8706bd91774fe45778d61e5ac Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 09:52:54 +0900 Subject: [PATCH 047/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E7=94=9F=E7=89=A9=E7=94=B1?= =?UTF-8?q?=E6=9D=A5=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/bio_slip_data_202304280000.tsv | 5 ++ .../testdata/bio_slip_data_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 7d06b39c..31717661 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -948,3 +948,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_14_bio_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_BIO_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = 
self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_org_kbn = o.slip_org_kbn)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv new file mode 100644 index 00000000..0c345954 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211JD1111311102503851400002304016427519111 496350122バベンチオテンテキ200MG 1V ソウゴウメデイカルニホンコウカンビツクバシ タカサキ 753 BAVB007 000003 000000 000000 " "296" "02" "11JD11113111025" "0385140000" "496350122" "111" "230401" "20230401" "6427519" "バベンチオテンテキ200MG 1V " "ソウゴウメデイカルニホンコウカンビ" "ツクバシ タカサキ 753 " "BAVB007 " "000003" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000126" "129" "20230401" "202305082041" "110" "売上" "11JD" "300001370" "川崎南支店" "" "200000007" "アルフレッサ株式会社" "0385140000" "670235967013012526" "医療法人社団こうかん会 日本鋼管病院" "イリョウホウジンシャダンコウカンカイ ニホンコウカンビョウイン" "210-0852 神奈川県川崎市川崎区鋼管通1−2−1" "496350122" "バベンチオ 
注射剤 200mg 1VIAL" "" "1" "" "3" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211G11111377452402930640002304016433215111 496300127ゴナ-ルエフヒカチユウペン450 1トウ セコム)オギクボビヨウイン トウキヨウト シブヤク ジングウマエ 1-5-1 GF4C001 000002 000000 000000 " "296" "02" "11G111113774524" "0293064000" "496300127" "111" "230401" "20230401" "6433215" "ゴナ-ルエフヒカチユウペン450 1トウ " "セコム)オギクボビヨウイン " "トウキヨウト シブヤク ジングウマエ 1-5-1 " "GF4C001 " "000002" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000127" "130" "20230401" "202305082041" "110" "売上" "11G1" "300001351" "杉並・中野支店" "" "200000007" "アルフレッサ株式会社" "0293064000" "670234652241314835" "医療法人財団荻窪病院 荻窪病院" "イリョウホウジンザイダンオギクボビョウイン オギクボビョウイン" "167-0035 東京都杉並区今川3−1−24" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "2" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv new file mode 100644 index 00000000..e7f85ba8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" 
"nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300110ゴナ-ルエフヒカチユウペン300 1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF3B013 000005 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300110" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン300 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF3B013 " "000005" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000130" "133" "20230401" "202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300110" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "1" "" "5" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300127ゴナ-ルエフヒカチユウペン450 1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF4B011 000010 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300127" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン450 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF4B011 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000131" "134" "20230401" 
"202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" From 35b56d0de331af4d3eab3f26c1b9472d07776df9 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 10:08:55 +0900 Subject: [PATCH 048/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E3=83=AD=E3=83=83=E3=83=88?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 65 ++++++++++++++++++- .../testdata/lot_num_mst_202304280000.tsv | 5 ++ .../testdata/lot_num_mst_202304290000.tsv | 5 ++ 3 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 31717661..5f373fe0 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1000,7 +1000,70 @@ class TestImportFileToDb: assert_table_results(result_org, assert_list, ignore_columns) # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 result_src = self.db.execute_select( - f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_org_kbn = o.slip_org_kbn)") + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_15_lot_num_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_LOT_NUM_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "lot_num_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": 
mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "lot_num_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.ser_num = o.ser_num and s.lot_num = o.lot_num)") # 突合から除外する項目 ignore_columns = ['dwh_upd_dt'] # srcテーブル結果が期待値通りかを突合 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv new file mode 100644 index 00000000..3acfb7cc --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA001" "20230331" "20210510" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA002" "20230331" "20210615" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv new file mode 100644 index 00000000..c7e86b9a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA005" "20231031" "20210927" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA006" "20240131" "20211025" "23-05-08 20:40:41" "batch" From 607784bc50a8597647e145b74812d175f578460d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 6 Jun 2023 17:55:47 +0900 Subject: [PATCH 049/103] =?UTF-8?q?feat:=20=E5=BE=93=E6=A5=AD=E5=93=A1?= =?UTF-8?q?=E6=8B=85=E5=BD=93=E6=96=BD=E8=A8=AD=E3=83=9E=E3=82=B9=E3=82=BF?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=82=92=E7=8F=BE=E8=A1=8CMINE=E3=81=AE?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=81=AB=E3=83=AD=E3=83=BC=E3=83=AB=E3=83=90?= =?UTF-8?q?=E3=83=83=E3=82=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index b4b6e4e7..e325d7a4 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -115,15 +115,14 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) - # 適用終了日 ≧ 適用開始日の場合 - if _str_to_date_time(emp_chg_inst_row['end_date']) >= start_date: + # 適用開始日 
< DCF施設統合マスタの適用月度の1日の場合 + if start_date < tekiyo_month_first_day: # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - if last_end_date >= start_date: - continue - # DCF施設統合マスタの適用月度の前月末日 < 適用開始日、または適用終了日 < 適用開始日の場合、N(論理削除レコード)に設定する + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], emp_chg_inst_row['start_date']) From 0390e23ca8cd73e0ef7c9c687ae3d44d89211795 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 19:32:46 +0900 Subject: [PATCH 050/103] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E7=95=B0=E5=B8=B8=E7=B3=BB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../vjsk_file_check/test_vjsk_file_check.py | 4 +- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 278 ++++++++++++++++-- .../UnzipError/bio_slip_data_202304270000.gz | Bin 0 -> 2649 bytes .../UnzipError/fcl_mst_202304270000.gz | Bin 0 -> 2003 bytes .../UnzipError/hld_mst_202304270000.gz | Bin 0 -> 541 bytes .../UnzipError/lot_num_mst_202304270000.gz | Bin 0 -> 415 bytes .../UnzipError/mdb_conv_mst_202304270000.gz | Bin 0 -> 533 bytes .../mkr_org_horizon_202304270000.gz | Bin 0 -> 808 bytes .../UnzipError/org_cnv_mst_202304270000.gz | Bin 0 -> 508 bytes .../UnzipError/phm_prd_mst_202304270000.gz | Bin 0 -> 1175 bytes .../UnzipError/phm_price_mst_202304270000.gz | Bin 0 -> 529 bytes .../UnzipError/slip_data_202304270000.gz | Bin 0 -> 2890 bytes .../stock_slip_data_202304270000.gz | Bin 0 -> 928 bytes .../UnzipError/tran_kbn_mst_202304270000.gz | Bin 0 -> 498 bytes .../UnzipError/vop_hco_merge_202304270000.gz | Bin 0 -> 398 bytes .../whs_customer_mst_202304270000.gz | Bin 0 -> 1305 bytes .../UnzipError/whs_mst_202304270000.gz | Bin 0 -> 589 bytes .../testdata/phm_price_mst_dataerror.tsv | 3 + .../testdata/phm_price_mst_formaterror.tsv | 2 + 19 files changed, 255 insertions(+), 32 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/hld_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mdb_conv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_price_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/tran_kbn_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 99b31d68..1c523d0e 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -10,7 +10,7 @@ def test_check_received_files_ok1(init_check_received_files_ok1): 観点 正常系 : 卸在庫データ取込対象日 期待値 - 例外が発生する + 例外が発生しない """ batch_context = BatchContext.get_instance() batch_context.is_vjsk_stock_import_day = True @@ -29,7 +29,7 @@ def test_check_received_files_ok2(init_check_received_files_ok2): 観点 正常系 : 卸在庫データ取込対象日以外 期待値 - 例外が発生する + 例外が発生しない """ batch_context = BatchContext.get_instance() batch_context.is_vjsk_stock_import_day = False diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 5f373fe0..a648724f 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -10,6 +10,7 @@ from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, _import_file_to_db) from src.db.database import Database +from src.error.exceptions import BatchOperationException # from tests.testing_vjsk_utility import create_vjsk_assertion_dictionary from tests.testing_vjsk_utility import (assert_table_results, create_vjsk_assertion_list) @@ -20,6 +21,7 @@ class TestImportFileToDb: batch_context: BatchContext test_file_path_import_all: str test_file_path_load_individual: str + test_file_path_unzip_error: str @pytest.fixture(autouse=True, scope='function') def pre_test(self, database: Database): @@ -27,6 +29,7 @@ class TestImportFileToDb: # setup self.test_file_path_import_all = path.join(path.dirname(__file__), "testdata", "TestImportFileToDb") self.test_file_path_load_individual = path.join(path.dirname(__file__), "testdata") + self.test_file_path_unzip_error = path.join(path.dirname(__file__), "testdata", "UnzipError") self.batch_context = BatchContext.get_instance() @@ -80,41 +83,72 @@ class TestImportFileToDb: key = f"{receive_folder}/{test_file}" s3_client.upload_file(file_name, bucket_name, key) - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") - # self.db.execute(f"truncate table 
{mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") # assertion received_s3_files = _check_received_files() _import_file_to_db(received_s3_files) - # # 検証 (卸在庫データファイル) - # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 10 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 10 - - # # 検証 (卸販売データ) - # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 10 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 10 + # 検証 + condkey = mapper.CONDKEY_STOCK_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_ORG_CNV_MST + assert 10 == len(self.db.execute_select(f"select 
* from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_VOP_HCO_MERGE + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_HLD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_FCL_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MKR_ORG_HORIZON + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_TRAN_KBN_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRICE_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_CUSTOMER_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MDB_CONV_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_BIO_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_LOT_NUM_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) # teardown for test_file in test_files: @@ -1074,3 +1108,187 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_unzip_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : gzファイルが解凍できない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + # ※.gzだが、7zipで圧縮してあるので、解凍に失敗するのが期待値 + test_files = [ + 
"stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_unzip_error, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + with pytest.raises(Exception) as e: + _import_file_to_db(received_s3_files) + + # 検証 + assert str(e.value) == "file could not be opened successfully" + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_load_data_error(self, mapper): + """ + 観点 + 異常系 : 日付型矛盾のデータ ※製品価格マスタファイルで確認 + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_dataerror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("SQL Error:") > 0 + + # teardown + + def test_load_format_error(self, mapper): + """ + 観点 + 異常系 : tsvファイルが途中で欠落している + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_formaterror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("SQL Error:") > 0 + + # teardown + + def test_s3backup_ok(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 正常系 : S3受領ファイルのバックアップフォルダ移動が完了する + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + # ※.gzだが、7zipで圧縮してあるので、解凍に失敗するのが期待値 + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_import_all, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証 + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + 
s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_s3backup_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : S3受領ファイルのバックアップフォルダ移動ができない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup + + # assertion + with pytest.raises(Exception) as e: + # 有りもしないファイルをバックアップフォルダにコピーさせてコケさせる + received_s3_files = [] + received_s3_files.append({"filename": "dummy.dummy"}) + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証 + assert str(e.value) == "An error occurred (404) when calling the HeadObject operation: Not Found" + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..c609baff8d5bb8aa1df1dc627ae1f42d55a619b7 GIT binary patch literal 2649 zcmV-f3a0fpdc3bE8~_CBuppAJ2><{90001r000000002GB5Tj!L;nevo}N7H!}`6!|44bB1Zd zA;|%jBml99_4nA8eWK{mQ|@aUvr5q)p#iS!JzLh+jS-F0>q!Bu{9h#Aoj1PvK-DQg zi?9bnmd5E++p12iIy(d3{*^a$C5|$R?1=2i^oYv0|l|J6A+9t?vymZlos?NLqJ)*&N?E0 zhg-e=Aw*J-Rp~pNmm&&7Tk^5diW_I8v=d{v#wP*|A64&Pjbodn9>ckgn57f%;I9w` z`lkg7pF{UzJwO^w9q8(b*4ha^rvw8!hrZORZZ1S#U>-KnSbVR4lO}q|=@7wfHd1nf z#zpDD6wYe*d4_F62!#HN%?=-DYj5&ekvY-XsRK3UPyrj<-xZZuVU*2&=XHn~ge42V zaUIg&;+WL~&@Vw?&sU7M06xLxk37`OGDPq83lmzz75>NX zPUt|te?LLOLBoz&T`9NFVIfl_F2-I~@wX51_d~arq-=eF(R&~|=B@G|&gPXm z;3RwHhlA@T5D;r>;jff_^YAiq))e$|aLFTL{IRj8h+R;F6vOlX&a^~p^!SRrfE@rR zLAz>Q)!p}5e4rn?%9jKJ9Curv_mb*RJr9x<+P$Q{xW|J7+5dGPV7ecS@7}s6axR*< zGpw|GNGxPv?AYZ`mxWIu%pJV(@uUW;E9ufu|6-3WEiR4;tNh*#-Ds@D3Tdu#hewR& zcUlyKZ;YjS8b`AEck*u+v=t;7YTypsqX+VlrRPBlLaRMr#$X*nago$1J5V&*>hrz; zQ}v~v9ous#a5B3 zihpgfhH5Yy;qIqas;n*E?OcArL)<;PmQfD)k!s5~qN-q~6vz~2!wboq#&nahQI+=grbyO?+s1UTDRCVKFk7?< zLOL%Id&}H4J{jP8Ubl}Ukflf&&=t)#VI_nHp?Bnx9Th@taa!iA_=Yl@%)57 z*Ic7(SoB78L<7yZz)8z1ON3`VbLCy$IpTi_NQjk=f>1FXo$23!*234_ylNh{|FWA7 zeONyc7YndnfOqIJA-CQQ^Ch|_sNvlOFIwd44&Z(H6q~@PK!p-)on|;W+5bYM3xk1; zijOge=hlkkZQV7f?5+g`IuGwC(t58FT@q@5ZM0ARhd~Ed6fh2W#E%1#_Ei}l+?Fcg9tLI%kWkoR=Zhdd+QrU&WZ9YldCSYB>H_sC^eEXXZr&Tio=pl$ zMTYH3t<1_B@yDHMH4(Y|BaubYW~fv(K|Amveb9)XwIm0o>8}h03fF`s?39Ec`e+&A zg^TTn=@$_vb`owMPUEGwS)O_-Krp2napxc#IX*>d zD;1U?7e7YmbwHm!&aBQ%WTk?M9jtz;7PxiEY!~t5E}QDeVxAQdId@#+On^)LK(_E1 zqd#t}KuqJVk47dGtJe`KeuXSIN;~zNP6c(2ND{?#lVV`e)hQ0RhT~ztY*b@UH}{j% zHz20;z+RtTm+zH5Y8GG_3!wozuIqUV!NHE^mWvb<_gOsRhf3qoEPjlK6Iq!3 zX}b6by{T@(J`NW7B$G3g0lHC6aR4q?nFrzo-@Bmh_sJ)-MHu86r%REf=mf_{9N5Qt zy1AlqdCr-@*S1G+Ok@GuLHMdFry&fc9l#TL(_@Cq9wo!OZxfYi*wd+~mEpEVcK{do zNtG>OU0NzDGnoFv6AO}|Akq)ZMF>dq8@P1)HZqMC%kXa6OWGs+1hjGMvQxFcO3Rg3 z>~)dlzT9pwl0gW4oG|zZ5;u@HXn3K>b%9biv&jGE@hT9f1!9Oy{@FSxbxS^*zWJL7B~!zHJ&&nnv<@3Hg^ge^(IjgH`Rg8f`MP`EU#S#e}E z!9K;^z*au7lisX?RkljrQ_YkTly&EpD9Xy(q)6s#I|cX|IXj9PPk4Z+Z<+K++10RSKX H00000Q4Zck literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..7f532e3b495ea3917ad7209461f48275b23036a0 GIT binary patch literal 2003 
zcmV;^2Q2tEdc3bE8~_ATkz9m12LJ#70001b000000000D$omW68~+C}T>vu}$=Ybg zKS(NMs1ba1$3U6Yg*cVK^I7S$l??jsA_&s9t!8@)(gVV<48xNp`>0y;N6li5^G&s> z(w`j%W2~w}7?mn}+_ZKl)3=&trzk5KXRoy{CCwzz^kCk1f!bmGZ7u<3m4ZSdG|hxe zZeG_eoBUP?3wfQ4t4T_fx(Db%H>W7xUwfi|C}7qc#@hW8bk7TLZEgCxQMccGvO{|K zH*0lR%C;^115Eu$w2pJ61?Yumyp`k7G^JojBS)cDm7F%5@umk@%sT+CID9@d9z+Bz zib@3$9Nq(ie74YI(GtiWrx*zrnCtiZa(vn}%}v36?zukFNQ(f2*6yJ)3A618gpThH z)qjS_>*_nj%ydxOgf{uZHlu07XyO06-$g7=92Z|%7B1TnQX#$JX^nY#P^B0#-3y6V zzaI&9FO7WQgC>iAOG|ObvNxJYdR%<`@k#li*Tc&Ef4Z&%D{&PAgis9{{HCXA0Z48G zDbEf6BMl6ia!I#eJtM^k;iExkBCR}u6|cV;nU*Sw&t2Vr)V1vX(qZ{p3>A|nD6j~L?|D8;Ba!o$u~k5JOb2+` z7Z!$&H8)QEmKgZcFPaZ(RKEp*orZBlcsh-TXD($mq=IfD2?^Q9(Xk%l3N;PKtc4L- zKSC4Wr5Y0z*n4a?z4j~Ai0HX=_!k5OO%-%34I!o%p$D|N3eHhSN%EJFAr(c{#K=FD!L_S2jSHj{qW{i#w8Osoq5sSF` zLuW`+{QpNqFL;2FE~y$(n*0-xuJcP+VuH5>*wOPtKzfWdD;Oi<@vNTBUM69zd_bb( zj)S}|a6@Vmd2gDU#D>4$DV(OSVIlsED6*3wh1t0!JCeP?=#=x{aY|504W&YxEA|_X z4_zsIVa%QNn+bHHjJ4r~2eNQPq4J{){7^TgV^8!FM_Ka4f(F`ecaO>;wzp1^g@6L_ z{^W?~Crv(35YdbkH9M_m#aGb3ZlZp(ORumKo5j>x*=fx=JKh}-E?T8pFc~CUydJHb zhOuq>r)qn&HhiS>8F>7qYE^L)ti(QXlfJAuIqO(WTwg*793dX*PKdUnBW-}mCvTEZ z+Zsc4rgE;-$~f@kJfm04kZe1fL52Z=jk4RFQrz-~L}mr6vITF{@tbdesu}*rxYmId zmkok~_C!?l4fIzu;b#v63Lrp3fA=1mGM00JvmLQ8V;W)bzdzBW+?$2K;p}2W0&rtn zFY0rjGUX)bxkB=8!Whct9Eo9ks;gFAFU*~dJ8)jB(rNnN^pIFn6~_&W;k~P*fq#mU zPSn9K-d_dmU1OesB5MgQT&N+ou!wi<2V7G0CaICB)XV_y*^jrBCOLY zz4J-XARn2VioD(zN+UlT>OjtIzf%lQM4;iFp^Oylljx^8mY=GLe-Wa|$;_13)9UzZ z`=XXkEg9%Jb3)p$K*Fg7mNns}8k7z*eozs`JG%H%D}=ie_1l!NaI~uSb$YC7DoE`# zVg|4|Q-=_T1zRR(0tN?b4eNCPo-tm1hEM)a;e7SLH}4}bJ)@+gzoH;yl(O4}3eg32 zRtc2-;WUeS53;J;+>2+@GAKVO}; zE=g`7o?}-qxMflL#W7TZ=v;hN5}$2(>+7hcGOtS53BE3MprR_-8rzf`+`&>RL<&;5 zS(isjmZ{RGR?%}2Wj-fID6$I(VRg@P$rpDyL5}+Kxb7vHvse6P*(K`!az#r#bmaIg zr#yoyXp0R#pB0SSjW00#>J00AK(0Rjx1000OI0Sc-z$p8Qa0T~Ja z000000000001-0)W&mRVYye*XZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(BFaR(B lE&y}@VE}Rf000>P6bbffd4;Zbu5F4G{^cY?e-p8DJukPN{>6t zyxM9p^}y%Imz$^Z{CcOvacSGy+`e8{>x87(O{QEiCt}qNG8ljVGmWlL?$j@`6=hj- zrn|n8=S$1+zL{HBQ&c%XqvQ0%|4$IZjW`vVTncr~Z$ zn$^0M6Q4?}tuEL&|EKgxPd)}l7B&V(&c;jHZmD{=85md@ zCApvx2BZXy88R4h7*ZJG8FCql8A^b(5rY8`8v{ubAZ-q$Ay5w}o5)ZE)FjCu!o|qY T@c6V|UCT{IQ8q>f1+a+#BE!|7 literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..821eccd466b905bf80dd76d301e3fc124198d007 GIT binary patch literal 415 zcmXr7+Ou9=hJnQ+czXF?1_)?^(rr5&Vjpn+XZRY+U?zR)U7?hcI@hH{|VX9u-=XKe36y9`r@mgK= z^qVvOJ3Z`!uVkcLk~?Om_vG~7b(b$TyLM_{2!4_LafRxW1(vtwU$L0A%W0O00?W}Y z=F(>$Rh)cg^IY|L`otoOpquRXPtDo7Q!2Z~VrMA>10xF?10!d{Uj}w=Mg~SjMMegm zE(Qh;E=I`~u51PdRz^uKXixzuL2HH_hJ1z+hIocNhEj%HAf3xl4CWg#7%&(y7z0TY nAZ-q$Ay5w}o5)ZE)GWy$!o|qY(3z=I*K(6ll#P)=0c~w=ef?Nu>KIY#xD?H7A{`BuP2R?L6JT$q}XRFAw=kq--eR=(CO74vLulBns6=WC@#!{a5UT=lgwS4b$YL-h5i2C#V@!ENR z%X&et!W7A;0?+h6y=>SdP@z#{Ca%TQnYqT`ne6*LGs~pv`B}I9lMdLr<@&*Q8Y<2& zZ)Kmm=zG*zDX=J6c*6DG{>8~l((9k;#J-iArSM)<);?cqGixU2SFyvnE90il(vxrI zFZH~&=&sYT105`4=gpT`A98q6Ji~wclG{n%e;td0!%D)P7*n66t_|M4fBHhkM|0*s zVPIfnVPjzAY%FGA=VoMJR8(YS;OSvt;NW6Ba6bDm0|P6gBo{P-fRvyuLoP!KLlQ$g zLo!1?LmopJkd@0&%uoWPjTj7o*ceEf0BLg|4S{+<*+hmSpmq^1MurA`cCEUWn~b7t Jj0_53lL2C#&i?=a literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz new file 
mode 100644 index 0000000000000000000000000000000000000000..d6327182e541ade13aa5222abf35750d251a2f3e GIT binary patch literal 808 zcmXr7+Ou9=hJoetg8Nc+OblSq1)&(U!$oBt$o^+4i)ApIb!t_ya;SWc`v;pf4GED3 zPFw}NN6xeO3pTzCzVOE&Qo+j3&f1x+MAd`aA;Lg7(`TWk$gbB3f&GpeK^m1U1&+-72P{oM(N=j3tpC`{Q7X~gr)tW zzDqwh2?&+>Fo}K;S~tPG_HOT~(>vcC7T!_adivez<83SYm3Do&9kFG-$yUP&eAY^q zyXMb4^mk`O>HcZ!R~2l#@B1*!ESZ0*F#loCL)Tx6w^JPyJ z_UI->*qrlup=n$C;mpR2MHOL8PM@0Bi6kdT)imFi+{mu2_*hp!@lv|(Z%NKV`II#A zIm?be-W(S;VNtSTnv%fr@VEN;dBEflM&i^-*IBGY0>6Kp`hlZ8yrLvlwv<8CmY+kA7+j-06< zy6&Ax-WjNI-HT_}_ z{shjLutetR0|o02U(Tgr8B%SzKN%QV*ccc&o9YGAMv;004SbOXvUq literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..1efc8e0f44b3a75dbc3d87909461e23a1dd9f481 GIT binary patch literal 508 zcmXr7+Ou9=hJnR#wD3-x|PN!h(6vh|7_smk;c6sUS z?2CKUmx+ZoX{}3H7S6(*{FpoH+@H2L9jx);+KcTDuK4j@!HA_-B~bkNPsPaHp^1O5 zyh+zOab;27N|m;bMBf|cEFE>ih6lA2O-^?%KJ6=c`gx80Jhqi0T2tQtbCu_8U#s#| z_wzsTx%Qqf(#P44o&=W|U{3tkkS-apYY)Of!K z|G{VRiVV_o)S1s8-uWr(!@51jM}ACaSLL5sTYpIR_{*)cj?b9TziZvsE0MyB#8S;Q zwyLbU?P(WRHd~VGi)uq-?>~pMJZ-~kAEwqyC$;?Dc5cyb6Fbw-%lcY=B&!RSv`yF~ z({N_`#P!mxK7yLJR6gaJ*qqLru=kpg?w`Ot8>$uW3sxz=I`d-6F5kUe{ol5Hj#gZn zrCgl*e`B24=I5R(9_(4WHRkO@{gb9PTRN2(7+KgD7&#lG7}&WP85k8685wwb7#KLX z7}uLugfK9$GD>nm;{iwsS~KJ`6fvYT#4{u_1>0E|lh7urc#9#o##z4{pNSgy` j2-E}0CIa;^Fi0|pa4|A8R6D8GwcKPBWn*Me0GkW|=Sjg? literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..c45076da977e36b40f0eaf997380aa19e0be62e7 GIT binary patch literal 1175 zcmV;I1Zev=dc3bE8~_BBw@YXB0{{R30001j000000001AoC>$#6aNG4T>v;53HoF+ z&@S2Jrr0QE?D8ym#8}7ee4+>R!;{%NTS~jgD*WP%kP&<&E-J~ zg)?oPJtV|Hp2x^k5>oGD0d%7G5lrFWyF`h|8!gbCElES@8U-MQ*WkOhVMj1v%)sRV zk+gTXUJ8D3Ry;!0i=UyRC&MCp0uV%+qEjb{fPI0h9z1lK#Idx{3S@&+T+bcfIWB;jJYd>>G`|4uN0tle&~folAVu|^swnMKXSge6`*{w zPvz%!B|My(%wa9hZ5fd`8P8JLYm#)fx1j&YmcuYK8O>1!%1mxxO)~MCMq_GDojUV| z+tIJb^5TUsw@rIbA#WuFi7eSe=>gl1@c!|(FTZHUY}jmSk&gJ=pnEVm1S18e)Dow8 zVhEvW|w#99o4T5uC9+LSA8&Qo-7nU%G?7S^J7PQ z9N*2c&DDzi=U4#oLv`djy@ph4ZDRZ@kPxpiB*!^vmQ3 z=X?rNZ*-fZXZP}b00D9WPsUwAq!R9FECbj)E(3TLZfK~_S>AUEN`dSzmk6Y;R63ZA z7ZYE}K1nBx%-{=QWmgq`BP*nP4k8XFYjY+Ft8%DO5$ljIf;>j!1XshYH6p)rmi@<6 zPKKfhNw`AOAOgNMw>6%^ph4`mf`#BPJ^}yav#+)lx8Rk?bl*t>Ef-+1X?oV*FV0lU zaE$l2Uq5E7zWzilLn-7*4O0;f4}=fkBamNqBJO=m+H*rEuB_Y{<4D@NV{A7uwmVq- z7HjQa5iT)n3j^A zhy(Ky75coj}zC}v^%mb>JWpi0l-GvO8yxSA7cNgm`3v^t-Kz)<(ope`gD#* z=Xzbcvc%_rsV+Tqm za+gJSG_X=t8o)P{>6KUE<{K*O^WQ(?1LEI5>x;IQ>SG!SHFc nqTiQ$(3w<5Ls2 z9Bm2JEhoy5lu-Zx0R#pB0SSZk00#>J00AK(0Raq@000OI0okdpP5=M}0T~Ja00000 z0000001-O?Z~$lkZ2(^YZ~$@uWB^|PZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(B pFaR(BE&y}@VE}Rf000>P6bbks85zK!3qmpEaG%h8!2O>wK9<2kit|^B@danGb1&^9ou16R zG-dOH-WRSPzF(<~Q_oU-QkR_KE#%G7pRa%Lh4Hz~lbsJsn>053c=3PdJWHwVhj%Qs z@_9LVqq@Ke&4>N_LCL_i_s|REtgeEx%xP=aD`swStTX z4tq7e%q=RaPr83!()g)KzrC!>)qF`C7Iryk4vJUc7JXm@5}_oh!fISir}t zy3Kl)_?_VEl?JBYp5Ds4oU{FAVz9mCh2vW6)j4ZU*z{Wn6wX$D^Wt?l)2vk!S1dfv z!GHd-W>HJwRJM|>x8@&vAbZZ!{MR4?qCBW1d_1qJ|FHLOp~s%bVKNPKKX56e9~0d6*JS;?zy-E7Q-3^( zxgD5pIa}u4m&RoaIvmy~ZoJo?Y#QYdd2n&6p+Ka+;{>nGg^P6r7#LaD7#KMl(;3*g z85tNA6&V?LdKefuxEKZazXD^0l~Ixl8Yw_Z(4L`yA%h{8As$Ef1&|;E07>V` 
AX#fBK literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..9c2dc3ad8b220f26cf496de8e62e50a8c31e1031 GIT binary patch literal 2890 zcmV-Q3$^q&dc3bE8~_A${EO?T3IG5A0001j000000002Gkw~N9NB;_-T>v?o31RZB zjjL?HEs`wV6&99nWD#rdJ2+Du{Ag@I6IJ|;VedSs$UwB-) z%l9JDE;s%nQzq~*D&e{SX@fcwG?|0{&(VkB4p7mN>%4lB=aL*)BQ2-0rVUAA%2U%Q zn6+v5aBMh!U=m2Hd0R>IZL*&Yj{xUwS~ymP7N``?3R|s7w2p7yT4D%_X|+gkGvW?N zcSq+w>A#((UQ{w;mk81NRs9BgsNE6rh9C^yc;v$<3u}Ouq5-o>y*U07;J(-0>=1BvKgyn=nvSFe1u2PhYU|yawdfzg6f7 z=xyphyn_WGlZ-j8Z^_up>M!}md1PJLj}fwsVrNzTW>7mkCUu;&e%?|;Hxa%#^Rgyg zv*>gow@?g!>XGjPUDgD-1*UiO&hbsWH`2rUW=_{jX8aSn6WV+@7NWc~gt^&*Ryc_- z19a5vuG<|Kbd#H^3|M7Ma$P|>agrZmQ=q5ows`^HnXfgGrn)^O(N}HPh_~*%+EDWj zY5^CHQpyl0GD(LXId$e69$0U_Woy?3a)+YYT$x;w!V14 z!7X20t&+@bx<`sNbp-E5JlbXfOpv9VmL?L!QB;Bx4uW)*0gZjhho_*Z4VHMw$+qFq zJOZ(v_h^~(?g0Qbevy>zY3tL>K2kS}#2sP46O@$`QWgMB*8^>lFUGW>Idih4I<9U;el7ly~j4X;mlx$kfM1s5%-C4$J8>zG127UoB8It z%Kzn%_2(((RK{t&cX)QI^VlJHe*Sq6Dvql5J%abZvtez?yXSFLI(ZEnkh(y-#(MM) zEy)7n28U0>oA*5;5>S4*iKiV6_fxj6Yk5m*`gV5^15N9`RMBbj-pDmoSP`OaAf6SW z9@Byk4t~J#bisL$W;T!3Jh(aE{m8PZqGQFXx2E@%N)`LmPtpA$$^eWJF1Lj$ z?T)QxQ70V)3(bryZn>(*FqU|1vAs|JGnX)T9q|GKfX}>Q7)QFV*#tt@UXNZe-FNy8Og4FtU@u z^>Dci?P;(1dsC`3`*IhSl2(gNkOPWbJznj^kTct1%(`X$Ez$CwvoA-tKjOgi<-iv) z7l?ts4}?Vl)5;R%YVez@-%?KlAbov;^7+Z!0d-AZb*P=yeA`$16pORe!)$q1fB~wW zHpJah#cnPrVMRr}ulPM^LiWZXaY~PnCG51<)?Z|06~`E?RE;`DNrB9>j!S??BiL(y z@1(U4Or!)VY(M)Ea$*Xh5LYgS6}9oOXXW1+nRy%4Nh0|zW>VTj7`ZMdSRtl+gA7xd z6XpVKH87Jl3aRl<&R+I6?0Z?m@kHY-85Y7($`h4KY;3v9$9YRzy|8&2h**F@LSGzT;zfM;&gnp_VQ6%AUzro1V&m(vzM zXf9d5^p}H>cuD=v3(EruAri3gnW+s}T*+;@+ywhm#j|Y<7BEnD(K*^~(=TG}5Q!g% zOT#8c%-`Bby@k<z=tgoanCyaE3QO0 zstAPD!1ZY?V*hRfdD^6#CE^7`5+7SQcmgk1gG1`9&_8%NTnpb;hffzRbAwQITJuhP z;_ifm9l?6!31DMMpdLfEH3$lwze3tRCegv9vN*uI>y-fAQ8Af>&hU~ti7s>at4sSA zSf{aD__30;;tm7pvNRwAypjr*?eBqgmqt7xl@XsLPkS@7iILQRTI=FPeXTxZDzC2O zm{w+7FK)%Pklm&l^5J$w5f`#3D06atW~LFT3NH>p7TGEcUt z(l``eqb;=!cad0L{?R2Q+y@_0vGw^YH$uQi3U{YtO#hZCD0rb6-)C4QSkV?v0W+>@ z_+srBXhn3xz~#n=N5k>N@}nUb{|&6@NJ3vJFJk30FQ3=Mn>dS9Pw$C5+vvCFV}pjb zipp&=G}p>I?ivlXsEGl8$EW)Yz{SCRPNqvH&b@*3;KVS8rxjI~T+}bJa*Vc}b01C+ zpu(hQvJrX+@(-BO(@NvTiV7}1PhnEWO7ju-qIEh>ThFrQzEZb>58XotGsAO-!{azn zpyT4>Y;3A2i(ispT?a~FZLwo+euoD(J$0^XgP3Sv20l>6z*@6uEQ%<)^wayu1C1z> zwwQdT19~dpQM|rS#b23lMWs<6Ii1NP!?X~>%~R)f%KuVCnhMj=4SP0p2E3138tgIe zURopdqGg=?_*lRGt>Ji`_L9pS@^X!218qr!_9WUvDB3b|EZ%@6EAHe1x zQDKy^y0aZM%h|a+KxnCkzcl~Tm=yF(S3wV<$?792rwMdbFdQ7q3;Ta{Vy4hGcwsM-8S zRujJ1i+8MoUSI$L1O@;B35uuy2MYlJ0U;p)1q{FdNB{^50hzg-rT_p10T~GZ00000 z000005jOyH0Bith0B`_b0Av7R0CWIh0ABzy05AYD05bqE05kwH05gxLWV1_1yd000000O-hNQ~&?~ literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..3d741b23b8d06131ede9740849253dd80d84c9a3 GIT binary patch literal 928 zcmV;R17G|%dc3bE8~_9u_yv?q3L&ZC zEPpun`r`{FYty&VXAAX8Wl4QlPT~VK>FX_(9eL%0^Ps4)<4wI!%pbk`U-(aQy=$&^ z7(B_}8$5lDpAZxK(b}Ht%{Wi~Tnf78a}~Pef1$)d5OLTz$7IsngT=hX!p9@Q$Bl2( ziflrX0zrW-rXCa20O|a_mp`}~gFT#d0MW1zSZBwPE+L2i!KPe*6YfxTZ7w3|31vFq`rE3ch`ju1GoC1T>29ehZ(Mv*1;!uvGJNqZlOP zap1I=$nO4Zxun%M%o=LYlHo%{<9>k`%WLds9o(8)?ee8wu&=enC}X3`NnJU7i*exf 
zZrvS*1I6?jMRZYLX-~~84647yEm-OFJUX;phqoigF&ne$%v=IPRPd!f7FYVpI+w~Z z(zmR1A0V1)3RFY4C6z zX4Au(7!uHV8zW6!VjeuSWjmxfk#H&3`N-q9FK$OZ42fZY9*WTKaRpv{0k2h@E009IB009Yt_5cS90RRCZAprplm;e9>3IXkjO9ub|1pyff z0000000000000p~0CNCz0B-2RCEI`hR=_8SqAU;jL6 z`^6qDv**&Mdim$7XYQDO2|F$iP<(wB1!j=2r+Iz7W27%PL`g>BhWTs{r zON(nxNU)wgzacPrddNvjEe7?9MXYt7%Tl$=o}{*=KfQ8BUf4Rt<4Z_v*`76BmBnY< z-gzaRVd7jU!My+FtAmdZ;su5c8))Tk3&yHM)C_d1E{1xp?si{=XVQ=YJyhu?2Q za@UKhkUy_}ur%!2Uv%*7ru17;nzyC*38w!!FMOYE#Y^{t`@WnxzPDxnv~>5H+WW_& zr}0HvIX|^{WO;dSh|yw^*+y#?PPpFnzLxRVo2a&J6z1Z^2g7>XDY8S)t78L}CYz;rG{F;Fy~!HB^Ch>d}y36M4i(h#TzluZQcVqg&A WVq|FOQI)7`xydNX#>k)mHW>hN!MgAO literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..a698e6313afebab9adfe57be053e1b8ed0a6bfc0 GIT binary patch literal 398 zcmXr7+Ou9=hJnStwbkYc0|azI>0b@Mwm;zf&u}M}!CLxMTFE2HS%2)euIm`^&-1yS))~!mbX=>gF79|?`E?bj+XSw(Jb9(z@4VY|C zupUZC+IL~!`N>+6pRVl;>{uk)W!t-*>rJgY%frgtMa&frdzwS7GH)Ldd%fXhPjr&> z!^n#L+r@Upe!6$W@mOKIOXJpx;$0ulPT5fWadrO;=j{n=kDl?#Q2({D>`%wS2#u}k z>F2V}ZZYwk6R>s5jB8z29}BIjIh**cY0vo;!Atk*?GwMFc!`X(gaAG18E41XV7CPVMqk3V_=YEVqg&A WVq|DAIxJe(a+6V%jgdhCYy$ue&yYL- literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..88998e266c3d4bd6bff2f4cb86e957988f50d879 GIT binary patch literal 1305 zcmV+!1?KuUdc3bE8~_Aw7im#%1ONa40001r000000001nHI0to7ykrjT>v|p3e0h| zE7|hh_V3-Wz~ik>d2)l$BqOQ2auwXBN$KZk&pb1F-Mhpti{42yhP&J*#uB5A6*le2 zeGf%!{Uyo><>`Yg_(Ib@XG7x5%2$HV)Ue)>rfe||TTFI#c?jI_Q#^}Cwny}`$G-H$ z@i^##6oTm>dh0e=BIpfWf$(*FqvYHXRUD8fcG6NGuU<`gRU{6Z<;ShTHd^H>@$-wp zVHKFCFc5PXB_VTPu08>z_Go8TNYPal}OT=)=0|FlXBOP2V_i6 zX@L`SgSd{|*Eh=~1{a?bAE&;SLws}OC^9=090;M4_H9=vL)U?jiX-~Y^>|Ew=)%dR z3u90k=D<`(L1V#JKP8%dKehLp7*vE}#sm8>2C}K(uf9 z;w^+X4Glz#=LoP^CMLQR8- z!Yw`InaenSWae>Ik=08-W(v?h9U@kNSoxbKmbKV0R{$R8p|kU8;@$sgCn(jog|4z{ zk9reeM=5{21_B4T`w9zXl9j)OiZe*vTmdnfiv=M(xRI$5&7c%;2qXy~*KXi|HtVG& zUQbqQ{umzZ9Ry*p^PA|FJo_hBA1SbBUKNlfSUa-Y2*ZS`=`26_=FbbmE@}r9p&qpt zx4TA%U9QZSzo(w$)*q;L7eyGe7Ewo|_e!30F{p38SH{esHl2!g+~ zW+qZQKDks8E_-GB9u-iy_j4;nvSW8&2FB|2V3kIoQtn)?p`K3XH{T6$isg{N;TmSS zB3pAl>8zBpD^zZfHprBd2oKHopS{RCuLrv1DLBmgloJiAP{62L17d|u@ZfaPK@u)A z4y;3ll*OI~ztuNK(dg$(dXhM76trHE?YtV8tq31Ywi()xdPbwP@+M=SN>fz)o<^sC zRAdEw8-BdJul7{L&;0p8W{8P7!4xVoluUr6|EZ(Pka*v*vxP)YIlfMbE2&u5Ba5bn zGN<7$TfWww=1VaHjWcTGGC%tKe^DP*n6G_A+yDUt1^@vGgl_-`3jqKDAt3<)4441_ z2nqqWr8oxw00jXV3IG5A000000000HMF4jIXaI8nUjSnObpUe!bO3JvZ2)BeasXcd zZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(BFaR(BE&y}@VE}Rf000yU0RRB#We|RZ P*#Q*>0RSKX00000NnT7o literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..a97fdb31ea2712b7afc6e043b35a28ea0bbced1d GIT binary patch literal 589 zcmXr7+Ou9=hJhvN(o>zyj0|8<1)&&rhh7(a!26$Zbu5GREUq&}0vD&fIQ9A0Yi`Na zfB!8m*ivNZb>6)_dtqhHmzzRCRxYNB{;P$;Ro2cv!m#^6(lgr)d0tP<#Fx)^*&pe1 zY%81QMn8w6>-I0Wy!+VWIpQbpGJBZa5ZkrDUgY2ZC4S24J_3@g5nr$Ng!<1p@Y^JG 
z<8B`N6Z~RHPWHtyoVUf9SKQQYyQ8>xXR3_F1(giO$LpjPX|||~&3N$oOV?>xalf3U zDFG|Ldi|5_-|h5_{hah4Kle2`ebSwW@3efK{bafKRA4*4zAYwUhkmL)i_NGz=#+ zS^TfNSDp1Kww8hY)e+|vzJ?l+)|%TY9$9syeM(tU-0oz&Eo-KK_SM+kxB0*AJGaC4 znEsUK65C7FZ>;#G$nyK)117E{7Z$GhuM5~ZgHO+2zu@z>Em~J!t`aksTj514$DgZ4RU%P!A}Z$WR2-B*`Gc1xzb_ S%zSk%HyK6Q7#S46CISG2=j=uR literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv new file mode 100644 index 00000000..3df124c1 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv @@ -0,0 +1,3 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "yyyy0401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"123456701" "02" "1" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv new file mode 100644 index 00000000..20ebb14d --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv @@ -0,0 +1,2 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "202304 \ No newline at end of file From 0bdcc1fc4afe44302f6d94f37d2ae4db8cf649b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 7 Jun 2023 15:49:20 +0900 Subject: [PATCH 051/103] =?UTF-8?q?feat:=20=E9=96=8B=E7=99=BA=E4=B8=AD?= =?UTF-8?q?=E3=82=B3=E3=83=9F=E3=83=83=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/laundering/sales_laundering.py | 2 + .../laundering/sales_results_laundering.py | 139 +++++ .../src05/hco_to_mdb_laundering.sql | 98 +++ .../src05/inst_merge_laundering.sql | 52 ++ .../src05/sales_lau_delete.sql | 37 ++ .../src05/sales_lau_upsert.sql | 568 ++++++++++++++++++ .../src05/v_inst_merge_laundering.sql | 71 +++ .../src05/whs_org_laundering.sql | 118 ++++ 8 files changed, 1085 insertions(+) create mode 100644 ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py create mode 100644 rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/inst_merge_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/sales_lau_delete.sql create mode 100644 rds_mysql/stored_procedure/src05/sales_lau_upsert.sql create mode 100644 rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/whs_org_laundering.sql diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index f6d682b4..3862177e 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -20,6 +20,8 @@ def exec(): emp_chg_inst_laundering.exec() # 納入先処方元マスタ洗替 ult_ident_presc_laundering.exec() + # 卸販売洗替 + # # 並列処理のテスト用コード # import time diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py new file mode 100644 index 
00000000..979f1042 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -0,0 +1,139 @@ +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.batch.batch_functions import logging_sql + +logger = get_logger('卸実績洗替') + + +def exec(): + db = Database.get_instance() + try: + db.connect() + logger.debug('処理開始') + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + _call_sales_lau_delete(db, 'sales_lau', 5) + # 卸販売実績テーブル(洗替後)作成 + _call_sales_lau_upsert(db, 'sales_lau', '', '') + # 1:卸組織洗替 + _call_whs_org_laundering(db, 'sales_lau') + # 3:HCO施設コードの洗替 + _update_sales_lau_from_vop_hco_merge_v(db, 'sales_lau') + # 4:メルク施設コードの洗替 + _update_mst_inst_laundering(db, 'sales_lau') + logger.debug('処理終了') + except Exception as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _call_sales_lau_delete(db: Database, target_table: str, set_year: int): + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + logger.info('sales_lau_delete(プロシージャ―) 開始') + db.execute(f'CALL src05.sales_lau_delete("{target_table}", {set_year})') + logger.info('sales_lau_delete(プロシージャ―) 終了') + return + + +def _call_sales_lau_upsert(db: Database, target_table: str, extract_from_date: str, + extract_to_date: str): + # 卸販売実績テーブル(洗替後)作成 + logger.info('sales_lau_delete(プロシージャ―) 開始') + db.execute(f'CALL src05.sales_lau_delete("{target_table}", "{extract_from_date}", "{extract_to_date}")') + logger.info('sales_lau_delete(プロシージャ―) 終了') + return + + +def _call_whs_org_laundering(db: Database, target_table: str): + # 卸組織洗替 + logger.info('whs_org_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.whs_org_laundering("{target_table}")') + logger.info('whs_org_laundering(プロシージャ―) 終了') + return + + +def _update_sales_lau_from_vop_hco_merge_v(db: Database, target_table: str): + # HCO施設コードの洗替 + if _count_vop_hco_merge_v(db) >= 1: + _call_v_inst_merge_laundering(db, target_table) + return + logger.info('V施設統合マスタにデータは存在しません') + return + + +def _count_vop_hco_merge_v(db: Database) -> int: + # V施設統合マスタのデータ件数の取得 + try: + sql = """ + SELECT + COUNT(v_inst_cd) AS cnt + FROM + src05.vop_hco_merge_v + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('V施設統合マスタのデータ件数の取得 成功') + except Exception as e: + logger.debug('V施設統合マスタのデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_v_inst_merge_laundering(db: Database, target_table: str): + # HCO施設コードの洗替(テーブル更新) + logger.info('v_inst_merge_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.v_inst_merge_laundering("{target_table}")') + logger.info('v_inst_merge_laundering(プロシージャ―) 終了') + return + + +def _update_mst_inst_laundering(db: Database, target_table: str): + # メルク施設コードの洗替 + _call_hco_to_mdb_laundering(db, target_table) + _update_sales_lau_from_dcf_inst_merge(db, target_table) + + +def _call_hco_to_mdb_laundering(db: Database, target_table: str): + # A:医療機関のデータはMDB変換表からHCO⇒DCFへ変換 + logger.info('hco_to_mdb_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.hco_to_mdb_laundering("{target_table}")') + logger.info('hco_to_mdb_laundering(プロシージャ―) 終了') + return + + +def _update_sales_lau_from_dcf_inst_merge(db: Database, target_table: str): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする + if _count_dcf_inst_merge(db) >= 1: + _call_inst_merge_laundering(db, target_table) + return + logger.info('DCF施設統合マスタにデータは存在しません') + return + + +def _count_dcf_inst_merge(db: Database) -> int: + # DCF施設統合マスタのデータ件数の取得 + try: + sql = """ + SELECT + COUNT(dcf_inst_cd) AS cnt + FROM 
+ src05.dcf_inst_merge + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('DCF施設統合マスタのデータ件数の取得 成功') + except Exception as e: + logger.debug('DCF施設統合マスタのデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_inst_merge_laundering(db: Database, target_table: str): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(テーブル更新) + logger.info('inst_merge_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.inst_merge_laundering("{target_table}")') + logger.info('inst_merge_laundering(プロシージャ―) 終了') + return diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql new file mode 100644 index 00000000..03402001 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -0,0 +1,98 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.hco_to_mdb_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A① 開始'); + + TRUNCATE TABLE internal05.hco_cnv_mdb_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A① 終了'); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A② 開始'); + + INSERT INTO + internal05.hco_cnv_mdb_t ( + hco_vod_v, + mdb_cd, + form_inst_name_kana, + form_inst_name_kanji, + inst_addr, + prefc_cd, + delete_flg, + abolish_ymd, + start_date + ) + SELECT + mcmv.hco_vid_v, + mcmv.mdb_cd, + ci.form_inst_name_kana, + ci.form_inst_name_kanji, + ci.inst_addr, + ci.prefc_cd, + ci.delete_flg, + ci.abolish_ymd, + mcmv.start_date + FROM + src05.mdb_cnv_mst_v AS mcmv + INNER JOIN ( + SELECT + hco_vid_v,MAX(sub_num) AS sno + FROM + src05.mdb_cnv_mst_v + WHERE + rec_sts_kbn != '9' + AND src05.get_syor_date() >= START_DATE + GROUP BY hco_vid_v + ) AS mcmv2 + ON mcmv.hco_vid_v = mcmv2.hco_vid_v + AND mcmv.sub_num = mcmv2.sno + LEFT OUTER JOIN src05.com_inst AS ci + ON mcmv.mdb_cd = ci.dcf_dsf_inst_cd + AND ci.delete_flg = '0' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A② 終了'); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A③ 開始'); + + UPDATE src05.@target_table AS tt, internal05.hco_cnv_mdb_t AS hcmt + SET + tt.inst_cd = hcmt.mdb_cd, + tt.inst_name_kana = hcmt.form_inst_name_kana, + tt.inst_name = hcmt.form_inst_name_kanji, + tt.address = hcmt.inst_addr, + tt.pref_cd = hcmt.prefc_cd + WHERE + tt.v_inst_cd = hcmt.hco_vid_v + AND tt.inst_clas_cd = '1' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A③ 終了'); + +END diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql 
b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql new file mode 100644 index 00000000..92ff79a9 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -0,0 +1,52 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.inst_merge_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_B① 開始'); + + UPDATE ( + SELECT + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + FROM + internal05.inst_merge_t + ) AS imt, + src05.@target_table AS tt + SET + tt.inst_cd = imt.dup_opp_cd, + tt.inst_name = imt.form_inst_name_kanji, + tt.inst_name_kana = imt.form_inst_name_kana, + tt.address = imt.inst_addr, + tt.prefc_cd = imt.prefc_cd + WHERE + tt.inst_cd = imt.dcf_dsf_inst_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_B① 終了'); + diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql new file mode 100644 index 00000000..652db718 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -0,0 +1,37 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.sales_lau_delete(@target_table VARCHAR(64), @laundering_period_year INT) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); + + DELETE FROM + src05.@target_table + WHERE + kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL @laundering_period_year YEAR), '%Y%m') + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql new file mode 100644 index 00000000..270495e7 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -0,0 +1,568 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE 
src05.sales_lau_upsert(@target_table VARCHAR(64), @extract_from_date DATETIME, + @extract_to_date DATETIME) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成① 開始' + ); + + TRUNCATE TABLE internal05.bu_prd_name_contrast_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成① 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成② 開始' + ); + + INSERT INTO + internal05.bu_prd_name_contrast_t ( + prd_cd, + bu_cd, + phm_itm_cd, + pp_start_date, + pp_end_date, + update_date, + bp_start_date, + bp_end_date + ) + SELECT + ppmv.prd_cd, + bpnc.bu_cd, + ppmv.phm_itm_cd, + ppmv.start_date AS pp_start_date, + ppmv.end_date AS pp_end_date, + bpnc.update_date AS update_date + bpnc.start_date AS bp_start_date, + bpnc.end_date AS bp_end_date + FROM + src05.phm_prd_mst_v AS ppmv + LEFT OUTER JOIN src05.bu_prd_name_contrast AS bpnc + ON ppmv.phm_itm_cd = bpnc.phm_itm_cd + WHERE + ppmv.rec_sts_kbn != '9' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成② 終了' + ); + + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成③ 開始' + ); + + TRUNCATE TABLE internal05.fcl_mst_v_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成③ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成④ 開始' + ); + + INSERT INTO + internal05.internal05.fcl_mst_v_t + SELECT + v_inst_cd, + sub_num, + start_date, + end_date, + closed_dt, + fcl_name, + fcl_kn_name, + fcl_abb_name, + mkr_cd, + jsk_proc_kbn, + fmt_addr, + fmt_kn_addr, + postal_cd, + prft_cd, + prft_name, + city_name, + addr_line_1, + tel_num, + admin_kbn, + fcl_type, + rec_sts_kbn, + ins_dt, + upd_dt, + dwh_upd_dt + FROM + src05.fcl_mst_v AS fmv1 + INNER JOIN ( + SELECT + fmv.v_inst_cd, + MAX(fmv.sub_num) AS sno + FROM + src05.fcl_mst_v AS fmv + GROUP BY + fmv.v_inst_cd + ) AS fmv2 + ON fmv1.v_inst_cd = fmv2.v_inst_cd + AND fmv1.sub_num = fmv2.sno + WHERE + fmv1.rec_sts_kbn != '9' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成④ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑤ 開始' + ); + + INSERT INTO + src05.@target_table ( + rec_whs_cd, + rec_whs_sub_cd, + rec_whs_org_cd, + rec_cust_cd, + rec_comm_cd, + rec_tran_kbn, + rev_hsdnymd_wrk, + rev_hsdnymd_srk, + rec_urag_num, + rec_qty, + rec_nonyu_price, + rec_nonyu_amt, + rec_comm_name, + rec_nonyu_fcl_name, + free_item, + rec_nonyu_fcl_addr, + rec_nonyu_fcl_post, + rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd, + rec_bef_slip_num, + rec_ymd, + sale_data_cat, + slip_file_name, + slip_mgt_num, + 
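        -- NOTE: the ON DUPLICATE KEY UPDATE branch further down omits slip_mgt_num,
        -- and the step-⑦ UPDATE later matches sales_lau to sales on
        -- slip_mgt_num + row_num, which suggests (assumption) that this pair is the
        -- unique key the upsert relies on.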
row_num, + hsdn_ymd, + exec_dt, + v_tran_cd, + tran_kbn_name, + whs_org_cd, + v_whsorg_cd, + whs_org_name, + whs_org_kn, + v_whs_cd, + whs_name, + nonyu_fcl_cd, + inst_name, + inst_name_kana, + address, + comm_cd, + comm_name, + nonyu_qty, + nonyu_price, + nonyu_amt, + shikiri_price, + shikiri_amt, + nhi_price, + nhi_amt, + v_inst_cd, + inst_clas_cd, + bu_cd, + item_cd, + item_name, + item_english_name, + pref_cd, + whspos_err_kbn, + htdnymd_err_kbn, + prd_exis_kbn, + fcl_exis_kbn, + bef_hsdn_ymd, + bef_slip_num, + slip_org_kbn, + kjyo_ym, + tksnbk_kbn, + fcl_exec_kbn, + rec_sts_kbn, + ins_dt, + ins_usr, + dwh_upd_dt + ) + SELECT + s.rec_whs_cd, + s.rec_whs_sub_cd, + s.rec_whs_org_cd, + s.rec_cust_cd, + s.rec_comm_cd, + s.rec_tran_kbn, + s.rev_hsdnymd_wrk, + s.rev_hsdnymd_srk, + s.rec_urag_num, + s.rec_qty, + s.rec_nonyu_price, + s.rec_nonyu_amt, + s.rec_comm_name, + s.rec_nonyu_fcl_name, + s.free_item, + s.rec_nonyu_fcl_addr, + s.rec_nonyu_fcl_post, + s.rec_nonyu_fcl_tel, + s.rec_bef_hsdn_ymd, + s.rec_bef_slip_num, + s.rec_ymd, + s.sale_data_cat, + s.slip_file_name, + s.slip_mgt_num, + s.row_num, + s.hsdn_ymd, + s.exec_dt, + s.v_tran_cd, + s.tran_kbn_name, + s.whs_org_cd, + s.v_whsorg_cd, + s.whs_org_name, + s.whs_org_kn, + s.v_whs_cd, + s.whs_name, + s.nonyu_fcl_cd, + s.v_inst_name, + s.v_inst_kn, + s.v_inst_addr, + s.comm_cd, + s.comm_name, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END AS nonyu_qty, + s.nonyu_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END AS nonyu_amt, + s.shikiri_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END AS shikiri_amt, + s.nhi_price, + CASE + WHEN + (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END AS nhi_amt, + s.v_inst_cd, + CASE + WHEN + (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' + ELSE + '1' + END AS inst_clas_cd, + bpnct.bu_cd, + ppmv.mkr_cd, + ppmv.mkr_inf_1, + ppmv.mkr_inf_2, + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END AS prft_cd, + s.whspos_err_kbn, + s.htdnymd_err_kbn, + s.prd_exis_kbn, + s.fcl_exis_kbn, + s.bef_hsdn_ymd, + s.bef_slip_num, + s.slip_org_kbn, + s.kjyo_ym, + s.tksnbk_kbn, + s.fcl_exec_kbn, + s.rec_sts_kbn, + s.ins_dt, + s.ins_usr, + SYSDATE() + FROM ( + SELECT + CAST(SYSDATE() AS DATE) AS today + ) AS sub + INNER JOIN src05.sales AS s + ON sub.today = CAST(s.dwh_upd_dt AS DATE) + LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv + ON s.comm_cd = ppmv.prd_cd + AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date + AND ppmv.rec_sts_kbn != '9' + LEFT OUTER JOIN src05.fcl_mst_v_t AS fmvt + ON s.v_inst_cd = fmvt.v_inst_cd + LEFT OUTER JOIN src05.bu_prd_name_contrast_t AS bpnct + ON s.comm_cd = bpnct.prd_cd + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date + LEFT OUTER JOIN src05.com_inst AS ci + ON s.v_inst_cd = ci.dcf_dsf_inst_cd + WHERE + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + ( + s.fcl_exec_kbn 
NOT IN ('2', '5') + AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) + ) + OR s.fcl_exec_kbn IS NULL + ) + ) + + ON DUPLICATE KEY UPDATE + rec_whs_cd = s.rec_whs_cd, + rec_whs_sub_cd = s.rec_whs_sub_cd, + rec_whs_org_cd = s.rec_whs_org_cd, + rec_cust_cd = s.rec_cust_cd, + rec_comm_cd = s.rec_comm_cd, + rec_tran_kbn = s.rec_tran_kbn, + rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, + rev_hsdnymd_srk = s.rev_hsdnymd_srk, + rec_urag_num = s.rec_urag_num, + rec_qty = s.rec_qty, + rec_nonyu_price = s.rec_nonyu_price, + rec_nonyu_amt = s.rec_nonyu_amt, + rec_comm_name = s.rec_comm_name, + rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, + free_item = s.free_item, + rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, + rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, + rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, + rec_bef_slip_num = s.rec_bef_slip_num, + rec_ymd = s.rec_ymd, + sale_data_cat = s.sale_data_cat, + slip_file_name = s.slip_file_name, + row_num = s.row_num, + hsdn_ymd = s.hsdn_ymd, + exec_dt = s.exec_dt, + v_tran_cd = s.v_tran_cd, + tran_kbn_name = s.tran_kbn_name, + whs_org_cd = s.whs_org_cd, + v_whsorg_cd = s.v_whsorg_cd, + whs_org_name = s.whs_org_name, + whs_org_kn = s.whs_org_kn, + v_whs_cd = s.v_whs_cd, + whs_name = s.whs_name, + nonyu_fcl_cd = s.nonyu_fcl_cd, + inst_name = s.v_inst_name, + inst_name_kana = s.v_inst_kn, + address = s.v_inst_addr, + comm_cd = s.comm_cd, + comm_name = s.comm_name, + nonyu_qty = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END + ), + nonyu_price = s.nonyu_price, + nonyu_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END + ), + shikiri_price = s.shikiri_price, + shikiri_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END + ), + nhi_price = s.nhi_price, + nhi_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END + ), + v_inst_cd = s.v_inst_cd, + inst_clas_cd = ( + CASE + WHEN + (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') + THEN + '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + bu_cd = bpnct.bu_cd, + item_cd = ppmv.mkr_cd, + item_name = ppmv.mkr_inf_1, + item_english_name = ppmv.mkr_inf_2, + pref_cd = ( + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END + ), + whspos_err_kbn = s.whspos_err_kbn, + htdnymd_err_kbn = s.htdnymd_err_kbn, + prd_exis_kbn = s.prd_exis_kbn, + fcl_exis_kbn = s.fcl_exis_kbn, + bef_hsdn_ymd = s.bef_hsdn_ymd, + bef_slip_num = s.bef_slip_num, + slip_org_kbn = s.slip_org_kbn, + kjyo_ym = s.kjyo_ym, + tksnbk_kbn = s.tksnbk_kbn, + fcl_exec_kbn = s.fcl_exec_kbn, + rec_sts_kbn = s.rec_sts_kbn, + ins_dt = s.ins_dt, + ins_usr = s.ins_usr, + dwh_upd_dt = SYSDATE() + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑤ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑥ 開始' + ); + + UPDATE ( + SELECT + fmvt.v_inst_cd AS v_inst_cd, + fmvt.fcl_type AS fcl_type, + fmvt.prft_cd AS prft_cd, + ci.prefc_cd AS prefc_cd + FROM + (SELECT CAST(SYSDATE() AS DATE) AS today) AS sub + INNER JOIN src05.fcl_mst_v_t AS fmvt + ON sub.today = CAST(fmvt.dwh_upd_dt AS 
DATE) + LEFT OUTER JOIN src05.com_inst AS ci + ON fmvt.v_inst_cd = ci.dcf_dsf_inst_cd + ) AS t3t5, + src05.sales_lau AS sl + SET + sl.inst_clas_cd = ( + CASE + WHEN + (t3t5.fcl_type = 'A1' OR t3t5.fcl_type = 'A0') + THEN + '3' + WHEN + t3t5.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + sl.pref_cd = ( + CASE + WHEN + sl.v_inst_cd LIKE '00%' + THEN + t3t5.prefc_cd + ELSE + t3t5.prft_cd + END + ) + WHERE sl.v_inst_cd = t3t5.v_inst_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑥ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑦ 開始' + ); + + UPDATE + ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + src05.sales_lau AS sl, + src05.sales AS s + SET + sl.inst_cd = ( + CASE + WHEN + (s.err_flg20 != 'M' AND sl.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') + THEN + s.v_inst_cd + ELSE + NULL + END + ) + WHERE + sub.today = CAST(s.dwh_upd_dt AS DATE) + AND sl.slip_mgt_num = s.slip_mgt_num + AND sl.row_num = s.row_num + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑦ 終了' + ); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql new file mode 100644 index 00000000..4ef2075e --- /dev/null +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -0,0 +1,71 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.v_inst_merge_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】3:HCO施設コードの洗替① 開始' + ); + + UPDATE ( + SELECT + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd + FROM + internal05.v_inst_merge_t + WHERE + (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' + ) AS vimt, + src05.@target_table AS tt + SET + tt.inst_cd = ( + CASE + WHEN + tt.inst_clas_cd = '1' + THEN + tt.inst_cd + WHEN + (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') + THEN + vimt.v_inst_cd_merge + END + ) + tt.v_inst_cd = vimt.v_inst_cd_merge, + tt.inst_name = vimt.fcl_name, + tt.inst_name_kana = vimt.fcl_kn_name, + tt.address = vimt.fmt_addr, + tt.prefc_cd = vimt.prft_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.v_inst_cd = vimt.v_inst_cd + AND (tt.inst_clas_cd IN ('1', '2', '3')) + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】3:HCO施設コードの洗替① 終了' + ); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql new file mode 100644 index 00000000..e3ce53c7 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ 
-0,0 +1,118 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.whs_org_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'whs_org_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替① 開始' + ); + + TRUNCATE TABLE internal05.whs_customer_org_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替① 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替② 開始' + ); + + INSERT INTO + internal05.whs_customer_org_t ( + whs_cd, + whs_sub_cd, + customer_cd, + whs_org_cd, + v_org_cd, + nm_2 + ) + SELECT + wcmv.whs_cd, + wcmv.whs_sub_cd, + wcmv.customer_cd, + wcmv.whs_org_cd, + ocmv.v_org_cd, + mohv2.name_2 + FROM + src05.whs_customer_mst_v AS wcmv + LEFT OUTER JOIN src05.org_cnv_mst_v AS ocmv + ON wcmv.whs_cd = ocmv.whs_cd + AND wcmv.whs_sub_cd = ocmv.whs_sub_cd + AND wcmv.whs_org_cd = ocmv.org_cd + AND src05.get_syor_date() BETWEEN ocmv.start_date AND ocmv.end_date + AND ocmv.rec_sts_kbn != '9' + LEFT OUTER JOIN ( + SELECT + mohv.v_cd_2, + mohv.name_2 + FROM src05.mkr_org_horizon_v AS mohv + INNER JOIN ( + SELECT + count(1) AS c, + v_cd_2, + MAX(dwh_upd_dt) AS dwh_upd_dt_latest + FROM + src05.mkr_org_horizon_v + WHERE + rec_sts_kbn != '9' + AND src05.get_syor_date() BETWEEN start_date AND end_date + GROUP BY + v_cd_2 + ORDER BY + MAX(start_date) DESC + ) AS m_latest + ON mohv.v_cd_2 = m_latest.v_cd_2 + AND mohv.dwh_upd_dt = m_latest.dwh_upd_dt_latest + WHERE + mohv.rec_sts_kbn != '9' + AND src05.get_syor_date() BETWEEN mohv.start_date AND mohv.end_date + ) AS mohv2 + ON ocmv.v_org_cd = mohv2.v_cd_2 + WHERE + wcmv.rec_sts_kbn != '9' + AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替② 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 開始' + ); + + UPDATE + src05.sales_lau AS sl, src05.@target_table AS tt + SET + sl.whs_org_cd = tt.whs_org_cd, + sl.v_whsorg_cd = tt.v_org_cd, + sl.whs_org_name = tt.nm_2 + WHERE + st.whs_cd = tt.whs_cd + AND st.whs_sub_cd = tt.whs_sub_cd + AND st.customer_cd = tt.customer_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 終了' + ); + +END \ No newline at end of file From 5f4efd451def5911198a8826a117cec361e6ee87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Mon, 12 Jun 2023 10:30:52 +0900 Subject: [PATCH 052/103] =?UTF-8?q?feat:=2048-1.=E5=8D=B8=E5=AE=9F?= =?UTF-8?q?=E7=B8=BE=E6=B4=97=E6=9B=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 6 + .../src/batch/laundering/sales_laundering.py | 6 +- 
.../laundering/sales_results_laundering.py | 85 +- .../src/system_var/environment.py | 7 + .../src05/hco_to_mdb_laundering.sql | 40 +- .../src05/inst_merge_laundering.sql | 57 +- .../src05/sales_lau_delete.sql | 20 +- .../src05/sales_lau_upsert.sql | 864 +++++++++--------- .../src05/v_inst_merge_laundering.sql | 86 +- .../src05/whs_org_laundering.sql | 59 +- 10 files changed, 629 insertions(+), 601 deletions(-) diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..7463e0d2 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -11,3 +11,9 @@ ULTMARC_BACKUP_FOLDER=ultmarc JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 \ No newline at end of file diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index 3862177e..8264f4bd 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -1,5 +1,7 @@ from src.batch.common.batch_context import BatchContext -from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering +from src.batch.laundering import ( + create_inst_merge_for_laundering, emp_chg_inst_laundering, + ult_ident_presc_laundering, sales_results_laundering) from src.logging.get_logger import get_logger batch_context = BatchContext.get_instance() @@ -21,7 +23,7 @@ def exec(): # 納入先処方元マスタ洗替 ult_ident_presc_laundering.exec() # 卸販売洗替 - + sales_results_laundering.exec() # # 並列処理のテスト用コード # import time diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 979f1042..4fe4126a 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -2,6 +2,7 @@ from src.db.database import Database from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql +from src.system_var import environment logger = get_logger('卸実績洗替') @@ -12,15 +13,15 @@ def exec(): db.connect() logger.debug('処理開始') # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 - _call_sales_lau_delete(db, 'sales_lau', 5) + _call_sales_lau_delete(db) # 卸販売実績テーブル(洗替後)作成 - _call_sales_lau_upsert(db, 'sales_lau', '', '') + _call_sales_lau_upsert(db) # 1:卸組織洗替 - _call_whs_org_laundering(db, 'sales_lau') + _call_whs_org_laundering(db) # 3:HCO施設コードの洗替 - _update_sales_lau_from_vop_hco_merge_v(db, 'sales_lau') + _update_sales_lau_from_vop_hco_merge_v(db) # 4:メルク施設コードの洗替 - _update_mst_inst_laundering(db, 'sales_lau') + _update_mst_inst_laundering(db) logger.debug('処理終了') except Exception as e: raise BatchOperationException(e) @@ -28,35 +29,49 @@ def exec(): db.disconnect() -def _call_sales_lau_delete(db: Database, target_table: str, set_year: int): +def _call_sales_lau_delete(db: Database): # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 logger.info('sales_lau_delete(プロシージャ―) 開始') - db.execute(f'CALL src05.sales_lau_delete("{target_table}", {set_year})') + db.execute(f""" + CALL 
src05.sales_lau_delete( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + {environment.SALES_LAUNDERING_TARGET_YEAR_OFFSET} + ) + """) logger.info('sales_lau_delete(プロシージャ―) 終了') return -def _call_sales_lau_upsert(db: Database, target_table: str, extract_from_date: str, - extract_to_date: str): +def _call_sales_lau_upsert(db: Database): # 卸販売実績テーブル(洗替後)作成 - logger.info('sales_lau_delete(プロシージャ―) 開始') - db.execute(f'CALL src05.sales_lau_delete("{target_table}", "{extract_from_date}", "{extract_to_date}")') - logger.info('sales_lau_delete(プロシージャ―) 終了') + logger.info('sales_lau_upsert(プロシージャ―) 開始') + db.execute(f""" + CALL src05.sales_lau_upsert( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + (src05.get_syor_date() - {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD}), + src05.get_syor_date() + ) + """) + logger.info('sales_lau_upsert(プロシージャ―) 終了') return -def _call_whs_org_laundering(db: Database, target_table: str): +def _call_whs_org_laundering(db: Database): # 卸組織洗替 logger.info('whs_org_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.whs_org_laundering("{target_table}")') + db.execute(f""" + CALL src05.whs_org_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('whs_org_laundering(プロシージャ―) 終了') return -def _update_sales_lau_from_vop_hco_merge_v(db: Database, target_table: str): +def _update_sales_lau_from_vop_hco_merge_v(db: Database): # HCO施設コードの洗替 if _count_vop_hco_merge_v(db) >= 1: - _call_v_inst_merge_laundering(db, target_table) + _call_v_inst_merge_laundering(db) return logger.info('V施設統合マスタにデータは存在しません') return @@ -81,32 +96,40 @@ def _count_vop_hco_merge_v(db: Database) -> int: return result[0]['cnt'] -def _call_v_inst_merge_laundering(db: Database, target_table: str): - # HCO施設コードの洗替(テーブル更新) +def _call_v_inst_merge_laundering(db: Database): + # HCO施設コードの洗替(プロシージャ―の呼び出し) logger.info('v_inst_merge_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.v_inst_merge_laundering("{target_table}")') + db.execute(f""" + CALL src05.v_inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('v_inst_merge_laundering(プロシージャ―) 終了') return -def _update_mst_inst_laundering(db: Database, target_table: str): +def _update_mst_inst_laundering(db: Database): # メルク施設コードの洗替 - _call_hco_to_mdb_laundering(db, target_table) - _update_sales_lau_from_dcf_inst_merge(db, target_table) + _call_hco_to_mdb_laundering(db) + _update_sales_lau_from_dcf_inst_merge(db) -def _call_hco_to_mdb_laundering(db: Database, target_table: str): +def _call_hco_to_mdb_laundering(db: Database): # A:医療機関のデータはMDB変換表からHCO⇒DCFへ変換 logger.info('hco_to_mdb_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.hco_to_mdb_laundering("{target_table}")') + db.execute(f""" + CALL src05.hco_to_mdb_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('hco_to_mdb_laundering(プロシージャ―) 終了') return -def _update_sales_lau_from_dcf_inst_merge(db: Database, target_table: str): +def _update_sales_lau_from_dcf_inst_merge(db: Database): # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする if _count_dcf_inst_merge(db) >= 1: - _call_inst_merge_laundering(db, target_table) + _call_inst_merge_laundering(db) return logger.info('DCF施設統合マスタにデータは存在しません') return @@ -131,9 +154,13 @@ def _count_dcf_inst_merge(db: Database) -> int: return result[0]['cnt'] -def _call_inst_merge_laundering(db: Database, target_table: str): - # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(テーブル更新) +def _call_inst_merge_laundering(db: Database): + # 
B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(プロシージャ―の呼び出し) logger.info('inst_merge_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.inst_merge_laundering("{target_table}")') + db.execute(f""" + CALL src05.inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('inst_merge_laundering(プロシージャ―) 終了') return diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..a51ab519 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -22,3 +22,10 @@ DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_AT DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) + +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index 03402001..b3cbfc6e 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.hco_to_mdb_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.hco_to_mdb_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -23,7 +23,7 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A① 開始'); + '【洗替】4:メルク施設コードの洗替_A① 開始'); TRUNCATE TABLE internal05.hco_cnv_mdb_t; @@ -31,11 +31,11 @@ BEGIN '【洗替】4:メルク施設コードの洗替_A① 終了'); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A② 開始'); + '【洗替】4:メルク施設コードの洗替_A② 開始'); INSERT INTO internal05.hco_cnv_mdb_t ( - hco_vod_v, + hco_vid_v, mdb_cd, form_inst_name_kana, form_inst_name_kanji, @@ -78,19 +78,23 @@ BEGIN '【洗替】4:メルク施設コードの洗替_A② 終了'); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A③ 開始'); + '【洗替】4:メルク施設コードの洗替_A③ 開始'); - UPDATE src05.@target_table AS tt, internal05.hco_cnv_mdb_t AS hcmt - SET - tt.inst_cd = hcmt.mdb_cd, - tt.inst_name_kana = hcmt.form_inst_name_kana, - tt.inst_name = hcmt.form_inst_name_kanji, - tt.address = hcmt.inst_addr, - tt.pref_cd = hcmt.prefc_cd - WHERE - tt.v_inst_cd = hcmt.hco_vid_v - AND tt.inst_clas_cd = '1' - ; + SET @update_institution = " + UPDATE $$target_table$$ AS tt, internal05.hco_cnv_mdb_t AS hcmt + SET + tt.inst_cd = hcmt.mdb_cd, + tt.inst_name_kana = hcmt.form_inst_name_kana, + tt.inst_name = 
hcmt.form_inst_name_kanji, + tt.address = hcmt.inst_addr, + tt.pref_cd = hcmt.prefc_cd + WHERE + tt.v_inst_cd = hcmt.hco_vid_v + AND tt.inst_clas_cd = '1' + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 終了'); diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 92ff79a9..6a0642a9 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.inst_merge_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.inst_merge_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -25,28 +25,35 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 開始'); - UPDATE ( - SELECT - dcf_dsf_inst_cd, - dup_opp_cd, - form_inst_name_kanji, - form_inst_name_kana, - inst_addr, - prefc_cd - FROM - internal05.inst_merge_t - ) AS imt, - src05.@target_table AS tt - SET - tt.inst_cd = imt.dup_opp_cd, - tt.inst_name = imt.form_inst_name_kanji, - tt.inst_name_kana = imt.form_inst_name_kana, - tt.address = imt.inst_addr, - tt.prefc_cd = imt.prefc_cd - WHERE - tt.inst_cd = imt.dcf_dsf_inst_cd - ; + SET @update_institution = " + UPDATE ( + SELECT + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + FROM + internal05.inst_merge_t + ) AS imt, + $$target_table$$ AS tt + SET + tt.inst_cd = imt.dup_opp_cd, + tt.inst_name = imt.form_inst_name_kanji, + tt.inst_name_kana = imt.form_inst_name_kana, + tt.address = imt.inst_addr, + tt.pref_cd = imt.prefc_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.inst_cd = imt.dcf_dsf_inst_cd + AND tt.inst_clas_cd = '1' + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_B① 終了'); + '【洗替】4:メルク施設コードの洗替_B① 終了'); +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index 652db718..c1610435 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_delete(@target_table VARCHAR(64), @laundering_period_year INT) +CREATE PROCEDURE src05.sales_lau_delete(target_table VARCHAR(255), laundering_period_year INT) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; -- プロシージャの引数 - DECLARE procedure_args JSON 
DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -25,11 +25,15 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); - DELETE FROM - src05.@target_table - WHERE - kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL @laundering_period_year YEAR), '%Y%m') - ; + SET @delete_data = " + DELETE FROM + $$target_table$$ + WHERE + kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL ? YEAR), '%Y%m') + "; + SET @delete_data = REPLACE(@delete_data, "$$target_table$$", target_table); + PREPARE delete_data_stmt from @delete_data; + EXECUTE delete_data_stmt USING @laundering_period_year; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 270495e7..ecb2b671 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -1,6 +1,6 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_upsert(@target_table VARCHAR(64), @extract_from_date DATETIME, - @extract_to_date DATETIME) +CREATE PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date date, + extract_to_date date) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -8,10 +8,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -24,7 +24,7 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成① 開始' + '卸販売実績テーブル(洗替後)作成① 開始' ); TRUNCATE TABLE internal05.bu_prd_name_contrast_t; @@ -36,7 +36,7 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 開始' ); - + INSERT INTO internal05.bu_prd_name_contrast_t ( prd_cd, @@ -46,15 +46,15 @@ BEGIN pp_end_date, update_date, bp_start_date, - bp_end_date - ) + bp_end_date + ) SELECT ppmv.prd_cd, bpnc.bu_cd, ppmv.phm_itm_cd, ppmv.start_date AS pp_start_date, ppmv.end_date AS pp_end_date, - bpnc.update_date AS update_date + bpnc.update_date AS update_date, bpnc.start_date AS bp_start_date, bpnc.end_date AS bp_end_date FROM @@ -63,7 +63,7 @@ BEGIN ON ppmv.phm_itm_cd = bpnc.phm_itm_cd WHERE ppmv.rec_sts_kbn != '9' - ; + ; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 終了' @@ -71,46 +71,47 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成③ 開始' + '卸販売実績テーブル(洗替後)作成③ 開始' ); TRUNCATE TABLE internal05.fcl_mst_v_t; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成③ 終了' - ); + '卸販売実績テーブル(洗替後)作成③ 終了' + ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成④ 開始' + '卸販売実績テーブル(洗替後)作成④ 開始' ); INSERT INTO - internal05.internal05.fcl_mst_v_t + internal05.fcl_mst_v_t SELECT - v_inst_cd, - 
sub_num, - start_date, - end_date, - closed_dt, - fcl_name, - fcl_kn_name, - fcl_abb_name, - mkr_cd, - jsk_proc_kbn, - fmt_addr, - fmt_kn_addr, - postal_cd, - prft_cd, - prft_name, - city_name, - addr_line_1, - tel_num, - admin_kbn, - fcl_type, - rec_sts_kbn, - ins_dt, - upd_dt, - dwh_upd_dt + fmv1.v_inst_cd, + fmv1.sub_num, + fmv1.start_date, + fmv1.end_date, + fmv1.closed_dt, + fmv1.fcl_name, + fmv1.fcl_kn_name, + fmv1.fcl_abb_name, + fmv1.fcl_abb_kn_name, + fmv1.mkr_cd, + fmv1.jsk_proc_kbn, + fmv1.fmt_addr, + fmv1.fmt_kn_addr, + fmv1.postal_cd, + fmv1.prft_cd, + fmv1.prft_name, + fmv1.city_name, + fmv1.addr_line_1, + fmv1.tel_num, + fmv1.admin_kbn, + fmv1.fcl_type, + fmv1.rec_sts_kbn, + fmv1.ins_dt, + fmv1.upd_dt, + fmv1.dwh_upd_dt FROM src05.fcl_mst_v AS fmv1 INNER JOIN ( @@ -129,334 +130,176 @@ BEGIN ; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成④ 終了' + '卸販売実績テーブル(洗替後)作成④ 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑤ 開始' - ); + '卸販売実績テーブル(洗替後)作成⑤ 開始' + ); - INSERT INTO - src05.@target_table ( - rec_whs_cd, - rec_whs_sub_cd, - rec_whs_org_cd, - rec_cust_cd, - rec_comm_cd, - rec_tran_kbn, - rev_hsdnymd_wrk, - rev_hsdnymd_srk, - rec_urag_num, - rec_qty, - rec_nonyu_price, - rec_nonyu_amt, - rec_comm_name, - rec_nonyu_fcl_name, - free_item, - rec_nonyu_fcl_addr, - rec_nonyu_fcl_post, - rec_nonyu_fcl_tel, - rec_bef_hsdn_ymd, - rec_bef_slip_num, - rec_ymd, - sale_data_cat, - slip_file_name, - slip_mgt_num, - row_num, - hsdn_ymd, - exec_dt, - v_tran_cd, - tran_kbn_name, - whs_org_cd, - v_whsorg_cd, - whs_org_name, - whs_org_kn, - v_whs_cd, - whs_name, - nonyu_fcl_cd, - inst_name, - inst_name_kana, - address, - comm_cd, - comm_name, - nonyu_qty, - nonyu_price, - nonyu_amt, - shikiri_price, - shikiri_amt, - nhi_price, - nhi_amt, - v_inst_cd, - inst_clas_cd, - bu_cd, - item_cd, - item_name, - item_english_name, - pref_cd, - whspos_err_kbn, - htdnymd_err_kbn, - prd_exis_kbn, - fcl_exis_kbn, - bef_hsdn_ymd, - bef_slip_num, - slip_org_kbn, - kjyo_ym, - tksnbk_kbn, - fcl_exec_kbn, - rec_sts_kbn, - ins_dt, - ins_usr, - dwh_upd_dt - ) - SELECT - s.rec_whs_cd, - s.rec_whs_sub_cd, - s.rec_whs_org_cd, - s.rec_cust_cd, - s.rec_comm_cd, - s.rec_tran_kbn, - s.rev_hsdnymd_wrk, - s.rev_hsdnymd_srk, - s.rec_urag_num, - s.rec_qty, - s.rec_nonyu_price, - s.rec_nonyu_amt, - s.rec_comm_name, - s.rec_nonyu_fcl_name, - s.free_item, - s.rec_nonyu_fcl_addr, - s.rec_nonyu_fcl_post, - s.rec_nonyu_fcl_tel, - s.rec_bef_hsdn_ymd, - s.rec_bef_slip_num, - s.rec_ymd, - s.sale_data_cat, - s.slip_file_name, - s.slip_mgt_num, - s.row_num, - s.hsdn_ymd, - s.exec_dt, - s.v_tran_cd, - s.tran_kbn_name, - s.whs_org_cd, - s.v_whsorg_cd, - s.whs_org_name, - s.whs_org_kn, - s.v_whs_cd, - s.whs_name, - s.nonyu_fcl_cd, - s.v_inst_name, - s.v_inst_kn, - s.v_inst_addr, - s.comm_cd, - s.comm_name, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END AS nonyu_qty, - s.nonyu_price, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END AS nonyu_amt, - s.shikiri_price, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END AS shikiri_amt, - s.nhi_price, - CASE - WHEN - (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt 
* -1) - ELSE - s.nhi_amt - END AS nhi_amt, - s.v_inst_cd, - CASE - WHEN - (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' - ELSE - '1' - END AS inst_clas_cd, - bpnct.bu_cd, - ppmv.mkr_cd, - ppmv.mkr_inf_1, - ppmv.mkr_inf_2, - CASE - WHEN - s.v_inst_cd LIKE '00%' - THEN - ci.prefc_cd - ELSE - fmvt.prft_cd - END AS prft_cd, - s.whspos_err_kbn, - s.htdnymd_err_kbn, - s.prd_exis_kbn, - s.fcl_exis_kbn, - s.bef_hsdn_ymd, - s.bef_slip_num, - s.slip_org_kbn, - s.kjyo_ym, - s.tksnbk_kbn, - s.fcl_exec_kbn, - s.rec_sts_kbn, - s.ins_dt, - s.ins_usr, - SYSDATE() - FROM ( + SET @insert_sales_laundering = " + INSERT INTO + $$target_table$$ ( + rec_whs_cd, + rec_whs_sub_cd, + rec_whs_org_cd, + rec_cust_cd, + rec_comm_cd, + rec_tran_kbn, + rev_hsdnymd_wrk, + rev_hsdnymd_srk, + rec_urag_num, + rec_qty, + rec_nonyu_price, + rec_nonyu_amt, + rec_comm_name, + rec_nonyu_fcl_name, + free_item, + rec_nonyu_fcl_addr, + rec_nonyu_fcl_post, + rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd, + rec_bef_slip_num, + rec_ymd, + sale_data_cat, + slip_file_name, + slip_mgt_num, + row_num, + hsdn_ymd, + exec_dt, + v_tran_cd, + tran_kbn_name, + whs_org_cd, + v_whsorg_cd, + whs_org_name, + whs_org_kn, + v_whs_cd, + whs_name, + nonyu_fcl_cd, + inst_name, + inst_name_kana, + address, + comm_cd, + comm_name, + nonyu_qty, + nonyu_price, + nonyu_amt, + shikiri_price, + shikiri_amt, + nhi_price, + nhi_amt, + v_inst_cd, + inst_clas_cd, + bu_cd, + item_cd, + item_name, + item_english_name, + pref_cd, + whspos_err_kbn, + htdnymd_err_kbn, + prd_exis_kbn, + fcl_exis_kbn, + bef_hsdn_ymd, + bef_slip_num, + slip_org_kbn, + kjyo_ym, + tksnbk_kbn, + fcl_exec_kbn, + rec_sts_kbn, + ins_dt, + ins_usr, + dwh_upd_dt + ) SELECT - CAST(SYSDATE() AS DATE) AS today - ) AS sub - INNER JOIN src05.sales AS s - ON sub.today = CAST(s.dwh_upd_dt AS DATE) - LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv - ON s.comm_cd = ppmv.prd_cd - AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date - AND ppmv.rec_sts_kbn != '9' - LEFT OUTER JOIN src05.fcl_mst_v_t AS fmvt - ON s.v_inst_cd = fmvt.v_inst_cd - LEFT OUTER JOIN src05.bu_prd_name_contrast_t AS bpnct - ON s.comm_cd = bpnct.prd_cd - AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date - AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date - LEFT OUTER JOIN src05.com_inst AS ci - ON s.v_inst_cd = ci.dcf_dsf_inst_cd - WHERE - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') - OR ( - s.rec_sts_kbn = '0' - AND s.err_flg20 != 'M' - AND s.v_tran_cd IN (110, 120, 210, 220) - AND ( - ( - s.fcl_exec_kbn NOT IN ('2', '5') - AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) - ) - OR s.fcl_exec_kbn IS NULL - ) - ) - - ON DUPLICATE KEY UPDATE - rec_whs_cd = s.rec_whs_cd, - rec_whs_sub_cd = s.rec_whs_sub_cd, - rec_whs_org_cd = s.rec_whs_org_cd, - rec_cust_cd = s.rec_cust_cd, - rec_comm_cd = s.rec_comm_cd, - rec_tran_kbn = s.rec_tran_kbn, - rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, - rev_hsdnymd_srk = s.rev_hsdnymd_srk, - rec_urag_num = s.rec_urag_num, - rec_qty = s.rec_qty, - rec_nonyu_price = s.rec_nonyu_price, - rec_nonyu_amt = s.rec_nonyu_amt, - rec_comm_name = s.rec_comm_name, - rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, - free_item = s.free_item, - rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, - rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, - rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, - rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, - rec_bef_slip_num = s.rec_bef_slip_num, - rec_ymd = s.rec_ymd, - 
sale_data_cat = s.sale_data_cat, - slip_file_name = s.slip_file_name, - row_num = s.row_num, - hsdn_ymd = s.hsdn_ymd, - exec_dt = s.exec_dt, - v_tran_cd = s.v_tran_cd, - tran_kbn_name = s.tran_kbn_name, - whs_org_cd = s.whs_org_cd, - v_whsorg_cd = s.v_whsorg_cd, - whs_org_name = s.whs_org_name, - whs_org_kn = s.whs_org_kn, - v_whs_cd = s.v_whs_cd, - whs_name = s.whs_name, - nonyu_fcl_cd = s.nonyu_fcl_cd, - inst_name = s.v_inst_name, - inst_name_kana = s.v_inst_kn, - address = s.v_inst_addr, - comm_cd = s.comm_cd, - comm_name = s.comm_name, - nonyu_qty = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END - ), - nonyu_price = s.nonyu_price, - nonyu_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END - ), - shikiri_price = s.shikiri_price, - shikiri_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END - ), - nhi_price = s.nhi_price, - nhi_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt * -1) - ELSE - s.nhi_amt - END - ), - v_inst_cd = s.v_inst_cd, - inst_clas_cd = ( - CASE - WHEN - (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') - THEN - '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), - bu_cd = bpnct.bu_cd, - item_cd = ppmv.mkr_cd, - item_name = ppmv.mkr_inf_1, - item_english_name = ppmv.mkr_inf_2, - pref_cd = ( + s.rec_whs_cd, + s.rec_whs_sub_cd, + s.rec_whs_org_cd, + s.rec_cust_cd, + s.rec_comm_cd, + s.rec_tran_kbn, + s.rev_hsdnymd_wrk, + s.rev_hsdnymd_srk, + s.rec_urag_num, + s.rec_qty, + s.rec_nonyu_price, + s.rec_nonyu_amt, + s.rec_comm_name, + s.rec_nonyu_fcl_name, + s.free_item, + s.rec_nonyu_fcl_addr, + s.rec_nonyu_fcl_post, + s.rec_nonyu_fcl_tel, + s.rec_bef_hsdn_ymd, + s.rec_bef_slip_num, + s.rec_ymd, + s.sale_data_cat, + s.slip_file_name, + s.slip_mgt_num, + s.row_num, + s.hsdn_ymd, + s.exec_dt, + s.v_tran_cd, + s.tran_kbn_name, + s.whs_org_cd, + s.v_whsorg_cd, + s.whs_org_name, + s.whs_org_kn, + s.v_whs_cd, + s.whs_name, + s.nonyu_fcl_cd, + s.v_inst_name, + s.v_inst_kn, + s.v_inst_addr, + s.comm_cd, + s.comm_name, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END AS nonyu_qty, + s.nonyu_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END AS nonyu_amt, + s.shikiri_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END AS shikiri_amt, + s.nhi_price, + CASE + WHEN + (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END AS nhi_amt, + s.v_inst_cd, + CASE + WHEN + (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' + ELSE + '1' + END AS inst_clas_cd, + bpnct.bu_cd, + ppmv.mkr_cd, + ppmv.mkr_inf_1, + ppmv.mkr_inf_2, CASE WHEN s.v_inst_cd LIKE '00%' @@ -464,105 +307,224 @@ BEGIN ci.prefc_cd ELSE fmvt.prft_cd - END - ), - whspos_err_kbn = s.whspos_err_kbn, - htdnymd_err_kbn = s.htdnymd_err_kbn, - prd_exis_kbn = s.prd_exis_kbn, - fcl_exis_kbn = s.fcl_exis_kbn, - 
bef_hsdn_ymd = s.bef_hsdn_ymd, - bef_slip_num = s.bef_slip_num, - slip_org_kbn = s.slip_org_kbn, - kjyo_ym = s.kjyo_ym, - tksnbk_kbn = s.tksnbk_kbn, - fcl_exec_kbn = s.fcl_exec_kbn, - rec_sts_kbn = s.rec_sts_kbn, - ins_dt = s.ins_dt, - ins_usr = s.ins_usr, - dwh_upd_dt = SYSDATE() - ; - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑤ 終了' - ); - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑥ 開始' - ); - - UPDATE ( + END AS prft_cd, + s.whspos_err_kbn, + s.htdnymd_err_kbn, + s.prd_exis_kbn, + s.fcl_exis_kbn, + s.bef_hsdn_ymd, + s.bef_slip_num, + s.slip_org_kbn, + s.kjyo_ym, + s.tksnbk_kbn, + s.fcl_exec_kbn, + s.rec_sts_kbn, + s.ins_dt, + s.ins_usr, + SYSDATE() + FROM ( SELECT - fmvt.v_inst_cd AS v_inst_cd, - fmvt.fcl_type AS fcl_type, - fmvt.prft_cd AS prft_cd, - ci.prefc_cd AS prefc_cd - FROM - (SELECT CAST(SYSDATE() AS DATE) AS today) AS sub - INNER JOIN src05.fcl_mst_v_t AS fmvt - ON sub.today = CAST(fmvt.dwh_upd_dt AS DATE) - LEFT OUTER JOIN src05.com_inst AS ci - ON fmvt.v_inst_cd = ci.dcf_dsf_inst_cd - ) AS t3t5, - src05.sales_lau AS sl - SET - sl.inst_clas_cd = ( - CASE - WHEN - (t3t5.fcl_type = 'A1' OR t3t5.fcl_type = 'A0') - THEN - '3' - WHEN - t3t5.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), - sl.pref_cd = ( - CASE - WHEN - sl.v_inst_cd LIKE '00%' - THEN - t3t5.prefc_cd - ELSE - t3t5.prft_cd - END - ) - WHERE sl.v_inst_cd = t3t5.v_inst_cd - ; + CAST(SYSDATE() AS DATE) AS today + ) AS sub + INNER JOIN src05.sales AS s + ON sub.today = CAST(s.dwh_upd_dt AS DATE) + LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv + ON s.comm_cd = ppmv.prd_cd + AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date + AND ppmv.rec_sts_kbn != '9' + LEFT OUTER JOIN internal05.fcl_mst_v_t AS fmvt + ON s.v_inst_cd = fmvt.v_inst_cd + LEFT OUTER JOIN internal05.bu_prd_name_contrast_t AS bpnct + ON s.comm_cd = bpnct.prd_cd + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date + LEFT OUTER JOIN src05.com_inst AS ci + ON s.v_inst_cd = ci.dcf_dsf_inst_cd + WHERE + (? <= s.dwh_upd_dt AND s.dwh_upd_dt <= ?) 
+ AND ( + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + ( + s.fcl_exec_kbn NOT IN ('2', '5') + AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) + ) + OR s.fcl_exec_kbn IS NULL + ) + ) + ) + + ON DUPLICATE KEY UPDATE + rec_whs_cd = s.rec_whs_cd, + rec_whs_sub_cd = s.rec_whs_sub_cd, + rec_whs_org_cd = s.rec_whs_org_cd, + rec_cust_cd = s.rec_cust_cd, + rec_comm_cd = s.rec_comm_cd, + rec_tran_kbn = s.rec_tran_kbn, + rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, + rev_hsdnymd_srk = s.rev_hsdnymd_srk, + rec_urag_num = s.rec_urag_num, + rec_qty = s.rec_qty, + rec_nonyu_price = s.rec_nonyu_price, + rec_nonyu_amt = s.rec_nonyu_amt, + rec_comm_name = s.rec_comm_name, + rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, + free_item = s.free_item, + rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, + rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, + rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, + rec_bef_slip_num = s.rec_bef_slip_num, + rec_ymd = s.rec_ymd, + sale_data_cat = s.sale_data_cat, + slip_file_name = s.slip_file_name, + row_num = s.row_num, + hsdn_ymd = s.hsdn_ymd, + exec_dt = s.exec_dt, + v_tran_cd = s.v_tran_cd, + tran_kbn_name = s.tran_kbn_name, + whs_org_cd = s.whs_org_cd, + v_whsorg_cd = s.v_whsorg_cd, + whs_org_name = s.whs_org_name, + whs_org_kn = s.whs_org_kn, + v_whs_cd = s.v_whs_cd, + whs_name = s.whs_name, + nonyu_fcl_cd = s.nonyu_fcl_cd, + inst_name = s.v_inst_name, + inst_name_kana = s.v_inst_kn, + address = s.v_inst_addr, + comm_cd = s.comm_cd, + comm_name = s.comm_name, + nonyu_qty = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END + ), + nonyu_price = s.nonyu_price, + nonyu_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END + ), + shikiri_price = s.shikiri_price, + shikiri_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END + ), + nhi_price = s.nhi_price, + nhi_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END + ), + v_inst_cd = s.v_inst_cd, + inst_clas_cd = ( + CASE + WHEN + (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') + THEN + '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + bu_cd = bpnct.bu_cd, + item_cd = ppmv.mkr_cd, + item_name = ppmv.mkr_inf_1, + item_english_name = ppmv.mkr_inf_2, + pref_cd = ( + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END + ), + whspos_err_kbn = s.whspos_err_kbn, + htdnymd_err_kbn = s.htdnymd_err_kbn, + prd_exis_kbn = s.prd_exis_kbn, + fcl_exis_kbn = s.fcl_exis_kbn, + bef_hsdn_ymd = s.bef_hsdn_ymd, + bef_slip_num = s.bef_slip_num, + slip_org_kbn = s.slip_org_kbn, + kjyo_ym = s.kjyo_ym, + tksnbk_kbn = s.tksnbk_kbn, + fcl_exec_kbn = s.fcl_exec_kbn, + rec_sts_kbn = s.rec_sts_kbn, + ins_dt = s.ins_dt, + ins_usr = s.ins_usr, + dwh_upd_dt = SYSDATE() + "; + SET @insert_sales_laundering = REPLACE(@insert_sales_laundering, "$$target_table$$", target_table); + PREPARE insert_sales_laundering_stmt from @insert_sales_laundering; + EXECUTE insert_sales_laundering_stmt USING @extract_from_date, @extract_to_date; call medaca_common.put_info_log(schema_name, 
procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑥ 終了' + '卸販売実績テーブル(洗替後)作成⑤ 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑦ 開始' + '卸販売実績テーブル(洗替後)作成⑥ 開始' ); - UPDATE - ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, - src05.sales_lau AS sl, - src05.sales AS s - SET - sl.inst_cd = ( - CASE - WHEN - (s.err_flg20 != 'M' AND sl.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') - THEN - s.v_inst_cd - ELSE - NULL - END - ) - WHERE - sub.today = CAST(s.dwh_upd_dt AS DATE) - AND sl.slip_mgt_num = s.slip_mgt_num - AND sl.row_num = s.row_num - ; + SET @update_institution_code = " + UPDATE + ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + $$target_table$$ AS tt, + src05.sales AS s + SET + tt.inst_cd = ( + CASE + WHEN + (s.err_flg20 != 'M' AND tt.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') + THEN + s.v_inst_cd + ELSE + NULL + END + ) + WHERE + sub.today = CAST(s.dwh_upd_dt AS DATE) + AND tt.slip_mgt_num = s.slip_mgt_num + AND tt.row_num = s.row_num + "; + SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); + PREPARE update_institution_code_stmt from @update_institution_code; + EXECUTE update_institution_code_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑦ 終了' + '卸販売実績テーブル(洗替後)作成⑥ 終了' ); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 4ef2075e..35c4a700 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.v_inst_merge_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.v_inst_merge_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -23,46 +23,50 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】3:HCO施設コードの洗替① 開始' + '【洗替】3:HCO施設コードの洗替① 開始' ); - UPDATE ( - SELECT - v_inst_cd, - v_inst_cd_merge, - fcl_name, - fcl_kn_name, - fmt_addr, - prft_cd - FROM - internal05.v_inst_merge_t - WHERE - (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' - ) AS vimt, - src05.@target_table AS tt - SET - tt.inst_cd = ( - CASE - WHEN - tt.inst_clas_cd = '1' - THEN - tt.inst_cd - WHEN - (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') - THEN - vimt.v_inst_cd_merge - END - ) - tt.v_inst_cd = vimt.v_inst_cd_merge, - tt.inst_name = vimt.fcl_name, - tt.inst_name_kana = vimt.fcl_kn_name, - tt.address = vimt.fmt_addr, - tt.prefc_cd = vimt.prft_cd, - tt.dwh_upd_dt = SYSDATE() - WHERE - tt.v_inst_cd = vimt.v_inst_cd - AND (tt.inst_clas_cd IN ('1', '2', '3')) - ; + SET @update_institution = " + UPDATE ( + SELECT + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd + FROM + internal05.v_inst_merge_t + WHERE + (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' + ) AS vimt, + 
$$target_table$$ AS tt + SET + tt.inst_cd = ( + CASE + WHEN + tt.inst_clas_cd = '1' + THEN + tt.inst_cd + WHEN + (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') + THEN + vimt.v_inst_cd_merge + END + ), + tt.v_inst_cd = vimt.v_inst_cd_merge, + tt.inst_name = vimt.fcl_name, + tt.inst_name_kana = vimt.fcl_kn_name, + tt.address = vimt.fmt_addr, + tt.pref_cd = vimt.prft_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.v_inst_cd = vimt.v_inst_cd + AND (tt.inst_clas_cd IN ('1', '2', '3')) + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index e3ce53c7..4b5835ee 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.whs_org_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.whs_org_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -29,11 +29,11 @@ BEGIN TRUNCATE TABLE internal05.whs_customer_org_t; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替① 終了' + '【洗替】1.卸組織洗替① 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替② 開始' + '【洗替】1.卸組織洗替② 開始' ); INSERT INTO @@ -43,7 +43,7 @@ BEGIN customer_cd, whs_org_cd, v_org_cd, - nm_2 + name_2 ) SELECT wcmv.whs_cd, @@ -89,30 +89,35 @@ BEGIN ON ocmv.v_org_cd = mohv2.v_cd_2 WHERE wcmv.rec_sts_kbn != '9' - AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date; - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替② 終了' - ); - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替③ 開始' - ); - - UPDATE - src05.sales_lau AS sl, src05.@target_table AS tt - SET - sl.whs_org_cd = tt.whs_org_cd, - sl.v_whsorg_cd = tt.v_org_cd, - sl.whs_org_name = tt.nm_2 - WHERE - st.whs_cd = tt.whs_cd - AND st.whs_sub_cd = tt.whs_sub_cd - AND st.customer_cd = tt.customer_cd + AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date ; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替③ 終了' + '【洗替】1.卸組織洗替② 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 開始' + ); + + SET @update_organization = " + UPDATE + $$target_table$$ AS tt, internal05.whs_customer_org_t AS wcot + SET + tt.whs_org_cd = wcot.whs_org_cd, + tt.v_whsorg_cd = wcot.v_org_cd, + tt.whs_org_name = wcot.name_2 + WHERE + wcot.whs_cd = tt.rec_whs_cd + AND wcot.whs_sub_cd = tt.rec_whs_sub_cd + AND wcot.customer_cd = tt.rec_cust_cd + "; + SET @update_organization = REPLACE(@update_organization, "$$target_table$$", target_table); + PREPARE update_organization_stmt from 
@update_organization; + EXECUTE update_organization_stmt; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 終了' ); END \ No newline at end of file From 0421aa8ccd44f1bb4463c7c08a2ae07cb181da24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 14 Jun 2023 11:56:44 +0900 Subject: [PATCH 053/103] =?UTF-8?q?feat:=20=E4=BB=95=E6=A7=98=E5=A4=89?= =?UTF-8?q?=E6=9B=B4=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 170 ++++++++++++------ 1 file changed, 113 insertions(+), 57 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index e325d7a4..2e4f9ca7 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -55,76 +55,128 @@ def _set_disabled_dct_inst_merge(db: Database): _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') +def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 納入先処方元マスタから、DCF施設コードに対応した領域コードの取得 + try: + sql = """ + SELECT + ta_cd + FROM + src05.ult_ident_presc + WHERE + presc_cd = :dcf_inst_cd + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + ult_ident_presc_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('納入先処方元マスタから領域コードの取得に成功') + except Exception as e: + logger.debug('納入先処方元マスタから領域コードの取得に失敗') + raise e + + return ult_ident_presc_ta_cd_records + + def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): # 納入先処方元マスタの追加 logger.info('納入先処方元マスタの登録 開始') for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + ult_ident_presc_ta_cd_records = _select_ult_ident_presc_ta_cd(db, enabled_merge_record['dcf_inst_cd']) + for ult_ident_presc_ta_cd_record in ult_ident_presc_ta_cd_records: + ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + enabled_merge_record['dup_opp_cd'], + ult_ident_presc_ta_cd_record['ta_cd']) + for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): + logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') + # 処方元コード=重複時相手先コードが発生した場合 + if ult_ident_presc_row['opp_count'] > 0: + continue - ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], - enabled_merge_record['dup_opp_cd']) - for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): - logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') - # 処方元コード=重複時相手先コードが発生した場合 - if ult_ident_presc_row['opp_count'] > 0: - break + start_date = _str_to_date_time(ult_ident_presc_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + set_start_date = _date_time_to_str(set_start_date) + is_exists_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: + _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, + '納入先処方元マスタの重複予定データの削除') + is_exists_duplicate_key = True + else: + logger.info('納入先処方元マスタの重複予定データなし') + _insert_ult_ident_presc(db, set_start_date, 
enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - start_date = _str_to_date_time(ult_ident_presc_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day - set_start_date = _date_time_to_str(set_start_date) - is_exists_duplicate_key = False - if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: - _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, - '納入先処方元マスタの重複予定データの削除') - is_exists_duplicate_key = True - else: - logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - - # 適用終了日 < 適用開始日の場合 - if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '開始月>適用開始日のため物理削除') - continue - # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 - if not is_exists_duplicate_key: - last_end_date = tekiyo_month_first_day - timedelta(days=1) - # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + # 適用終了日 < 適用開始日の場合 + if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + # 対象レコードを物理削除する + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '開始月>適用開始日のため物理削除') + continue + # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 + if not is_exists_duplicate_key: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) logger.info('納入先処方元マスタの登録 終了') +def _select_emp_chg_inst_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 従業員担当施設マスタから、DCF施設コードに対応した領域コードの取得 + try: + sql = """ + SELECT + ta_cd + FROM + src05.emp_chg_inst + WHERE + inst_cd = :dcf_inst_cd + AND enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + emp_chg_inst_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('従業員担当施設マスタから領域コードの取得に成功') + except Exception as e: + logger.debug('従業員担当施設マスタから領域コードの取得に失敗') + raise e + + return emp_chg_inst_ta_cd_records + + def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): # 従業員担当施設マスタの登録 logger.info('従業員担当施設マスタの登録 開始') for enabled_merge_record in enabled_dst_inst_merge_records: tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd']) - for emp_chg_inst_row in emp_chg_inst_records: - # 重複時相手先コードが存在したかのチェック - if emp_chg_inst_row['opp_count'] > 0: - break + emp_chg_inst_ta_cd_records = _select_emp_chg_inst_ta_cd(db, enabled_merge_record['dcf_inst_cd']) + for emp_chg_inst_ta_cd_record in emp_chg_inst_ta_cd_records: + emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'], + emp_chg_inst_ta_cd_record['ta_cd']) + for emp_chg_inst_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_chg_inst_row['opp_count'] > 0: + continue - start_date = _str_to_date_time(emp_chg_inst_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day + start_date = _str_to_date_time(emp_chg_inst_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day 
else tekiyo_month_first_day - _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), - emp_chg_inst_row) + _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), + emp_chg_inst_row) - # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 - if start_date < tekiyo_month_first_day: - # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する - last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), - emp_chg_inst_row) - continue - # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する - _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], - emp_chg_inst_row['start_date']) + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 + if start_date < tekiyo_month_first_day: + # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), + emp_chg_inst_row) + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) logger.info('従業員担当施設マスタの登録 終了') @@ -468,7 +520,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, raise e -def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: +def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: # emp_chg_instからSELECT try: sql = """ @@ -487,16 +539,18 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis FROM src05.emp_chg_inst AS eciopp WHERE - eciopp.inst_cd = :dup_opp_cd + eciopp.inst_cd = :dup_opp_cd + AND eciopp.ta_cd = :ta_cd ) AS opp_count FROM src05.emp_chg_inst AS eci WHERE eci.inst_cd = :dcf_inst_cd + AND eci.ta_cd = :ta_cd AND eci.enabled_flg = 'Y' AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < eci.end_date """ - params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} emp_chg_inst_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('従業員担当施設マスタの取得 成功') @@ -506,7 +560,7 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis return emp_chg_inst_records -def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: # ult_ident_prescからSELECT try: sql = """ @@ -522,15 +576,17 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> FROM src05.ult_ident_presc AS uipopp WHERE - uipopp.presc_cd = :dup_opp_cd + uipopp.presc_cd = :dup_opp_cd + AND uipopp.ta_cd = :ta_cd ) AS opp_count FROM src05.ult_ident_presc AS uip WHERE uip.presc_cd = :dcf_inst_cd + AND uip.ta_cd = :ta_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ - params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('納入先処方元マスタの取得 成功') From a808e03ea816766fec9008266b1ab47dc079c97d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 15 Jun 2023 15:55:16 +0900 Subject: [PATCH 054/103] 
=?UTF-8?q?feat:=20=E3=83=87=E3=83=83=E3=83=88?= =?UTF-8?q?=E3=83=AD=E3=82=B8=E3=83=83=E3=82=AF=E3=81=AE=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 2e4f9ca7..ab4d107a 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -107,14 +107,8 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict logger.info('納入先処方元マスタの重複予定データなし') _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - # 適用終了日 < 適用開始日の場合 - if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '開始月>適用開始日のため物理削除') - continue # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 - if not is_exists_duplicate_key: + if not is_exists_duplicate_key and _str_to_date_time(ult_ident_presc_row['end_date']) >= start_date: last_end_date = tekiyo_month_first_day - timedelta(days=1) # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) From 7a9293187fc9dd2f2368111575e7b56e969f4a10 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 20 Jun 2023 18:16:01 +0900 Subject: [PATCH 055/103] =?UTF-8?q?LOAD=E6=96=87=E5=AE=9F=E8=A1=8C?= =?UTF-8?q?=E6=99=82=E3=81=ABWARNING=E3=81=8C=E7=99=BA=E7=94=9F=E3=81=97?= =?UTF-8?q?=E3=81=9F=E3=81=A8=E3=81=8D=E3=80=81WARNING=E5=86=85=E5=AE=B9?= =?UTF-8?q?=E3=82=92INFO=E3=81=A7=E3=83=AD=E3=82=B0=E5=87=BA=E5=8A=9B?= =?UTF-8?q?=E3=81=97=E3=81=A6=E3=80=81=E4=BE=8B=E5=A4=96=E3=82=B9=E3=83=AD?= =?UTF-8?q?=E3=83=BC=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E6=94=B9?= =?UTF-8?q?=E4=BF=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 10 +++++++++- .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 2d99951b..61f287c5 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -27,7 +27,7 @@ class VjskDataLoadManager: # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") - # orgにload ※warningは1148エラーになるらしい + # orgにload ※warningが発生すれば異常終了させる sql = f"""\ LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} @@ -37,6 +37,14 @@ class VjskDataLoadManager: """ result = db.execute(sql, {"src_file_name": src_file_name}) logger.debug(sql) + # MEMO : sqlalchemy(engine=pymysql)としたときの result.context.cursor は、engineに依存してクラスが異なる + # https://nds-tyo.backlog.com/view/NEWDWH2021-1006#comment-266127218 + if result.context.cursor._result.warning_count > 0: + result_w = db.execute("SHOW WARNINGS;") + for row in result_w.fetchall(): + logger.info(f"SHOW WARNINGS : {row}") + raise Exception("LOAD文実行時にWARNINGが発生しました。") + logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') # 
org→srcにinsert select diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index a648724f..a5cfd370 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1208,7 +1208,7 @@ class TestImportFileToDb: VjskDataLoadManager.load(target_dict) # 検証 - assert str(e.value).startswith("SQL Error:") > 0 + assert str(e.value).startswith("LOAD文実行時にWARNINGが発生しました。") > 0 # teardown From 4bde68a27c631c4e7c16e1fd4bdc98193c000504 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 21 Jun 2023 18:11:32 +0900 Subject: [PATCH 056/103] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../laundering/sales_results_laundering.py | 37 ++--- .../src05/sales_lau_upsert.sql | 131 +++++------------- .../src05/whs_org_laundering.sql | 1 - 3 files changed, 56 insertions(+), 113 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 4fe4126a..04d0dcaa 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -4,7 +4,7 @@ from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql from src.system_var import environment -logger = get_logger('卸実績洗替') +logger = get_logger('卸卸販売洗替') def exec(): @@ -48,7 +48,7 @@ def _call_sales_lau_upsert(db: Database): db.execute(f""" CALL src05.sales_lau_upsert( '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', - (src05.get_syor_date() - {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD}), + (src05.get_syor_date() - interval {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD} day), src05.get_syor_date() ) """) @@ -70,27 +70,28 @@ def _call_whs_org_laundering(db: Database): def _update_sales_lau_from_vop_hco_merge_v(db: Database): # HCO施設コードの洗替 - if _count_vop_hco_merge_v(db) >= 1: - _call_v_inst_merge_laundering(db) + if _count_v_inst_merge_t(db) == 0: + logger.info('V施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') return - logger.info('V施設統合マスタにデータは存在しません') + + _call_v_inst_merge_laundering(db) return -def _count_vop_hco_merge_v(db: Database) -> int: - # V施設統合マスタのデータ件数の取得 +def _count_v_inst_merge_t(db: Database) -> int: + # V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 try: sql = """ SELECT COUNT(v_inst_cd) AS cnt FROM - src05.vop_hco_merge_v + internal05.v_inst_merge_t """ result = db.execute_select(sql) logging_sql(logger, sql) - logger.info('V施設統合マスタのデータ件数の取得 成功') + logger.info('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') except Exception as e: - logger.debug('V施設統合マスタのデータ件数の取得 失敗') + logger.debug('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') raise e return result[0]['cnt'] @@ -128,27 +129,27 @@ def _call_hco_to_mdb_laundering(db: Database): def _update_sales_lau_from_dcf_inst_merge(db: Database): # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする - if _count_dcf_inst_merge(db) >= 1: - _call_inst_merge_laundering(db) + if _count_inst_merge_t(db) == 0: + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') return - logger.info('DCF施設統合マスタにデータは存在しません') + _call_inst_merge_laundering(db) return -def _count_dcf_inst_merge(db: Database) -> int: - # DCF施設統合マスタのデータ件数の取得 +def _count_inst_merge_t(db: Database) -> int: + # 
アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 try: sql = """ SELECT COUNT(dcf_inst_cd) AS cnt FROM - src05.dcf_inst_merge + internal05.inst_merge_t """ result = db.execute_select(sql) logging_sql(logger, sql) - logger.info('DCF施設統合マスタのデータ件数の取得 成功') + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') except Exception as e: - logger.debug('DCF施設統合マスタのデータ件数の取得 失敗') + logger.debug('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') raise e return result[0]['cnt'] diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index ecb2b671..182c3b3a 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -137,7 +137,10 @@ BEGIN '卸販売実績テーブル(洗替後)作成⑤ 開始' ); - SET @insert_sales_laundering = " + SET @extract_from_datetime = CAST(extract_from_date AS DATETIME); + SET @extract_to_datetime = CAST(extract_to_date AS DATETIME); + + SET @upsert_sales_launderning = " INSERT INTO $$target_table$$ ( rec_whs_cd, @@ -256,7 +259,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nonyu_qty * -1) + -s.nonyu_qty ELSE s.nonyu_qty END AS nonyu_qty, @@ -265,7 +268,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nonyu_amt * -1) + -s.nonyu_amt ELSE s.nonyu_amt END AS nonyu_amt, @@ -274,7 +277,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.shikiri_amt * -1) + -s.shikiri_amt ELSE s.shikiri_amt END AS shikiri_amt, @@ -283,7 +286,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nhi_amt * -1) + -s.nhi_amt ELSE s.nhi_amt END AS nhi_amt, @@ -307,7 +310,7 @@ BEGIN ci.prefc_cd ELSE fmvt.prft_cd - END AS prft_cd, + END AS pref_cd, s.whspos_err_kbn, s.htdnymd_err_kbn, s.prd_exis_kbn, @@ -324,10 +327,11 @@ BEGIN SYSDATE() FROM ( SELECT - CAST(SYSDATE() AS DATE) AS today + ? AS extract_from_datetime, + ? AS extract_to_datetime ) AS sub INNER JOIN src05.sales AS s - ON sub.today = CAST(s.dwh_upd_dt AS DATE) + ON s.dwh_upd_dt BETWEEN sub.extract_from_datetime AND sub.extract_to_datetime LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv ON s.comm_cd = ppmv.prd_cd AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date @@ -341,21 +345,15 @@ BEGIN LEFT OUTER JOIN src05.com_inst AS ci ON s.v_inst_cd = ci.dcf_dsf_inst_cd WHERE - (? <= s.dwh_upd_dt AND s.dwh_upd_dt <= ?) 
- AND ( - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') - OR ( - s.rec_sts_kbn = '0' - AND s.err_flg20 != 'M' - AND s.v_tran_cd IN (110, 120, 210, 220) - AND ( - ( - s.fcl_exec_kbn NOT IN ('2', '5') - AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) - ) - OR s.fcl_exec_kbn IS NULL - ) - ) + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) + OR s.fcl_exec_kbn IS NULL + ) ) ON DUPLICATE KEY UPDATE @@ -399,78 +397,20 @@ BEGIN address = s.v_inst_addr, comm_cd = s.comm_cd, comm_name = s.comm_name, - nonyu_qty = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END - ), + nonyu_qty = VALUES(nonyu_qty), nonyu_price = s.nonyu_price, - nonyu_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END - ), + nonyu_amt = VALUES(nonyu_amt), shikiri_price = s.shikiri_price, - shikiri_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END - ), + shikiri_amt = VALUES(shikiri_amt), nhi_price = s.nhi_price, - nhi_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt * -1) - ELSE - s.nhi_amt - END - ), + nhi_amt = VALUES(nhi_amt), v_inst_cd = s.v_inst_cd, - inst_clas_cd = ( - CASE - WHEN - (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') - THEN - '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), + inst_clas_cd = VALUES(inst_clas_cd), bu_cd = bpnct.bu_cd, item_cd = ppmv.mkr_cd, item_name = ppmv.mkr_inf_1, item_english_name = ppmv.mkr_inf_2, - pref_cd = ( - CASE - WHEN - s.v_inst_cd LIKE '00%' - THEN - ci.prefc_cd - ELSE - fmvt.prft_cd - END - ), + pref_cd = VALUES(pref_cd), whspos_err_kbn = s.whspos_err_kbn, htdnymd_err_kbn = s.htdnymd_err_kbn, prd_exis_kbn = s.prd_exis_kbn, @@ -486,9 +426,9 @@ BEGIN ins_usr = s.ins_usr, dwh_upd_dt = SYSDATE() "; - SET @insert_sales_laundering = REPLACE(@insert_sales_laundering, "$$target_table$$", target_table); - PREPARE insert_sales_laundering_stmt from @insert_sales_laundering; - EXECUTE insert_sales_laundering_stmt USING @extract_from_date, @extract_to_date; + SET @upsert_sales_launderning = REPLACE(@upsert_sales_launderning, "$$target_table$$", target_table); + PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; + EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' @@ -499,8 +439,11 @@ BEGIN ); SET @update_institution_code = " - UPDATE - ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + UPDATE ( + SELECT + ? AS extract_from_datetime, + ? 
AS extract_to_datetime + ) AS sub, $$target_table$$ AS tt, src05.sales AS s SET @@ -515,13 +458,13 @@ BEGIN END ) WHERE - sub.today = CAST(s.dwh_upd_dt AS DATE) + s.dwh_upd_dt BETWEEN sub.extract_from_datetime AND sub.extract_to_datetime AND tt.slip_mgt_num = s.slip_mgt_num AND tt.row_num = s.row_num "; SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); PREPARE update_institution_code_stmt from @update_institution_code; - EXECUTE update_institution_code_stmt; + EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index 4b5835ee..65dc9e30 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -67,7 +67,6 @@ BEGIN FROM src05.mkr_org_horizon_v AS mohv INNER JOIN ( SELECT - count(1) AS c, v_cd_2, MAX(dwh_upd_dt) AS dwh_upd_dt_latest FROM From 3b2362442d130853af51cbda8c1d5cf876262310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 22 Jun 2023 10:34:14 +0900 Subject: [PATCH 057/103] =?UTF-8?q?feat:=20=E5=8D=98=E4=BD=93=E8=A9=A6?= =?UTF-8?q?=E9=A8=93NG=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/laundering/sales_results_laundering.py | 2 +- rds_mysql/stored_procedure/src05/sales_lau_upsert.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 04d0dcaa..530fd9b9 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -141,7 +141,7 @@ def _count_inst_merge_t(db: Database) -> int: try: sql = """ SELECT - COUNT(dcf_inst_cd) AS cnt + COUNT(dcf_dsf_inst_cd) AS cnt FROM internal05.inst_merge_t """ diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 182c3b3a..63049f09 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -138,7 +138,7 @@ BEGIN ); SET @extract_from_datetime = CAST(extract_from_date AS DATETIME); - SET @extract_to_datetime = CAST(extract_to_date AS DATETIME); + SET @extract_to_datetime = ADDTIME(CAST(extract_to_date AS DATETIME), '23:59:59'); SET @upsert_sales_launderning = " INSERT INTO From 7f82cc9e48208adf765802b08590c89f1c78ccbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Fri, 23 Jun 2023 14:20:48 +0900 Subject: [PATCH 058/103] =?UTF-8?q?=E7=92=B0=E5=A2=83=E6=A7=8B=E7=AF=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/.dockerignore | 12 + ecs/jskult-batch-monthly/.env.example | 22 + ecs/jskult-batch-monthly/.gitignore | 10 + ecs/jskult-batch-monthly/.vscode/launch.json | 16 + .../.vscode/recommended_settings.json | 31 ++ ecs/jskult-batch-monthly/Dockerfile | 20 + ecs/jskult-batch-monthly/Pipfile | 26 ++ ecs/jskult-batch-monthly/Pipfile.lock | 387 ++++++++++++++++++ ecs/jskult-batch-monthly/README.md | 48 +++ ecs/jskult-batch-monthly/entrypoint.py | 10 + 
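For reference, patch 057 above widens the upper bound of the dwh_upd_dt window to the end of extract_to_date with ADDTIME(CAST(extract_to_date AS DATETIME), '23:59:59'); a plain CAST would stop at midnight and silently drop rows updated later on that day. A small sketch of the equivalent windowing in Python, assuming date inputs and second precision for dwh_upd_dt:

from datetime import date, datetime, time, timedelta

def extraction_window(extract_from: date, extract_to: date) -> tuple[datetime, datetime]:
    # Mirrors CAST(extract_from_date AS DATETIME) and
    # ADDTIME(CAST(extract_to_date AS DATETIME), '23:59:59'):
    # the whole end day falls inside the BETWEEN range.
    return (
        datetime.combine(extract_from, time.min),
        datetime.combine(extract_to, time(23, 59, 59)),
    )

# Example: a window ending on the processing date (placeholder value).
syor_date = date(2023, 6, 22)
window_from, window_to = extraction_window(syor_date - timedelta(days=3), syor_date)
assert window_to == datetime(2023, 6, 22, 23, 59, 59)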
ecs/jskult-batch-monthly/src/__init__.py | 0 ecs/jskult-batch-monthly/src/aws/__init__.py | 0 ecs/jskult-batch-monthly/src/aws/s3.py | 98 +++++ .../src/batch/batch_functions.py | 101 +++++ .../src/batch/common/__init__.py | 0 .../src/batch/common/batch_context.py | 37 ++ .../src/batch/common/calendar_file.py | 32 ++ .../src/batch/parallel_processes.py | 32 ++ ecs/jskult-batch-monthly/src/db/__init__.py | 0 ecs/jskult-batch-monthly/src/db/database.py | 178 ++++++++ .../src/error/__init__.py | 0 .../src/error/exceptions.py | 10 + .../src/jobctrl_monthly.py | 88 ++++ .../src/logging/get_logger.py | 37 ++ .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 17 + .../src/system_var/environment.py | 25 ++ .../src/time/elapsed_time.py | 22 + .../calendar/jskult_arisj_output_day_list.txt | 100 +++++ 29 files changed, 1359 insertions(+) create mode 100644 ecs/jskult-batch-monthly/.dockerignore create mode 100644 ecs/jskult-batch-monthly/.env.example create mode 100644 ecs/jskult-batch-monthly/.gitignore create mode 100644 ecs/jskult-batch-monthly/.vscode/launch.json create mode 100644 ecs/jskult-batch-monthly/.vscode/recommended_settings.json create mode 100644 ecs/jskult-batch-monthly/Dockerfile create mode 100644 ecs/jskult-batch-monthly/Pipfile create mode 100644 ecs/jskult-batch-monthly/Pipfile.lock create mode 100644 ecs/jskult-batch-monthly/README.md create mode 100644 ecs/jskult-batch-monthly/entrypoint.py create mode 100644 ecs/jskult-batch-monthly/src/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/aws/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/aws/s3.py create mode 100644 ecs/jskult-batch-monthly/src/batch/batch_functions.py create mode 100644 ecs/jskult-batch-monthly/src/batch/common/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/batch/common/batch_context.py create mode 100644 ecs/jskult-batch-monthly/src/batch/common/calendar_file.py create mode 100644 ecs/jskult-batch-monthly/src/batch/parallel_processes.py create mode 100644 ecs/jskult-batch-monthly/src/db/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/db/database.py create mode 100644 ecs/jskult-batch-monthly/src/error/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/error/exceptions.py create mode 100644 ecs/jskult-batch-monthly/src/jobctrl_monthly.py create mode 100644 ecs/jskult-batch-monthly/src/logging/get_logger.py create mode 100644 ecs/jskult-batch-monthly/src/system_var/__init__.py create mode 100644 ecs/jskult-batch-monthly/src/system_var/constants.py create mode 100644 ecs/jskult-batch-monthly/src/system_var/environment.py create mode 100644 ecs/jskult-batch-monthly/src/time/elapsed_time.py create mode 100644 s3/config/jskult/calendar/jskult_arisj_output_day_list.txt diff --git a/ecs/jskult-batch-monthly/.dockerignore b/ecs/jskult-batch-monthly/.dockerignore new file mode 100644 index 00000000..8b9da402 --- /dev/null +++ b/ecs/jskult-batch-monthly/.dockerignore @@ -0,0 +1,12 @@ +tests/* +.coverage +.env +.env.example +.report/* +.vscode/* +.pytest_cache/* +*/__pychache__/* +Dockerfile +pytest.ini +README.md +*.sql diff --git a/ecs/jskult-batch-monthly/.env.example b/ecs/jskult-batch-monthly/.env.example new file mode 100644 index 00000000..19a3f19f --- /dev/null +++ b/ecs/jskult-batch-monthly/.env.example @@ -0,0 +1,22 @@ +DB_HOST=************ +DB_PORT=************ +DB_USERNAME=************ +DB_PASSWORD=************ +DB_SCHEMA=src05 +LOG_LEVEL=INFO +ULTMARC_DATA_BUCKET=**************** +ULTMARC_DATA_FOLDER=recv 
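The monthly batch reads these settings through src/system_var/environment.py, whose body is not shown in this hunk. A rough sketch of what such a module could look like, assuming plain os.environ access; the variable names are taken from .env.example, the defaults are illustrative only, and the int conversion for the DB_CONNECTION_* values is an assumption based on how database.py passes them to tenacity:

import os

DB_HOST = os.environ['DB_HOST']
DB_PORT = os.environ['DB_PORT']
DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ.get('DB_SCHEMA', 'src05')
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')

# Retry settings are consumed as numbers by tenacity in database.py.
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', '3'))
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_INIT', '1'))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS', '1'))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS', '16'))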
+JSKULT_BACKUP_BUCKET=**************** +ULTMARC_BACKUP_FOLDER=ultmarc +JSKULT_CONFIG_BUCKET=********************** +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +ARISJ_DATA_BUCKET=********** +LOG_LEVEL=************** +ARISJ_BACKUP_FOLDER=************** +JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME=jskult_arisj_output_day_list.txt +DB_CONNECTION_MAX_RETRY_ATTEMPT=************** +DB_CONNECTION_RETRY_INTERVAL_INIT=************** +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=************** +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=************* +VJSK_DATA_BUCKET=************* \ No newline at end of file diff --git a/ecs/jskult-batch-monthly/.gitignore b/ecs/jskult-batch-monthly/.gitignore new file mode 100644 index 00000000..bd0b37f8 --- /dev/null +++ b/ecs/jskult-batch-monthly/.gitignore @@ -0,0 +1,10 @@ +.vscode/settings.json +.env + +# python +__pycache__ + +# python test +.pytest_cache +.coverage +.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-monthly/.vscode/launch.json b/ecs/jskult-batch-monthly/.vscode/launch.json new file mode 100644 index 00000000..e0267567 --- /dev/null +++ b/ecs/jskult-batch-monthly/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // IntelliSense を使用して利用可能な属性を学べます。 + // 既存の属性の説明をホバーして表示します。 + // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(DEBUG)jskult batch monthly", + "type": "python", + "request": "launch", + "program": "entrypoint.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/ecs/jskult-batch-monthly/.vscode/recommended_settings.json b/ecs/jskult-batch-monthly/.vscode/recommended_settings.json new file mode 100644 index 00000000..b5e79d73 --- /dev/null +++ b/ecs/jskult-batch-monthly/.vscode/recommended_settings.json @@ -0,0 +1,31 @@ +{ + "[python]": { + "editor.defaultFormatter": null, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + // 自身の環境に合わせて変えてください + "python.defaultInterpreterPath": "", + "python.linting.lintOnSave": true, + "python.linting.enabled": true, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": [ + "--max-line-length=200", + "--ignore=F541" + ], + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Path": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length", "200", + "--ignore=F541" + ], + "python.testing.pytestArgs": [ + "tests/batch/ultmarc" + ], + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/ecs/jskult-batch-monthly/Dockerfile b/ecs/jskult-batch-monthly/Dockerfile new file mode 100644 index 00000000..dd891d48 --- /dev/null +++ b/ecs/jskult-batch-monthly/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.9 + +ENV TZ="Asia/Tokyo" + +WORKDIR /usr/src/app +COPY Pipfile Pipfile.lock ./ +RUN \ + apt update -y && \ + # パッケージのセキュリティアップデートのみを適用するコマンド + apt install -y unattended-upgrades && \ + unattended-upgrades && \ + pip install --upgrade pip wheel setuptools && \ + pip install pipenv --no-cache-dir && \ + pipenv install --system --deploy && \ + pip uninstall -y pipenv virtualenv-clone virtualenv + +COPY src ./src +COPY entrypoint.py entrypoint.py + +CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-monthly/Pipfile b/ecs/jskult-batch-monthly/Pipfile new file mode 100644 index 00000000..24e5efcd --- 
/dev/null +++ b/ecs/jskult-batch-monthly/Pipfile @@ -0,0 +1,26 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[scripts] +"test:ultmarc" = "pytest tests/batch/ultmarc/" +"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" + +[packages] +boto3 = "*" +sqlalchemy = "*" +tenacity = "*" +pymysql = "*" + +[dev-packages] +autopep8 = "*" +flake8 = "*" +pytest = "*" +pytest-cov = "*" + +[requires] +python_version = "3.9" + +[pipenv] +allow_prereleases = true diff --git a/ecs/jskult-batch-monthly/Pipfile.lock b/ecs/jskult-batch-monthly/Pipfile.lock new file mode 100644 index 00000000..3e58b727 --- /dev/null +++ b/ecs/jskult-batch-monthly/Pipfile.lock @@ -0,0 +1,387 @@ +{ + "_meta": { + "hash": { + "sha256": "0b1dbc40a5069476aa66f172175ae24ffae385c335ff8e4794c1b25a111b9e43" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:7694df61bd6d253d6d9db34adbcd218b8efbe7f894a4a51611f7e0587ae33218", + "sha256:fe49f91e057b241b23a58c74c2f22654216788052ce95b73439fdb18bfd0e155" + ], + "index": "pypi", + "version": "==1.26.159" + }, + "botocore": { + "hashes": [ + "sha256:86fe4641fd32dc6a5be4a289e00dc180448fc7bc37abac21bd624656985eef62", + "sha256:da1c61757d466b82cc89f379a50662064bcb0beb67cc6efa1fbfc9a341bd08b0" + ], + "markers": "python_version >= '3.7'", + "version": "==1.29.159" + }, + "greenlet": { + "hashes": [ + "sha256:0a9dfcadc1d79696e90ccb1275c30ad4ec5fd3d1ab3ae6671286fac78ef33435", + "sha256:0f313771cb8ee0a04dfdf586b7d4076180d80c94be09049daeea018089b5b957", + "sha256:17503397bf6cbb5e364217143b6150c540020c51a3f6b08f9a20cd67c25e2ca8", + "sha256:180ec55cb127bc745669eddc9793ffab6e0cf7311e67e1592f183d6ca00d88c1", + "sha256:1b3f3568478bc21b85968e8038c4f98f4bf0039a692791bc324b5e0d1522f4b1", + "sha256:1bd4ea36f0aeb14ca335e0c9594a5aaefa1ac4e2db7d86ba38f0be96166b3102", + "sha256:21ebcb570e0d8501457d6a2695a44c5af3b6c2143dc6644ec73574beba067c90", + "sha256:24071eee113d75fedebaeb86264d94f04b5a24e311c5ba3e8003c07d00112a7e", + "sha256:270432cfdd6a50016b8259b3bbf398a3f7c06a06f2c68c7b93e49f53bc193bcf", + "sha256:271ed380389d2f7e4c1545b6e0837986e62504ab561edbaff05da9c9f3f98f96", + "sha256:2840187a94e258445e62ff1545e34f0b1a14aef4d0078e5c88246688d2b6515e", + "sha256:2cda110faee67613fed221f90467003f477088ef1cc84c8fc88537785a5b4de9", + "sha256:2e160a65cc6023a237be870f2072513747d512a1d018efa083acce0b673cccc0", + "sha256:2fcf7af83516db35af3d0ed5d182dea8585eddd891977adff1b74212f4bfd2fd", + "sha256:36cebce1f30964d5672fd956860e7e7b69772da69658d5743cb676b442eeff36", + "sha256:42bfe67824a9b53e73f568f982f0d1d4c7ac0f587d2e702a23f8a7b505d7b7c2", + "sha256:450a7e52a515402fd110ba807f1a7d464424bfa703be4effbcb97e1dfbfcc621", + "sha256:463d63ca5d8c236788284a9a44b9715372a64d5318a6b5eee36815df1ea0ba3d", + "sha256:4d0c0ffd732466ff324ced144fad55ed5deca36f6036c1d8f04cec69b084c9d6", + "sha256:4ff2a765f4861fc018827eab4df1992f7508d06c62de5d2fe8a6ac2233d4f1d0", + "sha256:53abf19b7dc62795c67b8d0a3d8ef866db166b21017632fff2624cf8fbf3481c", + "sha256:5552d7be37d878e9b6359bbffa0512d857bb9703616a4c0656b49c10739d5971", + "sha256:585810056a8adacd3152945ebfcd25deb58335d41f16ae4e0f3d768918957f9a", + "sha256:5942b1d6ba447cff1ec23a21ec525dde2288f00464950bc647f4e0f03bd537d1", + "sha256:5c355c99be5bb23e85d899b059a4f22fdf8a0741c57e7029425ee63eb436f689", + 
"sha256:5f61df4fe07864561f49b45c8bd4d2c42e3f03d2872ed05c844902a58b875028", + "sha256:665942d3a954c3e4c976581715f57fb3b86f4cf6bae3ac30b133f8ff777ac6c7", + "sha256:68368e908f14887fb202a81960bfbe3a02d97e6d3fa62b821556463084ffb131", + "sha256:6aac94ff957b5dea0216af71ab59c602e1b947b394e4f5e878a5a65643090038", + "sha256:889934aa8d72b6bfc46babd1dc4b817a56c97ec0f4a10ae7551fb60ab1f96fae", + "sha256:a00550757fca1b9cbc479f8eb1cf3514dbc0103b3f76eae46341c26ddcca67a9", + "sha256:a4a2d6ed0515c05afd5cc435361ced0baabd9ba4536ddfe8ad9a95bcb702c8ce", + "sha256:a8dd92fd76a61af2abc8ccad0c6c6069b3c4ebd4727ecc9a7c33aae37651c8c7", + "sha256:ab81f9ff3e3c2ca65e824454214c10985a846cd9bee5f4d04e15cd875d9fe13b", + "sha256:ac10196b8cde7a082e4e371ff171407270d3337c8d57ed43030094eb01d9c95c", + "sha256:b767930af686551dc96a5eb70af3736709d547ffa275c11a5e820bfb3ae61d8d", + "sha256:b9a1f4d256b81f59ba87bb7a29b9b38b1c018e052dba60a543cb0ddb5062d159", + "sha256:ba94c08321b5d345100fc64eb1ab235f42faf9aabba805cface55ebe677f1c2c", + "sha256:bab71f73001cd15723c4e2ca398f2f48e0a3f584c619eefddb1525e8986e06eb", + "sha256:bce5cf2b0f0b29680396c5c98ab39a011bd70f2dfa8b8a6811a69ee6d920cf9f", + "sha256:c02e514c72e745e49a3ae7e672a1018ba9b68460c21e0361054e956e5d595bc6", + "sha256:c3fb459ced6c5e3b2a895f23f1400f93e9b24d85c30fbe2d637d4f7706a1116b", + "sha256:cd31ab223e43ac64fd23f8f5dad249addadac2a459f040546200acbf7e84e353", + "sha256:ce70aa089ec589b5d5fab388af9f8c9f9dfe8fe4ad844820a92eb240d8628ddf", + "sha256:d47b2e1ad1429da9aa459ef189fbcd8a74ec28a16bc4c3f5f3cf3f88e36535eb", + "sha256:d61bad421c1f496f9fb6114dbd7c30a1dac0e9ff90e9be06f4472cbd8f7a1704", + "sha256:d7ba2e5cb119eddbc10874b41047ad99525e39e397f7aef500e6da0d6f46ab91", + "sha256:dde0ab052c7a1deee8d13d72c37f2afecee30ebdf6eb139790157eaddf04dd61", + "sha256:df34b52aa50a38d7a79f3abc9fda7e400791447aa0400ed895f275f6d8b0bb1f", + "sha256:e0fc20e6e6b298861035a5fc5dcf9fbaa0546318e8bda81112591861a7dcc28f", + "sha256:e20d5e8dc76b73db9280464d6e81bea05e51a99f4d4dd29c5f78dc79f294a5d3", + "sha256:e31d1a33dc9006b278f72cb0aacfe397606c2693aa2fdc0c2f2dcddbad9e0b53", + "sha256:e3a99f890f2cc5535e1b3a90049c6ca9ff9da9ec251cc130c8d269997f9d32ee", + "sha256:e7b192c3df761d0fdd17c2d42d41c28460f124f5922e8bd524018f1d35610682", + "sha256:ed0f4fad4c3656e34d20323a789b6a2d210a6bb82647d9c86dded372f55c58a1", + "sha256:f34ec09702be907727fd479046193725441aaaf7ed4636ca042734f469bb7451", + "sha256:f3530c0ec1fc98c43d5b7061781a8c55bd0db44f789f8152e19d9526cbed6021", + "sha256:f5672082576d0e9f52fa0fa732ff57254d65faeb4a471bc339fe54b58b3e79d2", + "sha256:ffb9f8969789771e95d3c982a36be81f0adfaa7302a1d56e29f168ca15e284b8" + ], + "markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", + "version": "==3.0.0a1" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "pymysql": { + "hashes": [ + "sha256:766b72e4370aba94e6266a4dbd62c51fbc6a894c38de25a41a8a01f0461a2387", + "sha256:aade29b861e81a3c68a9e90d43f3db257940c0208983a0128b82f1a4cef639aa" + ], + "index": "pypi", + "version": "==1.1.0rc2" + }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + 
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "s3transfer": { + "hashes": [ + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:0db6734cb5644c55d0262a813b764c6e2cda1e66e939a488b3d6298cdc7344c2", + "sha256:0e4645b260cfe375a0603aa117f0a47680864cf37833129da870919e88b08d8f", + "sha256:131f0c894c6572cb1bdcf97c92d999d3128c4ff1ca13061296057072f61afe13", + "sha256:1e2caba78e7d1f5003e88817b7a1754d4e58f4a8f956dc423bf8e304c568ab09", + "sha256:2de1477af7f48c633b8ecb88245aedd811dca88e88aee9e9d787b388abe74c44", + "sha256:2f3b6c31b915159b96b68372212fa77f69230b0a32acab40cf539d2823954f5a", + "sha256:3ef876615ff4b53e2033022195830ec4941a6e21068611f8d77de60203b90a98", + "sha256:43e69c8c1cea0188b7094e22fb93ae1a1890aac748628b7e925024a206f75368", + "sha256:53081c6fce0d49bb36d05f12dc87e008c9b0df58a163b792c5fc4ac638925f98", + "sha256:5a934eff1a2882137be3384826f997db8441d43b61fda3094923e69fffe474be", + "sha256:5e8522b49e0e640287308b68f71cc338446bbe1c226c8f81743baa91b0246e92", + "sha256:61f2035dea56ff1a429077e481496f813378beb02b823d2e3e7eb05bc1a7a8ca", + "sha256:63ea36c08792a7a8a08958bc806ecff6b491386feeaf14607c3d9d2d9325e67f", + "sha256:6e85e315725807c127ad8ba3d628fdb861cf9ebfb0e10c39a97c01e257cdd71b", + "sha256:7641f6ed2682de84d77c4894cf2e43700f3cf7a729361d7f9cac98febf3d8614", + "sha256:7be04dbe3470fe8dd332fdb48c979887c381ef6c635eddf2dec43d2766111be4", + "sha256:81d867c1be5abd49f7e547c108391f371a9d980ba7ec34666c50d683f782b754", + "sha256:8544c6e62eacb77d5106e2055ef10f2407fc0dbd547e879f8745b2032eefd2bc", + "sha256:8d3cbdb2f07fb0e4b897dc1df39166735e194fb946f28f26f4c9f9801c8b24f7", + "sha256:8d6ef848e5afcd1bda3e9a843751f845c0ca888b61e669237680e913d84ec206", + "sha256:8e2569dac4e3cb85365b91ab569d06a221e0e17e65ce59949d00c3958946282b", + "sha256:90d320fde566b864adbc19abb40ecb80f4e25d6f084639969bb972d5cca16858", + "sha256:91eb8f89fcce8f709f8a4d65d265bc48a80264ee14c7c9e955f3222f19b4b39c", + "sha256:a08a791c75d6154d46914d1e23bd81d9455f2950ec1de81f2723848c593d2c8b", + "sha256:a2e9f50a906d0b81292576a9fb458f8cace904c81a67088f4a2ca9ff2856f55d", + "sha256:a5a2856e12cf5f54301ddf043bcbf0552561d61555e1bcf348b63f42b8e1eec2", + "sha256:b2801f85c5c0293aa710f8aa5262c707a83c1c203962ae5a22b4d9095e71aa9d", + "sha256:b72f4e4def50414164a1d899f2ce4e782a029fad0ed5585981d1611e8ae29a74", + "sha256:bdaf89dd82f4a0e1b8b5ffc9cdc0c9551be6175f7eee5af6a838e92ba2e57100", + "sha256:c5e333b81fe10d14efebd4e9429b7bb865ed9463ca8bef07a7136dfa1fd4a37b", + "sha256:ce1fc3f64fd42d5f763d6b83651471f32920338a1ba107a3186211474861af57", + "sha256:d0c96592f54edd571e00ba6b1ed5df8263328ca1da9e78088c0ebc93c2e6562c", + "sha256:dc97238fa44be86971270943a0c21c19ce18b8d1596919048e57912e8abc02cc", + "sha256:e19546924f0cf2ec930d1faf318b7365e5827276410a513340f31a2b423e96a4", + "sha256:f2938edc512dd1fa48653e14c1655ab46144d4450f0e6b33da7acd8ba77fbfd7", + "sha256:f387b496a4c9474d8580195bb2660264a3f295a04d3a9d00f4fa15e9e597427e", 
+ "sha256:f409f35a0330ab0cb18ece736b86d8b8233c64f4461fcb10993f67afc0ac7e5a", + "sha256:f662cf69484c59f8a3435902c40dfc34d86050bdb15e23d437074ce9f153306b", + "sha256:fbcc51fdbc89fafe4f4fe66f59372a8be88ded04de34ef438ab04f980beb12d4", + "sha256:fc1dae11bd5167f9eb53b3ccad24a79813004612141e76de21cf4c028dc30b34", + "sha256:ff6496ad5e9dc8baeb93a151cc2f599d01e5f8928a2aaf0b09a06428fdbaf553" + ], + "index": "pypi", + "version": "==2.0.16" + }, + "tenacity": { + "hashes": [ + "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0", + "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0" + ], + "index": "pypi", + "version": "==8.2.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:16224afa8cc2b3679dd9e9a1efe719dd2e20a03f0cc2e4cc4c97870ae9622532", + "sha256:3c2c2cd887648efa0ea8f8ba4260a1213058e8e4a25a6a6f4e084740b2c858e2" + ], + "markers": "python_version >= '3.7'", + "version": "==4.7.0rc1" + }, + "urllib3": { + "hashes": [ + "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f", + "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.16" + } + }, + "develop": { + "autopep8": { + "hashes": [ + "sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1", + "sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c" + ], + "index": "pypi", + "version": "==2.0.2" + }, + "colorama": { + "hashes": [ + "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.6" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", + "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2", + "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a", + "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", + "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", + "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6", + "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7", + "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f", + "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02", + "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c", + "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063", + "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", + "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5", + "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959", + "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", + "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", + "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", + "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9", + "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5", + "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f", + "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", + "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", + 
"sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9", + "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f", + "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", + "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb", + "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1", + "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb", + "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250", + "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e", + "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", + "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5", + "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", + "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2", + "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", + "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", + "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", + "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", + "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9", + "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", + "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0", + "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9", + "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", + "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050", + "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d", + "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6", + "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353", + "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb", + "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e", + "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8", + "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495", + "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2", + "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd", + "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27", + "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1", + "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818", + "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", + "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e", + "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850", + "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3" + ], + "markers": "python_version >= '3.7'", + "version": "==7.2.7" + }, + "exceptiongroup": { + "hashes": [ + "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", + "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" + ], + "markers": "python_version < '3.11'", + "version": "==1.1.1" + }, + "flake8": { + "hashes": [ + "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7", + "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181" + ], + "index": "pypi", + "version": "==6.0.0" + }, + "iniconfig": { + "hashes": [ + 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "packaging": { + "hashes": [ + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" + ], + "markers": "python_version >= '3.7'", + "version": "==23.1" + }, + "pluggy": { + "hashes": [ + "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", + "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3" + ], + "markers": "python_version >= '3.7'", + "version": "==1.2.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053", + "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610" + ], + "markers": "python_version >= '3.6'", + "version": "==2.10.0" + }, + "pyflakes": { + "hashes": [ + "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf", + "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd" + ], + "markers": "python_version >= '3.6'", + "version": "==3.0.1" + }, + "pytest": { + "hashes": [ + "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295", + "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b" + ], + "index": "pypi", + "version": "==7.3.2" + }, + "pytest-cov": { + "hashes": [ + "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", + "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a" + ], + "index": "pypi", + "version": "==4.1.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + } + } +} diff --git a/ecs/jskult-batch-monthly/README.md b/ecs/jskult-batch-monthly/README.md new file mode 100644 index 00000000..acf096d2 --- /dev/null +++ b/ecs/jskult-batch-monthly/README.md @@ -0,0 +1,48 @@ +# 実消化&アルトマーク 月次バッチ + +## 概要 + +実消化&アルトマークの月次バッチ処理。 + +## 環境情報 + +- Python 3.9 +- MySQL 8.23 +- VSCode + +## 環境構築 + +- Python の構築 + + - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 + - 「Pipenv の導入」までを行っておくこと + - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する + - `pipenv install --dev --python ` + - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく + +- MySQL の環境構築 + - Windows の場合、以下のリンクからダウンロードする + - + - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 + - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると + - `docker-compose up -d` + - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと + - データを投入する + - 立ち上げたデータベースに「src05」スキーマを作成する + - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する + - `mysql -h <ホスト名> -P <ポート> -u 
<ユーザー名> -p src05 < src05_dump.sql` +- 環境変数の設定 + - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する + - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください +- VSCode の設定 + - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する + - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する + +## 実行 + +- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 +- 「entrypoint.py」が、バッチ処理のエントリーポイント。 +- 実際の処理は、「src/jobctrl_daily.py」で行っている。 + + +## フォルダ構成(工事中) diff --git a/ecs/jskult-batch-monthly/entrypoint.py b/ecs/jskult-batch-monthly/entrypoint.py new file mode 100644 index 00000000..191d0eae --- /dev/null +++ b/ecs/jskult-batch-monthly/entrypoint.py @@ -0,0 +1,10 @@ +"""実消化&アルトマーク 日次バッチのエントリーポイント""" +from src import jobctrl_monthly + +if __name__ == '__main__': + try: + exit(jobctrl_monthly.exec()) + except Exception: + # エラーが起きても、正常系のコードで返す。 + # エラーが起きた事実はbatch_process内でログを出す。 + exit(0) diff --git a/ecs/jskult-batch-monthly/src/__init__.py b/ecs/jskult-batch-monthly/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/aws/__init__.py b/ecs/jskult-batch-monthly/src/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/aws/s3.py b/ecs/jskult-batch-monthly/src/aws/s3.py new file mode 100644 index 00000000..847f5bad --- /dev/null +++ b/ecs/jskult-batch-monthly/src/aws/s3.py @@ -0,0 +1,98 @@ +import os.path as path +import tempfile + +import boto3 + +from src.system_var import environment + + +class S3Client: + __s3_client = boto3.client('s3') + _bucket_name: str + + def list_objects(self, bucket_name: str, folder_name: str): + response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + if response['KeyCount'] == 0: + return [] + contents = response['Contents'] + # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする + objects = [{'filename': content['Key'], 'size': content['Size']} for content in contents if not content['Key'].endswith('/')] + return objects + + def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: + copy_source = {'Bucket': src_bucket, 'Key': src_key} + self.__s3_client.copy(copy_source, dest_bucket, dest_key) + return + + def download_file(self, bucket_name: str, file_key: str, file): + self.__s3_client.download_fileobj( + Bucket=bucket_name, + Key=file_key, + Fileobj=file + ) + return + + def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): + self.__s3_client.upload_file( + local_file_path, + Bucket=bucket_name, + Key=file_key + ) + + def delete_file(self, bucket_name: str, file_key: str): + self.__s3_client.delete_object( + Bucket=bucket_name, + Key=file_key + ) + + +class S3Bucket(): + _s3_client = S3Client() + _bucket_name: str = None + + +class UltmarcBucket(S3Bucket): + _bucket_name = environment.ULTMARC_DATA_BUCKET + _folder = environment.ULTMARC_DATA_FOLDER + + def list_dat_file(self): + return self._s3_client.list_objects(self._bucket_name, self._folder) + + def download_dat_file(self, dat_filename: str): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, dat_filename, f) + f.seek(0) + return temporary_file_path + + def backup_dat_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + ultmarc_backup_bucket = UltmarcBackupBucket() + backup_key = 
f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) + # コピー元のファイルを削除 + self._s3_client.delete_file(self._bucket_name, dat_file_key) + + +class ConfigBucket(S3Bucket): + _bucket_name = environment.JSKULT_CONFIG_BUCKET + + def download_arisj_output_day_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME) + arisj_output_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, arisj_output_day_list_key, f) + f.seek(0) + return temporary_file_path + + +class JskUltBackupBucket(S3Bucket): + _bucket_name = environment.JSKULT_BACKUP_BUCKET + + +class UltmarcBackupBucket(JskUltBackupBucket): + _folder = environment.ULTMARC_BACKUP_FOLDER diff --git a/ecs/jskult-batch-monthly/src/batch/batch_functions.py b/ecs/jskult-batch-monthly/src/batch/batch_functions.py new file mode 100644 index 00000000..27aac450 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/batch_functions.py @@ -0,0 +1,101 @@ +"""バッチ処理の共通関数""" +import logging +import textwrap +from datetime import datetime + +from src.db.database import Database +from src.error.exceptions import BatchOperationException, DBException +from src.system_var import constants + + +def get_batch_statuses() -> tuple[str, str, str]: + """日付テーブルから、以下を取得して返す。 + - バッチ処理中フラグ + - dump取得状況区分 + - 処理日(YYYY/MM/DD) + + Raises: + BatchOperationException: 日付テーブルが取得できないとき、何らかのエラーが発生したとき + + Returns: + tuple[str, str]: [0]バッチ処理中フラグ、dump取得状況区分 + """ + db = Database.get_instance() + sql = 'SELECT bch_actf, dump_sts_kbn, src05.get_syor_date() AS syor_date FROM src05.hdke_tbl' + try: + db.connect() + hdke_tbl_result = db.execute_select(sql) + except DBException as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + if len(hdke_tbl_result) == 0: + raise BatchOperationException('日付テーブルが取得できませんでした') + + # 必ず1件取れる + hdke_tbl_record = hdke_tbl_result[0] + batch_processing_flag = hdke_tbl_record['bch_actf'] + dump_status_kbn = hdke_tbl_record['dump_sts_kbn'] + syor_date = hdke_tbl_record['syor_date'] + # 処理日を文字列に変換する + syor_date_str = datetime.strftime(syor_date, '%Y/%m/%d') + + return batch_processing_flag, dump_status_kbn, syor_date_str + + +def update_batch_processing_flag_in_processing() -> None: + """バッチ処理中フラグを処理中に更新する + + Raises: + BatchOperationException: DB操作の何らかのエラー + """ + db = Database.get_instance() + sql = 'UPDATE src05.hdke_tbl SET bch_actf = :in_processing' + try: + db.connect() + db.execute(sql, {'in_processing': constants.BATCH_ACTF_BATCH_IN_PROCESSING}) + except DBException as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + return + + +def update_batch_process_complete() -> None: + """バッチ処理を完了とし、処理日、バッチ処理中フラグ、dump処理状態区分を更新する + + Raises: + BatchOperationException: DB操作の何らかのエラー + """ + db = Database.get_instance() + sql = """\ + UPDATE src05.hdke_tbl + SET + bch_actf = :batch_complete, + dump_sts_kbn = :dump_unprocessed, + syor_date = DATE_FORMAT((src05.get_syor_date() + interval 1 day), '%Y%m%d') -- +1日 + """ + try: + db.connect() + db.execute(sql, { + 'batch_complete': constants.BATCH_ACTF_BATCH_UNPROCESSED, + 'dump_unprocessed': constants.DUMP_STATUS_KBN_UNPROCESSED + }) + 
except DBException as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + return + + +def logging_sql(logger: logging.Logger, sql: str) -> None: + """SQL文をデバッグログで出力する + + Args: + logger (logging.Logger): ロガー + sql (str): SQL文 + """ + logger.debug(f'\n{"-" * 15}\n{textwrap.dedent(sql)[1:-1]}\n{"-" * 15}') diff --git a/ecs/jskult-batch-monthly/src/batch/common/__init__.py b/ecs/jskult-batch-monthly/src/batch/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py new file mode 100644 index 00000000..8b76415a --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py @@ -0,0 +1,37 @@ +class BatchContext: + __instance = None + __syor_date: str # 処理日(yyyy/mm/dd形式) + __is_not_business_monthly: bool # 月次バッチ起動日フラグ + + def __init__(self) -> None: + self.__is_not_business_monthly = False + + @classmethod + def get_instance(cls): + if cls.__instance is None: + cls.__instance = cls() + return cls.__instance + + @property + def syor_date(self): + return self.__syor_date + + @syor_date.setter + def syor_date(self, syor_date_str: str): + self.__syor_date = syor_date_str + + @property + def is_not_business_monthly(self): + return self.__is_not_business_monthly + + @is_not_business_monthly.setter + def is_not_business_monthly(self, flag: bool): + self.__is_not_business_monthly = flag + + @property + def is_ultmarc_imported(self): + return self.__is_ultmarc_imported + + @is_ultmarc_imported.setter + def is_ultmarc_imported(self, flag: bool): + self.__is_ultmarc_imported = flag diff --git a/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py b/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py new file mode 100644 index 00000000..b456f03c --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py @@ -0,0 +1,32 @@ +from src.system_var import constants + + +class CalendarFile: + """カレンダーファイル""" + + __calendar_file_lines: list[str] + + def __init__(self, calendar_file_path): + with open(calendar_file_path) as f: + self.__calendar_file_lines: list[str] = f.readlines() + + def compare_date(self, date_str: str) -> bool: + """与えられた日付がカレンダーファイル内に含まれているかどうか + カレンダーファイル内の日付はyyyy/mm/ddで書かれている前提 + コメント(#)が含まれている行は無視される + + Args: + date_str (str): yyyy/mm/dd文字列 + + Returns: + bool: 含まれていればTrue + """ + for calendar_date in self.__calendar_file_lines: + # コメント行が含まれている場合はスキップ + if constants.CALENDAR_COMMENT_SYMBOL in calendar_date: + continue + + if date_str in calendar_date: + return True + + return False diff --git a/ecs/jskult-batch-monthly/src/batch/parallel_processes.py b/ecs/jskult-batch-monthly/src/batch/parallel_processes.py new file mode 100644 index 00000000..0fb2d715 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/parallel_processes.py @@ -0,0 +1,32 @@ +"""並列処理""" + +import concurrent.futures + +from src.batch.bio_sales import create_bio_sales_lot +from src.batch.laundering import sales_laundering +from src.error.exceptions import BatchOperationException + + +def exec(): + # 並列処理を開始 + with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: + + # 実績更新 + future_sales_laundering = executor.submit(sales_laundering.exec) + # 生物由来ロット分解 + future_create_bio_sales_lot = executor.submit(create_bio_sales_lot.exec) + + # 両方の処理が完了するまで待つ + concurrent.futures.wait([future_sales_laundering, future_create_bio_sales_lot]) + + # エラーがあれば呼び出し元でキャッチする + sales_laundering_exc = 
future_sales_laundering.exception() + create_bio_sales_lot_exc = future_create_bio_sales_lot.exception() + + # いずれかにエラーが発生していれば、1つのエラーとして返す。 + if sales_laundering_exc is not None or create_bio_sales_lot_exc is not None: + sales_laundering_exc_message = str(sales_laundering_exc) if sales_laundering_exc is not None else '' + create_bio_sales_lot_exc_message = str(create_bio_sales_lot_exc) if create_bio_sales_lot_exc is not None else '' + raise BatchOperationException(f'並列処理中にエラーが発生しました。実績更新="{sales_laundering_exc_message}", 生物由来ロット分解={create_bio_sales_lot_exc_message}') + + return diff --git a/ecs/jskult-batch-monthly/src/db/__init__.py b/ecs/jskult-batch-monthly/src/db/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/db/database.py b/ecs/jskult-batch-monthly/src/db/database.py new file mode 100644 index 00000000..f67a21b9 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/db/database.py @@ -0,0 +1,178 @@ +from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, + create_engine, text) +from sqlalchemy.engine.url import URL +from tenacity import retry, stop_after_attempt, wait_exponential + +from src.error.exceptions import DBException +from src.logging.get_logger import get_logger +from src.system_var import environment + +logger = get_logger(__name__) + + +class Database: + """データベース操作クラス""" + __connection: Connection = None + __engine: Engine = None + __host: str = None + __port: str = None + __username: str = None + __password: str = None + __schema: str = None + __connection_string: str = None + + def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None: + """このクラスの新たなインスタンスを初期化します + + Args: + username (str): DBユーザー名 + password (str): DBパスワード + host (str): DBホスト名 + port (int): DBポート + schema (str): DBスキーマ名 + """ + self.__username = username + self.__password = password + self.__host = host + self.__port = int(port) + self.__schema = schema + + self.__connection_string = URL.create( + drivername='mysql+pymysql', + username=self.__username, + password=self.__password, + host=self.__host, + port=self.__port, + database=self.__schema, + query={"charset": "utf8mb4"} + ) + + self.__engine = create_engine( + self.__connection_string, + pool_timeout=5, + poolclass=QueuePool + ) + + @classmethod + def get_instance(cls): + """インスタンスを取得します + + Returns: + Database: DB操作クラスインスタンス + """ + return cls( + username=environment.DB_USERNAME, + password=environment.DB_PASSWORD, + host=environment.DB_HOST, + port=environment.DB_PORT, + schema=environment.DB_SCHEMA + ) + + @retry( + wait=wait_exponential( + multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, + min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, + max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS + ), + stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT)) + def connect(self): + """ + DBに接続します。接続に失敗した場合、リトライします。 + Raises: + DBException: 接続失敗 + """ + try: + self.__connection = self.__engine.connect() + except Exception as e: + raise DBException(e) + + def execute_select(self, select_query: str, parameters=None) -> list[dict]: + """SELECTクエリを実行します。 + + Args: + select_query (str): SELECT文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
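connect() above retries with tenacity before giving up, using exponential backoff bounded by the DB_CONNECTION_* settings. A self-contained sketch of that retry pattern with illustrative values (the real multiplier, min, max and attempt count come from the environment variables, and the real code wraps failures in DBException):

from tenacity import retry, stop_after_attempt, wait_exponential

class TransientConnectionError(Exception):
    pass

attempt_counter = {"count": 0}

@retry(
    wait=wait_exponential(multiplier=1, min=1, max=8),  # backoff grows per attempt, clamped to 1..8 seconds
    stop=stop_after_attempt(5),                          # give up after 5 tries
    reraise=True,                                        # surface the last error to the caller
)
def connect():
    attempt_counter["count"] += 1
    if attempt_counter["count"] < 3:
        raise TransientConnectionError("connection refused")  # simulated transient failure
    return f"connected on attempt {attempt_counter['count']}"

print(connect())  # succeeds on the third attempt after two backoff waits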
+ + Raises: + DBException: DBエラー + + Returns: + list[dict]: カラム名: 値の辞書リスト + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(select_query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(select_query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + result_rows = result.mappings().all() + return result_rows + + def execute(self, query: str, parameters=None) -> CursorResult: + """SQLクエリを実行します。 + + Args: + query (str): SQL文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. + + Raises: + DBException: DBエラー + + Returns: + CursorResult: 取得結果 + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + return result + + def begin(self): + """トランザクションを開始します。""" + if not self.__connection.in_transaction(): + self.__connection.begin() + + def commit(self): + """トランザクションをコミットします""" + if self.__connection.in_transaction(): + self.__connection.commit() + + def rollback(self): + """トランザクションをロールバックします""" + if self.__connection.in_transaction(): + self.__connection.rollback() + + def disconnect(self): + """DB接続を切断します。""" + if self.__connection is not None: + self.__connection.close() + self.__connection = None + + def __execute_with_transaction(self, query: str, parameters: dict): + # トランザクションを開始してクエリを実行する + with self.__connection.begin(): + try: + result = self.__connection.execute(text(query), parameters=parameters) + except Exception as e: + self.__connection.rollback() + raise e + # ここでコミットされる + return result diff --git a/ecs/jskult-batch-monthly/src/error/__init__.py b/ecs/jskult-batch-monthly/src/error/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/error/exceptions.py b/ecs/jskult-batch-monthly/src/error/exceptions.py new file mode 100644 index 00000000..055c24f6 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/error/exceptions.py @@ -0,0 +1,10 @@ +class MeDaCaException(Exception): + pass + + +class DBException(MeDaCaException): + pass + + +class BatchOperationException(MeDaCaException): + pass diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py new file mode 100644 index 00000000..b58c4601 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -0,0 +1,88 @@ +"""実消化&アルトマーク 月次バッチ処理""" + +from src.aws.s3 import ConfigBucket +from src.batch.batch_functions import ( + get_batch_statuses, update_batch_process_complete, + update_batch_processing_flag_in_processing) +from src.batch.common.batch_context import BatchContext +from src.batch.common.calendar_file import CalendarFile +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.system_var import constants + +logger = get_logger('月次処理コントロール') + +# バッチ共通設定を取得 +batch_context = BatchContext.get_instance() + + +def exec(): + try: + logger.info('月次バッチ:開始') + try: + # 月次バッチ処置中フラグ、dump処理状態区分、処理日を取得 + 
batch_processing_flag, dump_status_kbn, syor_date = get_batch_statuses() + except BatchOperationException as e: + logger.exception(f'日付テーブル取得(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + + # 月次バッチ処理中の場合、後続の処理は行わない + if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING: + logger.error('バッチ処理中のため、月次バッチ処理を終了します。') + return constants.BATCH_EXIT_CODE_SUCCESS + + # dump取得が正常終了していない場合、後続の処理は行わない + if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: + logger.error('dump取得が正常終了していないため、月次バッチ処理を終了します。') + return constants.BATCH_EXIT_CODE_SUCCESS + + logger.info(f'処理日={syor_date}') + # バッチ共通設定に処理日を追加 + batch_context.syor_date = syor_date + + # 稼働日かかどうかを、実消化&アルトマーク月次バッチ稼働日ファイルをダウンロードして判定 + try: + arisj_output_day_list_file_path = ConfigBucket().download_arisj_output_day_list() + arisj_output_day_calendar = CalendarFile(arisj_output_day_list_file_path) + batch_context.is_not_business_monthly = arisj_output_day_calendar.compare_date(syor_date) + except Exception as e: + logger.exception(f'実消化&アルトマーク月次バッチ稼働日ファイルの読み込みに失敗しました。{e}') + return constants.BATCH_EXIT_CODE_SUCCESS + + # 調査目的でV実消化稼働日かどうかをログ出力 + logger.debug(f'本日は{"実消化&アルトマーク月次バッチ稼働日です。" if batch_context.is_not_business_monthly else "実消化&アルトマーク月次バッチ非稼働日です。"}') + + # バッチ処理中に更新 + try: + update_batch_processing_flag_in_processing() + except BatchOperationException as e: + logger.exception(f'処理フラグ更新(未処理→処理中) エラー(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + + try: + logger.info('月次バッチ:起動') + # ultmarc_process.exec_import() + logger.info('月次バッチ:終了') + except BatchOperationException as e: + logger.exception(f'月次バッチ処理エラー(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + + # 調査目的で月次バッチが行われたかどうかをログ出力 + logger.debug(f'{"月次バッチが行われました。" if batch_context.is_not_business_monthly else "月次バッチが行われませんでした。"}') + + # バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 + logger.info('業務日付更新・バッチステータスリフレッシュ:起動') + try: + update_batch_process_complete() + except BatchOperationException as e: + logger.exception(f'業務日付更新・バッチステータスリフレッシュ エラー(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + logger.info('業務日付更新・バッチステータスリフレッシュ:終了') + + # 正常終了を保守ユーザーに通知 + logger.info('[NOTICE]月次バッチ:終了(正常終了)') + return constants.BATCH_EXIT_CODE_SUCCESS + + except Exception as e: + logger.exception(f'月次バッチ処理中に想定外のエラーが発生しました {e}') + raise e diff --git a/ecs/jskult-batch-monthly/src/logging/get_logger.py b/ecs/jskult-batch-monthly/src/logging/get_logger.py new file mode 100644 index 00000000..f36f1199 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/logging/get_logger.py @@ -0,0 +1,37 @@ +import logging + +from src.system_var.environment import LOG_LEVEL + +# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する +for name in ["boto3", "botocore", "s3transfer", "urllib3"]: + logging.getLogger(name).setLevel(logging.WARNING) + + +def get_logger(log_name: str) -> logging.Logger: + """一意のログ出力モジュールを取得します。 + + Args: + log_name (str): ロガー名 + + Returns: + _type_: _description_ + """ + logger = logging.getLogger(log_name) + level = logging.getLevelName(LOG_LEVEL) + if not isinstance(level, int): + level = logging.INFO + logger.setLevel(level) + + if not logger.hasHandlers(): + handler = logging.StreamHandler() + logger.addHandler(handler) + + formatter = logging.Formatter( + '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', + '%Y-%m-%d %H:%M:%S' + ) + + for handler in logger.handlers: + handler.setFormatter(formatter) + + return logger diff --git a/ecs/jskult-batch-monthly/src/system_var/__init__.py b/ecs/jskult-batch-monthly/src/system_var/__init__.py new file mode 
100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/system_var/constants.py b/ecs/jskult-batch-monthly/src/system_var/constants.py new file mode 100644 index 00000000..8a0ccbb3 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/system_var/constants.py @@ -0,0 +1,17 @@ +# バッチ正常終了コード +BATCH_EXIT_CODE_SUCCESS = 0 + +# バッチ処理中フラグ:未処理 +BATCH_ACTF_BATCH_UNPROCESSED = '0' +# バッチ処理中フラグ:処理中 +BATCH_ACTF_BATCH_IN_PROCESSING = '1' +# dump取得状態区分:未処理 +DUMP_STATUS_KBN_UNPROCESSED = '0' +# dump取得状態区分:dump取得正常終了 +DUMP_STATUS_KBN_COMPLETE = '2' + +# カレンダーファイルのコメントシンボル +CALENDAR_COMMENT_SYMBOL = '#' + +# 月曜日(datetime.weekday()で月曜日を表す数字) +WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-monthly/src/system_var/environment.py b/ecs/jskult-batch-monthly/src/system_var/environment.py new file mode 100644 index 00000000..25afc294 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/system_var/environment.py @@ -0,0 +1,25 @@ +import os + +# Database +DB_HOST = os.environ['DB_HOST'] +DB_PORT = int(os.environ['DB_PORT']) +DB_USERNAME = os.environ['DB_USERNAME'] +DB_PASSWORD = os.environ['DB_PASSWORD'] +DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS +ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] +ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] +JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] +ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME'] + +# 初期値がある環境変数 +LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') +DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) diff --git a/ecs/jskult-batch-monthly/src/time/elapsed_time.py b/ecs/jskult-batch-monthly/src/time/elapsed_time.py new file mode 100644 index 00000000..c1432e91 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/time/elapsed_time.py @@ -0,0 +1,22 @@ +import time + + +class ElapsedTime: + """処理実行時間計測クラス""" + def __init__(self) -> None: + """このクラスの新たなインスタンスを初期化します。""" + self.__start = time.perf_counter() + + @property + def of(self): + """インスタンス化してからの経過時間をhh:mm:ssの形式にフォーマットして返す + Returns: + str: 時分秒形式の経過時間 + """ + elapsed_time = time.perf_counter() - self.__start + h, rem = divmod(elapsed_time, 3600) + m, s = divmod(rem, 60) + h_str = f'{h:02.0f} hour ' if h > 0.0 else '' + m_str = f'{m:02.0f} min ' if m > 0.0 else '' + s_str = f'{s:06.02f} sec' if s > 0.0 else '' + return f"{h_str}{m_str}{s_str}" diff --git a/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt b/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt new file mode 100644 index 00000000..2d5f42c0 --- /dev/null +++ b/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt @@ -0,0 +1,100 @@ +2023/06/23 +2023/06/24 +2023/06/25 +2023/06/26 +2023/06/27 +2023/06/28 +2023/06/29 +2023/06/30 +2023/07/01 +2023/07/02 +2023/07/03 +2023/07/04 +2023/07/05 +2023/07/06 +2023/07/07 +2023/07/08 +2023/07/09 +2023/07/10 +2023/07/11 +2023/07/12 +2023/07/13 
+2023/07/14 +2023/07/15 +2023/07/16 +2023/07/17 +2023/07/18 +2023/07/19 +2023/07/20 +2023/07/21 +2023/07/22 +2023/07/23 +2023/07/24 +2023/07/25 +2023/07/26 +2023/07/27 +2023/07/28 +2023/07/29 +2023/07/30 +2023/07/31 +2023/08/01 +2023/08/02 +2023/08/03 +2023/08/04 +2023/08/05 +2023/08/06 +2023/08/07 +2023/08/08 +2023/08/09 +2023/08/10 +2023/08/11 +2023/08/12 +2023/08/13 +2023/08/14 +2023/08/15 +2023/08/16 +2023/08/17 +2023/08/18 +2023/08/19 +2023/08/20 +2023/08/21 +2023/08/22 +2023/08/23 +2023/08/24 +2023/08/25 +2023/08/26 +2023/08/27 +2023/08/28 +2023/08/29 +2023/08/30 +2023/08/31 +2023/09/01 +2023/09/02 +2023/09/03 +2023/09/04 +2023/09/05 +2023/09/06 +2023/09/07 +2023/09/08 +2023/09/09 +2023/09/10 +2023/09/11 +2023/09/12 +2023/09/13 +2023/09/14 +2023/09/15 +2023/09/16 +2023/09/17 +2023/09/18 +2023/09/19 +2023/09/20 +2023/09/21 +2023/09/22 +2023/09/23 +2023/09/24 +2023/09/25 +2023/09/26 +2023/09/27 +2023/09/28 +2023/09/29 +2023/09/30 From d285a28c9636d3dbee1d81620ad2301a7b7bf412 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 23 Jun 2023 20:03:37 +0900 Subject: [PATCH 059/103] =?UTF-8?q?LOAD=E3=81=AE=E5=AE=9F=E8=A1=8C?= =?UTF-8?q?=E7=B5=90=E6=9E=9C=E3=81=AE=E3=83=AF=E3=83=BC=E3=83=8B=E3=83=B3?= =?UTF-8?q?=E3=82=B0=E3=82=92=E3=82=AD=E3=83=A3=E3=83=83=E3=83=81=E3=81=97?= =?UTF-8?q?=E3=81=A6=E4=BE=8B=E5=A4=96=E3=81=A7=E8=90=BD=E3=81=A8=E3=81=99?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=82=92=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 17 +++++++++-------- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 1 + 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 61f287c5..f869b983 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -35,17 +35,18 @@ class VjskDataLoadManager: ENCLOSED BY '\"' IGNORE 1 LINES; """ + db.begin() result = db.execute(sql, {"src_file_name": src_file_name}) - logger.debug(sql) - # MEMO : sqlalchemy(engine=pymysql)としたときの result.context.cursor は、engineに依存してクラスが異なる - # https://nds-tyo.backlog.com/view/NEWDWH2021-1006#comment-266127218 - if result.context.cursor._result.warning_count > 0: - result_w = db.execute("SHOW WARNINGS;") - for row in result_w.fetchall(): - logger.info(f"SHOW WARNINGS : {row}") + result_w = db.execute("SHOW WARNINGS;") + has_mysql_warnings = False + for row in result_w.fetchall(): + has_mysql_warnings = True + logger.info(f"SHOW WARNINGS : {row}") + if has_mysql_warnings: raise Exception("LOAD文実行時にWARNINGが発生しました。") - + logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') + db.commit() # org→srcにinsert select db.begin() diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index a5cfd370..2dbe2ef5 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -35,6 +35,7 @@ class TestImportFileToDb: self.db = database self.db.connect() + self.db.execute("set sql_mode = 'TRADITIONAL';") # self.db.begin() # testing From 626ee291c36245e5fc6562f3114c29a0261998a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Mon, 26 Jun 2023 08:54:13 +0900 
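For context on the LOAD change above: depending on sql_mode, MySQL may report bad rows in LOAD DATA as warnings rather than errors, so the batch now runs SHOW WARNINGS right after the LOAD and fails on any hit. A condensed sketch of that pattern, assuming the same Database wrapper; the SQL text and file name here are placeholders.

# Sketch: treat any MySQL warning from a LOAD as a hard failure.
def load_file_strictly(db, load_sql: str, src_file_name: str) -> int:
    db.begin()
    result = db.execute(load_sql, {"src_file_name": src_file_name})
    warnings = db.execute("SHOW WARNINGS;").fetchall()
    for row in warnings:
        print(f"SHOW WARNINGS : {row}")  # keep the raw warning rows for the log
    if warnings:
        raise Exception("LOAD文実行時にWARNINGが発生しました。")
    db.commit()
    return result.rowcount

The accompanying test pins sql_mode to 'TRADITIONAL' so the local MySQL reports problems consistently with the batch environment.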
Subject: [PATCH 060/103] =?UTF-8?q?=E4=BD=9C=E6=A5=AD=E4=B8=AD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/jskult_batch_monthly.py | 236 ++++++++++++++++++ .../src/jobctrl_monthly.py | 5 +- 2 files changed, 239 insertions(+), 2 deletions(-) create mode 100644 ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py diff --git a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py new file mode 100644 index 00000000..3cbecd15 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py @@ -0,0 +1,236 @@ + +from datetime import datetime + +# from src.aws.s3 import UltmarcBucket +# from src.batch.common.batch_context import BatchContext +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +# from src.system_var import constants +import pathlib +import os + +logger = get_logger('実消化&アルトマーク月次バッチ') + + +class JskultBathcMonthly(): + """ 実消化&アルトマーク月次バッチ """ + + # WKテーブルの過去分削除SQL + PHYSICAL_NORMAL_DELETE_QUERY = """\ + DELETE FROM src05.wk_inst_aris_if + """ + + # 正常系データを取得しWKテーブルに保存SQL + NORMAL_INSERT_SELECT_QUERY = """\ + INSERT src05.wk_inst_aris_if + SELECT + TRIM(' ' FROM TRIM(' ' FROM SUBSTRIN(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.JOIN com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND ci.dcf_dsf_inst_cd IS NOT NULL + AND ci.form_inst_name_kanji IS NOT NULL + AND ci.prefc_cd IS NOT NULL + AND cp.prefc_name IS NOT NULL + AND cc.city_name IS NOT NULL + AND ci.inst_addr IS NOT NULL + ORDER BY ci.dcf_dsf_inst_cd + """ + + # 正常系データの件数を取得SQL + NORMAL_COUNT_QUERY = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if + """ + + # 異常系WKテーブルの過去分削除SQL + PHYSICAL_ABNORMAL_DELETE_QUERY = """\ + DELETE FROM src05.wk_inst_aris_if_wrn + """ + + # 異常系データを取得しWKテーブルに保存SQL + ABNORMAL_INSERT_SELECT_QUERY = """\ + INSERT src05.wk_inst_aris_if_wrn + SELECT + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' from 
ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date + ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd + ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form + ,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd + ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name + ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name + ,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND( ci.dcf_dsf_inst_cd IS NULL + OR ci.form_inst_name_kanji IS NULL + OR ci.prefc_cd IS NULL + OR cp.prefc_name IS NULL + OR cc.city_name IS NULL + OR ci.inst_addr IS NULL) + ORDER BY ci.dcf_dsf_inst_cd + """ + + # 正常系データの件数を取得SQL + ABNORMAL_COUNT_QUERY = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn + """ + # CSVファイルの作成用のSQL + SELECT_QUERY = """\ + SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name, + postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd, + '', inst_delete_date + FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd + """ + + aris_log = '/var/log/temporarydwh/' + move_file_path = '/data/mountaris/DATA/' + create_date = datetime.now().strftime('%Y%m%d%H%M%S') + create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') + aris_create_csv = f'/home/nds_dwh/tmpcsv/D0004_ARIS_M_DCF_{create_date}csv' + aris_move_csv = f'{move_file_path}D0004_ARIS_M_DCF_{create_date}.csv' + res_log = f'{aris_log}D0004{create_date}.log' + move_res_og = f'{move_file_path}D0004{create_date}log' + prg_id = 'PrgId:BI0402' + head_str = 'TC_HOSPITAL, TJ_HOSPITAL, TJ_HOSPITALSHORT, TK_HOSPITAL, \ + TC_PREFECTURE, TJ_PREFECTURE, TJ_ZIPCODE, TJ_CITY, TJ_ADDRESS, TJ_DEPARTMENT, \ + TJ_TELEPHONENUMBER, TC_HOSPITALCAT, TC_HOSPITALTYPE, TS_UPDATE, TD_UPDATE' + + start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。\n" + dbConnect_err_msg = "MsgID:999999000002 Message:DB接続エラーです。\n" + err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。\n" + move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。\n" + sql_err_msg = "MsgID:999999000002 Message:SQL実行エラーです。\n" + csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。\n" + cnt_msg = "MsgID: Message: LogText:" + suc_end_msg = "MsgID:BI0000009999 Message:バッチ処理を正常に終了しました。\n" + + def exec_batch_monthly(self): + """ 実消化&アルトマーク月次バッチ """ + try: + # 実行ログに書き込む + res_log_p = pathlib.Path(self.res_log) + res_log_p.touch() + os.chmod(self.res_log, '0664') + + # 実行ログ + resLog_f = open(self.res_log) + print(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.start_msg}') + + db = Database.get_instance() + # DB接続 + db.connect() + # 
トランザクションの開始 + db.begin() + + # 正常系データの反映 + # 過去分は不要のため、デリート + db.execute(self.PHYSICAL_NORMAL_DELETE_QUERY) + + # 正常系データを取得しWKテーブルに保存する。 + db.execute(self.NORMAL_INSERT_SELECT_QUERY) + + # 正常系データの件数を取得 + record_count = db.execute_select(self.NORMAL_COUNT_QUERY) + suc_count = record_count[0]['countNum'] + + # 警告系データの反映 + # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 + db.execute(self.PHYSICAL_ABNORMAL_DELETE_QUERY) + + # 異常系データを取得しWKテーブルに保存する。 + db.execute(self.ABNORMAL_INSERT_SELECT_QUERY) + + # 異常系データの件数を取得 + record_count = db.execute_select(self.ABNORMAL_COUNT_QUERY) + wrn_count = record_count[0]['countNum'] + + # CSVファイルの作成用のSQL実行 + record_csv = db.execute_select(self.SELECT_QUERY) + + # CSVファイル作成 + arisCreateCsv_p = pathlib.Path(self.arisCreateCsv) + arisCreateCsv_p.touch() + if not os.path.exists(self.arisCreateCsv): + print(f'{self.create_date_format}[DWH][5][ERROR]{self.prg_id} {self.csv_err_msg}') + print(f'{self.create_date_format}[DWH][5][ERROR]{self.prg_id} {self.err_end_msg}') + + # ヘッダ行書き込み + resLog_f = open(self.aris_create_csv) + print(f'{self.head_str}\r\n') + + # データ部分書き込み + for record_data in record_csv: + csv_data = ",".join(record_data).encode('shift_jis') + print(f'{csv_data}\r\n') + + logger.info('use memory--->') + logger.info('memory_get_usage 与えられたメモリの量') + logger.info('\n') + logger.info('max memory--->') + logger.info('memory_get_peak_usage  メモリの最大値') + logger.info('\n') + + resLog_f.close() + + # トランザクションの終了 + db.commit() + + # 実行ログファイルの追記 + # 実行ログに処理件数を書き込む。 + sum_count = suc_count + wrn_count + print(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') + + # ファイル移動処理 + + logger.info('実消化&アルトマーク月次バッチ処理: 終了') + except Exception as e: + raise BatchOperationException(e) + + finally: + # 終了時に必ずコミットする + db.commit() + db.disconnect() + return diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py index b58c4601..302993a2 100644 --- a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -9,6 +9,7 @@ from src.batch.common.calendar_file import CalendarFile from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants +from src.batch.jskult_batch_monthly import JskultBathcMonthly logger = get_logger('月次処理コントロール') @@ -34,7 +35,7 @@ def exec(): # dump取得が正常終了していない場合、後続の処理は行わない if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: logger.error('dump取得が正常終了していないため、月次バッチ処理を終了します。') - return constants.BATCH_EXIT_CODE_SUCCESS + # 戻すんだよ return constants.BATCH_EXIT_CODE_SUCCESS logger.info(f'処理日={syor_date}') # バッチ共通設定に処理日を追加 @@ -61,7 +62,7 @@ def exec(): try: logger.info('月次バッチ:起動') - # ultmarc_process.exec_import() + JskultBathcMonthly.exec_batch_monthly() logger.info('月次バッチ:終了') except BatchOperationException as e: logger.exception(f'月次バッチ処理エラー(異常終了){e}') From 8534e6d66bcd53ced315251ceec3a198384ded4a Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 26 Jun 2023 13:38:23 +0900 Subject: [PATCH 061/103] =?UTF-8?q?feat:=20=E3=83=87=E3=83=BC=E3=82=BF?= =?UTF-8?q?=E3=83=99=E3=83=BC=E3=82=B9=E6=93=8D=E4=BD=9C=E9=83=A8=E5=93=81?= =?UTF-8?q?=E3=82=92=E4=BF=AE=E6=AD=A3=E3=80=82=E3=83=91=E3=83=A9=E3=83=A1?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E6=8C=87=E5=AE=9A=E3=81=AB=E3=82=88=E3=82=8A?= =?UTF-8?q?=E3=80=81AUTOCOMMIT=E3=81=A7=E6=8E=A5=E7=B6=9A=E3=81=99?= 
=?UTF-8?q?=E3=82=8B=E3=81=8B=E3=81=A9=E3=81=86=E3=81=8B=E3=82=92=E5=88=86?= =?UTF-8?q?=E5=B2=90=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../laundering/sales_results_laundering.py | 4 +-- ecs/jskult-batch-daily/src/db/database.py | 26 ++++++++++++++----- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 530fd9b9..87958e5c 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -1,14 +1,14 @@ +from src.batch.batch_functions import logging_sql from src.db.database import Database from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger -from src.batch.batch_functions import logging_sql from src.system_var import environment logger = get_logger('卸卸販売洗替') def exec(): - db = Database.get_instance() + db = Database.get_instance(autocommit=True) try: db.connect() logger.debug('処理開始') diff --git a/ecs/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-daily/src/db/database.py index f67a21b9..b9a745be 100644 --- a/ecs/jskult-batch-daily/src/db/database.py +++ b/ecs/jskult-batch-daily/src/db/database.py @@ -13,15 +13,17 @@ logger = get_logger(__name__) class Database: """データベース操作クラス""" __connection: Connection = None - __engine: Engine = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None __host: str = None __port: str = None __username: str = None __password: str = None __schema: str = None + __autocommit: bool = None __connection_string: str = None - def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None: + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: """このクラスの新たなインスタンスを初期化します Args: @@ -30,12 +32,14 @@ class Database: host (str): DBホスト名 port (int): DBポート schema (str): DBスキーマ名 + autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. """ self.__username = username self.__password = password self.__host = host self.__port = int(port) self.__schema = schema + self.__autocommit = autocommit self.__connection_string = URL.create( drivername='mysql+pymysql', @@ -47,16 +51,20 @@ class Database: query={"charset": "utf8mb4"} ) - self.__engine = create_engine( + self.__transactional_engine = create_engine( self.__connection_string, pool_timeout=5, poolclass=QueuePool ) + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + @classmethod - def get_instance(cls): + def get_instance(cls, autocommit=False): """インスタンスを取得します + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
Returns: Database: DB操作クラスインスタンス """ @@ -65,7 +73,8 @@ class Database: password=environment.DB_PASSWORD, host=environment.DB_HOST, port=environment.DB_PORT, - schema=environment.DB_SCHEMA + schema=environment.DB_SCHEMA, + autocommit=autocommit ) @retry( @@ -77,12 +86,15 @@ class Database: stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT)) def connect(self): """ - DBに接続します。接続に失敗した場合、リトライします。 + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) Raises: DBException: 接続失敗 """ try: - self.__connection = self.__engine.connect() + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) except Exception as e: raise DBException(e) From 00175d26f7213d912aadecd164898de913736633 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Mon, 26 Jun 2023 14:20:17 +0900 Subject: [PATCH 062/103] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/laundering/sales_results_laundering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 530fd9b9..05086e4a 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -4,7 +4,7 @@ from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql from src.system_var import environment -logger = get_logger('卸卸販売洗替') +logger = get_logger('卸販売洗替') def exec(): From c76700a37ae165a10114312c2200767fe33c60e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Mon, 26 Jun 2023 16:28:01 +0900 Subject: [PATCH 063/103] =?UTF-8?q?=E4=BB=AE=E5=AE=8C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/.env.example | 3 +- .../src/batch/jskult_batch_monthly.py | 108 +++++++++++------- .../src/jobctrl_monthly.py | 5 +- 3 files changed, 69 insertions(+), 47 deletions(-) diff --git a/ecs/jskult-batch-monthly/.env.example b/ecs/jskult-batch-monthly/.env.example index 19a3f19f..f2bb73c8 100644 --- a/ecs/jskult-batch-monthly/.env.example +++ b/ecs/jskult-batch-monthly/.env.example @@ -13,7 +13,8 @@ JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt ARISJ_DATA_BUCKET=********** LOG_LEVEL=************** -ARISJ_BACKUP_FOLDER=************** +ARISJ_BACKUP_FOLDER=arisj +ARISJ_DATA_FOLDER=DATA JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME=jskult_arisj_output_day_list.txt DB_CONNECTION_MAX_RETRY_ATTEMPT=************** DB_CONNECTION_RETRY_INTERVAL_INIT=************** diff --git a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py index 3cbecd15..b4caefe1 100644 --- a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py +++ b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py @@ -1,14 +1,13 @@ from datetime import datetime -# from src.aws.s3 import UltmarcBucket -# from src.batch.common.batch_context import BatchContext from src.db.database import Database from src.error.exceptions import BatchOperationException from 
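As background for the autocommit option above: SQLAlchemy lets one Engine be derived from another with different execution options, which is how a single connection string yields both a transactional and an AUTOCOMMIT engine. A standalone sketch with a placeholder URL and placeholder table name follows.

# Sketch: one base engine plus an AUTOCOMMIT view of it (placeholder URL and table).
from sqlalchemy import create_engine, text

base_engine = create_engine("mysql+pymysql://user:pass@localhost:3306/src05?charset=utf8mb4")
autocommit_engine = base_engine.execution_options(isolation_level="AUTOCOMMIT")

with autocommit_engine.connect() as conn:
    # Each statement is committed as soon as it runs; explicit begin()/commit() have no effect.
    conn.execute(text("UPDATE src05.some_table SET dwh_upd_dt = SYSDATE()"))

with base_engine.connect() as conn:
    with conn.begin():
        # Statements here only become visible when this block commits.
        conn.execute(text("UPDATE src05.some_table SET dwh_upd_dt = SYSDATE()"))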
src.logging.get_logger import get_logger -# from src.system_var import constants -import pathlib import os +import tempfile +import os.path as path +import boto3 logger = get_logger('実消化&アルトマーク月次バッチ') @@ -25,7 +24,7 @@ class JskultBathcMonthly(): NORMAL_INSERT_SELECT_QUERY = """\ INSERT src05.wk_inst_aris_if SELECT - TRIM(' ' FROM TRIM(' ' FROM SUBSTRIN(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form @@ -47,7 +46,7 @@ class JskultBathcMonthly(): LEFT JOIN src05.mst_city cc ON ci.prefc_cd = cc.prefc_cd AND ci.city_cd = cc.city_cd - LEFT OUTER JOIN src05.JOIN com_inst_div cd + LEFT OUTER JOIN src05.com_inst_div cd ON ci.inst_div_cd = cd.inst_div_cd WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' AND ci.dcf_dsf_inst_cd IS NOT NULL @@ -129,35 +128,26 @@ class JskultBathcMonthly(): move_file_path = '/data/mountaris/DATA/' create_date = datetime.now().strftime('%Y%m%d%H%M%S') create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') - aris_create_csv = f'/home/nds_dwh/tmpcsv/D0004_ARIS_M_DCF_{create_date}csv' - aris_move_csv = f'{move_file_path}D0004_ARIS_M_DCF_{create_date}.csv' - res_log = f'{aris_log}D0004{create_date}.log' - move_res_og = f'{move_file_path}D0004{create_date}log' + aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' + res_log = f'D0004{create_date}.log' prg_id = 'PrgId:BI0402' head_str = 'TC_HOSPITAL, TJ_HOSPITAL, TJ_HOSPITALSHORT, TK_HOSPITAL, \ TC_PREFECTURE, TJ_PREFECTURE, TJ_ZIPCODE, TJ_CITY, TJ_ADDRESS, TJ_DEPARTMENT, \ TJ_TELEPHONENUMBER, TC_HOSPITALCAT, TC_HOSPITALTYPE, TS_UPDATE, TD_UPDATE' start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。\n" - dbConnect_err_msg = "MsgID:999999000002 Message:DB接続エラーです。\n" err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。\n" - move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。\n" - sql_err_msg = "MsgID:999999000002 Message:SQL実行エラーです。\n" csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。\n" cnt_msg = "MsgID: Message: LogText:" - suc_end_msg = "MsgID:BI0000009999 Message:バッチ処理を正常に終了しました。\n" def exec_batch_monthly(self): """ 実消化&アルトマーク月次バッチ """ try: # 実行ログに書き込む - res_log_p = pathlib.Path(self.res_log) - res_log_p.touch() - os.chmod(self.res_log, '0664') - - # 実行ログ - resLog_f = open(self.res_log) - print(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.start_msg}') + resLog = make_log_data(self) + resLog_f = resLog[0] + log_file_path = resLog[1] + resLog_f.write(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.start_msg}') db = Database.get_instance() # DB接続 @@ -191,29 +181,11 @@ class JskultBathcMonthly(): record_csv = db.execute_select(self.SELECT_QUERY) # CSVファイル作成 - arisCreateCsv_p = pathlib.Path(self.arisCreateCsv) - arisCreateCsv_p.touch() - if not os.path.exists(self.arisCreateCsv): - print(f'{self.create_date_format}[DWH][5][ERROR]{self.prg_id} {self.csv_err_msg}') - print(f'{self.create_date_format}[DWH][5][ERROR]{self.prg_id} {self.err_end_msg}') + csv_file_path = make_csv_data(self, record_csv) - # ヘッダ行書き込み - resLog_f = open(self.aris_create_csv) - print(f'{self.head_str}\r\n') - - # データ部分書き込み - for record_data in record_csv: - csv_data = ",".join(record_data).encode('shift_jis') - print(f'{csv_data}\r\n') - - logger.info('use 
memory--->') - logger.info('memory_get_usage 与えられたメモリの量') - logger.info('\n') - logger.info('max memory--->') - logger.info('memory_get_peak_usage  メモリの最大値') - logger.info('\n') - - resLog_f.close() + # テスト用に出力している(あとで消す) + logger.info(log_file_path) + logger.info(csv_file_path) # トランザクションの終了 db.commit() @@ -221,12 +193,17 @@ class JskultBathcMonthly(): # 実行ログファイルの追記 # 実行ログに処理件数を書き込む。 sum_count = suc_count + wrn_count - print(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') + resLog_f.write(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') + + # 実行ログファイルクローズ + resLog_f.close() # ファイル移動処理 + s3_upload_data(self, csv_file_path, log_file_path) logger.info('実消化&アルトマーク月次バッチ処理: 終了') except Exception as e: + resLog_f.write(f'{self.create_date_format}[DWH][5][INFO]{e.message}') raise BatchOperationException(e) finally: @@ -234,3 +211,46 @@ class JskultBathcMonthly(): db.commit() db.disconnect() return + + +def make_csv_data(self, record_csv: list): + # 一時ファイルとして保存する(CSVファイル) + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, self.aris_create_csv) + + # ヘッダ行書き込み + fp = open(csv_file_path, mode='w') + fp.write(f'{self.head_str}\n') + + # データ部分書き込み + for record_data in record_csv: + record_value = list(record_data.values()) + csv_data = ",".join(map(str, record_value)) + fp.write(f'{csv_data}\n') + + # ファイルクローズ + fp.close() + return csv_file_path + + +def make_log_data(self): + # 一時ファイルとして保存する(ログファイル) + temporary_dir = tempfile.mkdtemp() + log_file_path = path.join(temporary_dir, self.res_log) + fp = open(log_file_path, mode='w') + return fp, log_file_path + + +def s3_upload_data(self, csv_file_path, log_file_path): + # s3にログファイルとCSVファイルをUPする + + Bucket = os.environ['ARISJ_DATA_BUCKET'] + folder = os.environ['ARISJ_DATA_FOLDER'] + csv_file_name = f'{folder}/{self.aris_create_csv}' + log_file_name = f'{folder}/{self.res_log}' + + s3_client = boto3.client('s3') + s3_client.upload_file(csv_file_path, Bucket, csv_file_name) + s3_client.upload_file(csv_file_path, Bucket, log_file_name) + + return diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py index 302993a2..3a1e8d68 100644 --- a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -30,7 +30,7 @@ def exec(): # 月次バッチ処理中の場合、後続の処理は行わない if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING: logger.error('バッチ処理中のため、月次バッチ処理を終了します。') - return constants.BATCH_EXIT_CODE_SUCCESS + # 戻すんだよ return constants.BATCH_EXIT_CODE_SUCCESS # dump取得が正常終了していない場合、後続の処理は行わない if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: @@ -62,7 +62,8 @@ def exec(): try: logger.info('月次バッチ:起動') - JskultBathcMonthly.exec_batch_monthly() + BathcMonthly = JskultBathcMonthly() + BathcMonthly.exec_batch_monthly() logger.info('月次バッチ:終了') except BatchOperationException as e: logger.exception(f'月次バッチ処理エラー(異常終了){e}') From 07d9f3785353ef33b1e118d9424878df59d45ce7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 27 Jun 2023 09:33:00 +0900 Subject: [PATCH 064/103] =?UTF-8?q?feat:=20=E4=B8=8D=E8=A6=81=E3=81=AA?= =?UTF-8?q?=E3=82=B9=E3=83=9A=E3=83=BC=E3=82=B9=E7=AD=89=E3=81=AE=E5=89=8A?= =?UTF-8?q?=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../stored_procedure/src05/sales_lau_upsert.sql | 16 
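The make_csv_data / make_log_data / s3_upload_data helpers above all follow the same "write a temp file, then push it to S3" shape; a condensed sketch of that flow is below. The bucket and folder come from the environment variables added in this patch; the header string and function name are illustrative only.

import os
import os.path as path
import tempfile

import boto3


# Sketch: build a CSV in a temp directory and upload it to the ARIS data bucket.
def write_and_upload_csv(rows: list[dict], file_name: str, header: str) -> str:
    temporary_dir = tempfile.mkdtemp()
    csv_file_path = path.join(temporary_dir, file_name)
    with open(csv_file_path, mode='w') as fp:
        fp.write(f'{header}\n')
        for row in rows:
            fp.write(','.join(map(str, row.values())) + '\n')  # one record per line
    s3_client = boto3.client('s3')
    s3_client.upload_file(csv_file_path,
                          os.environ['ARISJ_DATA_BUCKET'],
                          f"{os.environ['ARISJ_DATA_FOLDER']}/{file_name}")
    return csv_file_path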
++++++++-------- .../src05/v_inst_merge_laundering.sql | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 63049f09..5f26b9c8 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -41,7 +41,7 @@ BEGIN internal05.bu_prd_name_contrast_t ( prd_cd, bu_cd, - phm_itm_cd, + phm_itm_cd, pp_start_date, pp_end_date, update_date, @@ -344,14 +344,14 @@ BEGIN AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date LEFT OUTER JOIN src05.com_inst AS ci ON s.v_inst_cd = ci.dcf_dsf_inst_cd - WHERE - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + WHERE + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') OR ( s.rec_sts_kbn = '0' AND s.err_flg20 != 'M' AND s.v_tran_cd IN (110, 120, 210, 220) AND ( - (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) + (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) OR s.fcl_exec_kbn IS NULL ) ) @@ -427,8 +427,8 @@ BEGIN dwh_upd_dt = SYSDATE() "; SET @upsert_sales_launderning = REPLACE(@upsert_sales_launderning, "$$target_table$$", target_table); - PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; - EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; + PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; + EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' @@ -463,8 +463,8 @@ BEGIN AND tt.row_num = s.row_num "; SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); - PREPARE update_institution_code_stmt from @update_institution_code; - EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; + PREPARE update_institution_code_stmt from @update_institution_code; + EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 35c4a700..7c1dee91 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -63,10 +63,10 @@ BEGIN WHERE tt.v_inst_cd = vimt.v_inst_cd AND (tt.inst_clas_cd IN ('1', '2', '3')) - "; - SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); - PREPARE update_institution_stmt from @update_institution; - EXECUTE update_institution_stmt; + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' From 6239ef783a21edde213c4c753f8f0fcd40c144b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 27 Jun 2023 14:02:47 +0900 Subject: [PATCH 065/103] =?UTF-8?q?feat:=20=E4=BE=8B=E5=A4=96=E5=87=A6?= =?UTF-8?q?=E7=90=86=E3=81=AE=E3=83=A1=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8?= 
=?UTF-8?q?=E3=82=AA=E3=83=90=E3=83=BC=E3=83=95=E3=83=AD=E3=83=BC=E3=81=AE?= =?UTF-8?q?=E5=AF=BE=E5=87=A6=E7=AD=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src05/hco_to_mdb_laundering.sql | 20 ++++++++---- .../src05/inst_merge_laundering.sql | 12 +++++-- .../src05/sales_lau_delete.sql | 12 +++++-- .../src05/sales_lau_upsert.sql | 32 +++++++++++-------- .../src05/v_inst_merge_laundering.sql | 12 +++++-- .../src05/whs_org_laundering.sql | 20 ++++++++---- 6 files changed, 72 insertions(+), 36 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index b3cbfc6e..8201f3bc 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -14,23 +14,29 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A① 開始'); TRUNCATE TABLE internal05.hco_cnv_mdb_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A① 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A② 開始'); INSERT INTO @@ -74,10 +80,10 @@ BEGIN AND ci.delete_flg = '0' ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A② 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 開始'); SET @update_institution = " @@ -96,7 +102,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 終了'); END diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 6a0642a9..05908ca1 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN 
CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 開始'); SET @update_institution = " @@ -53,7 +59,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 終了'); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index c1610435..8b312d6f 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); SET @delete_data = " @@ -35,7 +41,7 @@ BEGIN PREPARE delete_data_stmt from @delete_data; EXECUTE delete_data_stmt USING @laundering_period_year; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 5f26b9c8..72a86310 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -15,25 +15,31 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成① 開始' ); TRUNCATE TABLE internal05.bu_prd_name_contrast_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成① 終了' ); - call medaca_common.put_info_log(schema_name, 
procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 開始' ); @@ -65,22 +71,22 @@ BEGIN ppmv.rec_sts_kbn != '9' ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成③ 開始' ); TRUNCATE TABLE internal05.fcl_mst_v_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成③ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成④ 開始' ); @@ -129,11 +135,11 @@ BEGIN fmv1.rec_sts_kbn != '9' ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成④ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 開始' ); @@ -430,11 +436,11 @@ BEGIN PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 開始' ); @@ -466,7 +472,7 @@ BEGIN PREPARE update_institution_code_stmt from @update_institution_code; EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' ); diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 7c1dee91..06af3867 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 開始' ); @@ -68,7 +74,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call 
medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' ); diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index 65dc9e30..e183d0c8 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -14,25 +14,31 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'whs_org_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替① 開始' ); TRUNCATE TABLE internal05.whs_customer_org_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替① 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替② 開始' ); @@ -91,11 +97,11 @@ BEGIN AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替② 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替③ 開始' ); @@ -115,7 +121,7 @@ BEGIN PREPARE update_organization_stmt from @update_organization; EXECUTE update_organization_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替③ 終了' ); From 6478bd078b0a49ac82448eae5b08bfba26be5a8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Tue, 27 Jun 2023 14:15:20 +0900 Subject: [PATCH 066/103] =?UTF-8?q?=E4=BB=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/jskult_batch_monthly.py | 441 +++++++++--------- .../src/jobctrl_monthly.py | 9 +- 2 files changed, 235 insertions(+), 215 deletions(-) diff --git a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py index b4caefe1..fe9b46ee 100644 --- a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py +++ b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py @@ -11,246 +11,267 @@ import boto3 logger = get_logger('実消化&アルトマーク月次バッチ') +# WKテーブルの過去分削除SQL +PHYSICAL_NORMAL_DELETE_QUERY = """\ + DELETE FROM src05.wk_inst_aris_if +""" -class JskultBathcMonthly(): - """ 実消化&アルトマーク月次バッチ """ +# 正常系データを取得しWKテーブルに保存SQL +NORMAL_INSERT_SELECT_QUERY = """\ + INSERT src05.wk_inst_aris_if + SELECT + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' 
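The CASE expression added to every handler above exists because MySQL rejects overly long MESSAGE_TEXT values in SIGNAL (the commit message calls this a message overflow), so diagnostics messages longer than 127 characters are cut to 124 characters plus '...' before being re-signalled. The same rule in plain Python, for reference; note that the SQL LENGTH() counts bytes while Python's len() counts characters.

# Reference rendering of the stored-procedure truncation rule.
def truncate_error_msg(error_msg: str) -> str:
    if len(error_msg) > 127:
        return error_msg[:124] + '...'
    return error_msg


assert len(truncate_error_msg('x' * 500)) == 127  # always fits within the SIGNAL limit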
FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND ci.dcf_dsf_inst_cd IS NOT NULL + AND ci.form_inst_name_kanji IS NOT NULL + AND ci.prefc_cd IS NOT NULL + AND cp.prefc_name IS NOT NULL + AND cc.city_name IS NOT NULL + AND ci.inst_addr IS NOT NULL + ORDER BY ci.dcf_dsf_inst_cd +""" - # WKテーブルの過去分削除SQL - PHYSICAL_NORMAL_DELETE_QUERY = """\ - DELETE FROM src05.wk_inst_aris_if - """ +# 正常系データの件数を取得SQL +NORMAL_COUNT_QUERY = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if +""" - # 正常系データを取得しWKテーブルに保存SQL - NORMAL_INSERT_SELECT_QUERY = """\ - INSERT src05.wk_inst_aris_if +# 異常系WKテーブルの過去分削除SQL +PHYSICAL_ABNORMAL_DELETE_QUERY = """\ + DELETE FROM src05.wk_inst_aris_if_wrn +""" + +# 異常系データを取得しWKテーブルに保存SQL +ABNORMAL_INSERT_SELECT_QUERY = """\ + INSERT src05.wk_inst_aris_if_wrn SELECT - TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form - ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name - ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd - ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address - ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) - ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) - ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date - ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date - ,sysdate() + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' from 
ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date + ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd + ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form + ,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd + ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name + ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name + ,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address + ,sysdate() FROM src05.com_inst ci LEFT JOIN src05.mst_prefc cp - ON ci.prefc_cd = cp.prefc_cd + ON ci.prefc_cd = cp.prefc_cd LEFT JOIN src05.mst_city cc - ON ci.prefc_cd = cc.prefc_cd - AND ci.city_cd = cc.city_cd + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd LEFT OUTER JOIN src05.com_inst_div cd - ON ci.inst_div_cd = cd.inst_div_cd + ON ci.inst_div_cd = cd.inst_div_cd WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' - AND ci.dcf_dsf_inst_cd IS NOT NULL - AND ci.form_inst_name_kanji IS NOT NULL - AND ci.prefc_cd IS NOT NULL - AND cp.prefc_name IS NOT NULL - AND cc.city_name IS NOT NULL - AND ci.inst_addr IS NOT NULL + AND( ci.dcf_dsf_inst_cd IS NULL + OR ci.form_inst_name_kanji IS NULL + OR ci.prefc_cd IS NULL + OR cp.prefc_name IS NULL + OR cc.city_name IS NULL + OR ci.inst_addr IS NULL) ORDER BY ci.dcf_dsf_inst_cd - """ +""" - # 正常系データの件数を取得SQL - NORMAL_COUNT_QUERY = """\ - SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if - """ +# 正常系データの件数を取得SQL +ABNORMAL_COUNT_QUERY = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn +""" +# CSVファイルの作成用のSQL +SELECT_QUERY = """\ + SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name, + postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd, + '', inst_delete_date + FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd +""" - # 異常系WKテーブルの過去分削除SQL - PHYSICAL_ABNORMAL_DELETE_QUERY = """\ - DELETE FROM src05.wk_inst_aris_if_wrn - """ - - # 異常系データを取得しWKテーブルに保存SQL - ABNORMAL_INSERT_SELECT_QUERY = """\ - INSERT src05.wk_inst_aris_if_wrn - SELECT - TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form - ,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name - ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd - ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name - ,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address - ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) - ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no - ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) - ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) - ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date - ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date - ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd - ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form - ,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS 
wrnid_pref_cd - ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name - ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name - ,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address - ,sysdate() - FROM src05.com_inst ci - LEFT JOIN src05.mst_prefc cp - ON ci.prefc_cd = cp.prefc_cd - LEFT JOIN src05.mst_city cc - ON ci.prefc_cd = cc.prefc_cd - AND ci.city_cd = cc.city_cd - LEFT OUTER JOIN src05.com_inst_div cd - ON ci.inst_div_cd = cd.inst_div_cd - WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' - AND( ci.dcf_dsf_inst_cd IS NULL - OR ci.form_inst_name_kanji IS NULL - OR ci.prefc_cd IS NULL - OR cp.prefc_name IS NULL - OR cc.city_name IS NULL - OR ci.inst_addr IS NULL) - ORDER BY ci.dcf_dsf_inst_cd - """ - - # 正常系データの件数を取得SQL - ABNORMAL_COUNT_QUERY = """\ - SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn - """ - # CSVファイルの作成用のSQL - SELECT_QUERY = """\ - SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name, - postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd, - '', inst_delete_date - FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd - """ - - aris_log = '/var/log/temporarydwh/' - move_file_path = '/data/mountaris/DATA/' - create_date = datetime.now().strftime('%Y%m%d%H%M%S') - create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') - aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' - res_log = f'D0004{create_date}.log' - prg_id = 'PrgId:BI0402' - head_str = 'TC_HOSPITAL, TJ_HOSPITAL, TJ_HOSPITALSHORT, TK_HOSPITAL, \ - TC_PREFECTURE, TJ_PREFECTURE, TJ_ZIPCODE, TJ_CITY, TJ_ADDRESS, TJ_DEPARTMENT, \ - TJ_TELEPHONENUMBER, TC_HOSPITALCAT, TC_HOSPITALTYPE, TS_UPDATE, TD_UPDATE' - - start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。\n" - err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。\n" - csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。\n" - cnt_msg = "MsgID: Message: LogText:" - - def exec_batch_monthly(self): - """ 実消化&アルトマーク月次バッチ """ - try: - # 実行ログに書き込む - resLog = make_log_data(self) - resLog_f = resLog[0] - log_file_path = resLog[1] - resLog_f.write(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.start_msg}') - - db = Database.get_instance() - # DB接続 - db.connect() - # トランザクションの開始 - db.begin() - - # 正常系データの反映 - # 過去分は不要のため、デリート - db.execute(self.PHYSICAL_NORMAL_DELETE_QUERY) - - # 正常系データを取得しWKテーブルに保存する。 - db.execute(self.NORMAL_INSERT_SELECT_QUERY) - - # 正常系データの件数を取得 - record_count = db.execute_select(self.NORMAL_COUNT_QUERY) - suc_count = record_count[0]['countNum'] - - # 警告系データの反映 - # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 - db.execute(self.PHYSICAL_ABNORMAL_DELETE_QUERY) - - # 異常系データを取得しWKテーブルに保存する。 - db.execute(self.ABNORMAL_INSERT_SELECT_QUERY) - - # 異常系データの件数を取得 - record_count = db.execute_select(self.ABNORMAL_COUNT_QUERY) - wrn_count = record_count[0]['countNum'] - - # CSVファイルの作成用のSQL実行 - record_csv = db.execute_select(self.SELECT_QUERY) - - # CSVファイル作成 - csv_file_path = make_csv_data(self, record_csv) - - # テスト用に出力している(あとで消す) - logger.info(log_file_path) - logger.info(csv_file_path) - - # トランザクションの終了 - db.commit() - - # 実行ログファイルの追記 - # 実行ログに処理件数を書き込む。 - sum_count = suc_count + wrn_count - resLog_f.write(f'{self.create_date_format}[DWH][3][INFO]{self.prg_id} {self.cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') - - # 実行ログファイルクローズ - resLog_f.close() - - # ファイル移動処理 - s3_upload_data(self, csv_file_path, log_file_path) - - logger.info('実消化&アルトマーク月次バッチ処理: 終了') - except Exception as e: - 
resLog_f.write(f'{self.create_date_format}[DWH][5][INFO]{e.message}') - raise BatchOperationException(e) - - finally: - # 終了時に必ずコミットする - db.commit() - db.disconnect() - return +create_date = datetime.now().strftime('%Y%m%d%H%M%S') +create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') +aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' +res_log = f'D0004{create_date}.log' +prg_id = 'PrgId:BI0402' +head_str = 'TC_HOSPITAL,TJ_HOSPITAL,TJ_HOSPITALSHORT,TK_HOSPITAL,TC_PREFECTURE,TJ_PREFECTURE,TJ_ZIPCODE,TJ_CITY,TJ_ADDRESS,\ +TJ_DEPARTMENT,TJ_TELEPHONENUMBER,TC_HOSPITALCAT,TC_HOSPITALTYPE,TS_UPDATE, TD_UPDATE' +start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。" +err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。" +csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。" +cnt_msg = "MsgID: Message: LogText:" +move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。" -def make_csv_data(self, record_csv: list): +def exec(): + """ 実消化&アルトマーク月次バッチ """ + try: + # 実行ログに書き込む + resLog = make_log_data() + resLog_f = resLog[0] + log_file_path = resLog[1] + resLog_f.write(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}\n') + logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') + + db = Database.get_instance() + # DB接続 + db.connect() + # トランザクションの開始 + db.begin() + + # 正常系データの反映 + # 過去分は不要のため、デリート + db.execute(PHYSICAL_NORMAL_DELETE_QUERY) + + # 正常系データを取得しWKテーブルに保存する。 + db.execute(NORMAL_INSERT_SELECT_QUERY) + + # 正常系データの件数を取得 + record_count = db.execute_select(NORMAL_COUNT_QUERY) + suc_count = record_count[0]['countNum'] + + # 警告系データの反映 + # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 + db.execute(PHYSICAL_ABNORMAL_DELETE_QUERY) + + # 異常系データを取得しWKテーブルに保存する。 + db.execute(ABNORMAL_INSERT_SELECT_QUERY) + + # 異常系データの件数を取得 + record_count = db.execute_select(ABNORMAL_COUNT_QUERY) + wrn_count = record_count[0]['countNum'] + + # CSVファイルの作成用のSQL実行 + record_csv = db.execute_select(SELECT_QUERY) + + # CSVファイル作成 + csv_file_path = make_csv_data(record_csv, resLog_f) + + # トランザクションの終了 + db.commit() + + # 実行ログファイルの追記 + # 実行ログに処理件数を書き込む。 + sum_count = suc_count + wrn_count + resLog_f.write(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') + logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') + + # CSVファイル移動処理 + s3_csv_upload_data(csv_file_path, resLog_f) + + # 実行ログファイルクローズ + resLog_f.close() + + # logファイル移動処理 + s3_log_upload_data(log_file_path) + + logger.info('実消化&アルトマーク月次バッチ処理: 終了') + except Exception as e: + logger.info(f'{create_date_format}[DWH][5][INFO]{e.message}') + raise BatchOperationException(e) + + finally: + # 終了時に必ずコミットする + db.commit() + db.disconnect() + + +def make_csv_data(record_csv: list, resLog_f): # 一時ファイルとして保存する(CSVファイル) - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, self.aris_create_csv) + try: - # ヘッダ行書き込み - fp = open(csv_file_path, mode='w') - fp.write(f'{self.head_str}\n') + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, aris_create_csv) - # データ部分書き込み - for record_data in record_csv: - record_value = list(record_data.values()) - csv_data = ",".join(map(str, record_value)) - fp.write(f'{csv_data}\n') + # ヘッダ行書き込み + fp = open(csv_file_path, mode='w') + fp.write(f'{head_str}\n') + + # データ部分書き込み + for record_data in record_csv: + record_value = list(record_data.values()) + csv_data = ",".join(map(str, record_value)) + 
fp.write(f'{csv_data}\n') + + # ファイルクローズ + fp.close() + except Exception as e: + resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}\n') + resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}\n') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') + raise e - # ファイルクローズ - fp.close() return csv_file_path -def make_log_data(self): +def make_log_data(): # 一時ファイルとして保存する(ログファイル) temporary_dir = tempfile.mkdtemp() - log_file_path = path.join(temporary_dir, self.res_log) + log_file_path = path.join(temporary_dir, res_log) fp = open(log_file_path, mode='w') return fp, log_file_path -def s3_upload_data(self, csv_file_path, log_file_path): +def s3_csv_upload_data(csv_file_path, resLog_f): # s3にログファイルとCSVファイルをUPする - Bucket = os.environ['ARISJ_DATA_BUCKET'] folder = os.environ['ARISJ_DATA_FOLDER'] - csv_file_name = f'{folder}/{self.aris_create_csv}' - log_file_name = f'{folder}/{self.res_log}' - + csv_file_name = f'{folder}/{aris_create_csv}' s3_client = boto3.client('s3') - s3_client.upload_file(csv_file_path, Bucket, csv_file_name) - s3_client.upload_file(csv_file_path, Bucket, log_file_name) + + try: + s3_client.upload_file(csv_file_path, Bucket, csv_file_name) + except Exception as e: + resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}\n') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') + raise e + + return + + +def s3_log_upload_data(log_file_path): + # s3にログファイルとCSVファイルをUPする + Bucket = os.environ['ARISJ_DATA_BUCKET'] + folder = os.environ['ARISJ_DATA_FOLDER'] + log_file_name = f'{folder}/{res_log}' + s3_client = boto3.client('s3') + + try: + s3_client.upload_file(log_file_path, Bucket, log_file_name) + except Exception as e: + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') + raise e return diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py index 3a1e8d68..fdb7d0c1 100644 --- a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -9,7 +9,7 @@ from src.batch.common.calendar_file import CalendarFile from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants -from src.batch.jskult_batch_monthly import JskultBathcMonthly +from src.batch import jskult_batch_monthly logger = get_logger('月次処理コントロール') @@ -30,12 +30,12 @@ def exec(): # 月次バッチ処理中の場合、後続の処理は行わない if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING: logger.error('バッチ処理中のため、月次バッチ処理を終了します。') - # 戻すんだよ return constants.BATCH_EXIT_CODE_SUCCESS + return constants.BATCH_EXIT_CODE_SUCCESS # dump取得が正常終了していない場合、後続の処理は行わない if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: logger.error('dump取得が正常終了していないため、月次バッチ処理を終了します。') - # 戻すんだよ return constants.BATCH_EXIT_CODE_SUCCESS + return constants.BATCH_EXIT_CODE_SUCCESS logger.info(f'処理日={syor_date}') # バッチ共通設定に処理日を追加 @@ -62,8 +62,7 @@ def exec(): try: logger.info('月次バッチ:起動') - BathcMonthly = JskultBathcMonthly() - BathcMonthly.exec_batch_monthly() + jskult_batch_monthly.exec() logger.info('月次バッチ:終了') except BatchOperationException as e: logger.exception(f'月次バッチ処理エラー(異常終了){e}') From be75bf7a9a5f291f8211d746c1b5df0069fc62db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Tue, 27 Jun 2023 16:50:29 +0900 Subject: [PATCH 067/103] 
=?UTF-8?q?=E4=BB=AE=E5=AE=8C=E6=88=90=EF=BC=92?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py index fe9b46ee..2a8d4c06 100644 --- a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py +++ b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py @@ -221,6 +221,7 @@ def make_csv_data(record_csv: list, resLog_f): # データ部分書き込み for record_data in record_csv: record_value = list(record_data.values()) + record_value = ['' if n is None else n for n in record_value] csv_data = ",".join(map(str, record_value)) fp.write(f'{csv_data}\n') From a767457749dc789415efcab987131f43ed434415 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 23 Jun 2023 20:15:35 +0900 Subject: [PATCH 068/103] =?UTF-8?q?LOAD=E5=AE=9F=E8=A1=8C=E7=B5=90?= =?UTF-8?q?=E6=9E=9C=E3=80=81=E3=83=AF=E3=83=BC=E3=83=8B=E3=83=B3=E3=82=B0?= =?UTF-8?q?1261=E3=81=AF=E8=A8=B1=E5=AE=B9=E3=81=99=E3=82=8B=E3=81=A8?= =?UTF-8?q?=E3=81=97=E3=81=A6=E4=BE=8B=E5=A4=96=E5=88=A4=E5=AE=9A=E3=81=8B?= =?UTF-8?q?=E3=82=89=E9=99=A4=E5=A4=96=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index f869b983..fa82805d 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -37,14 +37,18 @@ class VjskDataLoadManager: """ db.begin() result = db.execute(sql, {"src_file_name": src_file_name}) + logger.debug(sql) result_w = db.execute("SHOW WARNINGS;") has_mysql_warnings = False for row in result_w.fetchall(): + # 例外スロー対象から除外 : Warning(1261) Row {ROW NUMBER} doesn't contain data for all columns + if len(row) >= 2 and row[0] == "Warning" and row[1] == 1261: + logger.info(f"SHOW WARNINGS (SKIP) : {row}") + continue has_mysql_warnings = True logger.info(f"SHOW WARNINGS : {row}") if has_mysql_warnings: raise Exception("LOAD文実行時にWARNINGが発生しました。") - logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') db.commit() From e38938a243ac3b2c9a7ee2203496d9f58a9071ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 28 Jun 2023 15:02:16 +0900 Subject: [PATCH 069/103] =?UTF-8?q?=E6=8C=87=E6=91=98=E4=BA=8B=E9=A0=85?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3=EF=BC=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/.env.example | 29 +- ecs/jskult-batch-monthly/Pipfile | 6 - ecs/jskult-batch-monthly/README.md | 2 +- ecs/jskult-batch-monthly/entrypoint.py | 2 +- ecs/jskult-batch-monthly/src/aws/s3.py | 25 -- .../src/batch/batch_functions.py | 58 +-- .../src/batch/common/batch_context.py | 31 +- .../src/batch/jskult_batch_monthly.py | 278 -------------- .../src/batch/output_arisj_file_process.py | 361 ++++++++++++++++++ .../src/batch/parallel_processes.py | 32 -- .../src/jobctrl_monthly.py | 48 +-- .../src/system_var/constants.py | 9 - .../src/system_var/environment.py | 8 +- 13 files changed, 403 insertions(+), 486 deletions(-) delete mode 100644 
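
A note on the WARNING filter in the vjsk_data_load_manager patch above: after the LOAD statement, SHOW WARNINGS returns (level, code, message) rows, and code 1261 ("Row N doesn't contain data for all columns") is now tolerated while any other warning still raises. A minimal stand-alone sketch of that filtering rule, assuming plain tuples instead of the project's DB result rows; the function name and sample rows are illustrative only.

def has_blocking_warnings(warning_rows):
    """Return True when SHOW WARNINGS contains anything other than Warning 1261."""
    ignored = {("Warning", 1261)}  # 1261: Row N doesn't contain data for all columns
    return any((level, code) not in ignored for level, code, _message in warning_rows)

# hypothetical rows
rows = [("Warning", 1261, "Row 3 doesn't contain data for all columns"),
        ("Warning", 1265, "Data truncated for column 'x' at row 5")]
assert has_blocking_warnings(rows) is True
assert has_blocking_warnings(rows[:1]) is False
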
ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py create mode 100644 ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py delete mode 100644 ecs/jskult-batch-monthly/src/batch/parallel_processes.py diff --git a/ecs/jskult-batch-monthly/.env.example b/ecs/jskult-batch-monthly/.env.example index f2bb73c8..6bda1a69 100644 --- a/ecs/jskult-batch-monthly/.env.example +++ b/ecs/jskult-batch-monthly/.env.example @@ -1,23 +1,20 @@ DB_HOST=************ -DB_PORT=************ +DB_PORT=3306 DB_USERNAME=************ DB_PASSWORD=************ DB_SCHEMA=src05 + +ARISJ_DATA_BUCKET=mbj-newdwh2021-staging-jskult-arisj +JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config +ULTMARC_BACKUP_FOLDER=************ + LOG_LEVEL=INFO -ULTMARC_DATA_BUCKET=**************** -ULTMARC_DATA_FOLDER=recv -JSKULT_BACKUP_BUCKET=**************** -ULTMARC_BACKUP_FOLDER=ultmarc -JSKULT_CONFIG_BUCKET=********************** -JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt -ARISJ_DATA_BUCKET=********** -LOG_LEVEL=************** -ARISJ_BACKUP_FOLDER=arisj ARISJ_DATA_FOLDER=DATA +ARISJ_BACKUP_FOLDER=arisj +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME=jskult_arisj_output_day_list.txt -DB_CONNECTION_MAX_RETRY_ATTEMPT=************** -DB_CONNECTION_RETRY_INTERVAL_INIT=************** -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=************** -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=************* -VJSK_DATA_BUCKET=************* \ No newline at end of file +DB_CONNECTION_MAX_RETRY_ATTEMPT=************ +DB_CONNECTION_RETRY_INTERVAL_INIT=************ +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=************ +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=************ diff --git a/ecs/jskult-batch-monthly/Pipfile b/ecs/jskult-batch-monthly/Pipfile index 24e5efcd..fe0fdc38 100644 --- a/ecs/jskult-batch-monthly/Pipfile +++ b/ecs/jskult-batch-monthly/Pipfile @@ -3,10 +3,6 @@ url = "https://pypi.org/simple" verify_ssl = true name = "pypi" -[scripts] -"test:ultmarc" = "pytest tests/batch/ultmarc/" -"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" - [packages] boto3 = "*" sqlalchemy = "*" @@ -16,8 +12,6 @@ pymysql = "*" [dev-packages] autopep8 = "*" flake8 = "*" -pytest = "*" -pytest-cov = "*" [requires] python_version = "3.9" diff --git a/ecs/jskult-batch-monthly/README.md b/ecs/jskult-batch-monthly/README.md index acf096d2..f6e7c2da 100644 --- a/ecs/jskult-batch-monthly/README.md +++ b/ecs/jskult-batch-monthly/README.md @@ -42,7 +42,7 @@ - VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 - 「entrypoint.py」が、バッチ処理のエントリーポイント。 -- 実際の処理は、「src/jobctrl_daily.py」で行っている。 +- 実際の処理は、「src/jobctrl_monthly.py」で行っている。 ## フォルダ構成(工事中) diff --git a/ecs/jskult-batch-monthly/entrypoint.py b/ecs/jskult-batch-monthly/entrypoint.py index 191d0eae..4ab8d3b3 100644 --- a/ecs/jskult-batch-monthly/entrypoint.py +++ b/ecs/jskult-batch-monthly/entrypoint.py @@ -1,4 +1,4 @@ -"""実消化&アルトマーク 日次バッチのエントリーポイント""" +"""実消化&アルトマーク 月次バッチのエントリーポイント""" from src import jobctrl_monthly if __name__ == '__main__': diff --git a/ecs/jskult-batch-monthly/src/aws/s3.py b/ecs/jskult-batch-monthly/src/aws/s3.py index 847f5bad..ed337407 100644 --- a/ecs/jskult-batch-monthly/src/aws/s3.py +++ b/ecs/jskult-batch-monthly/src/aws/s3.py @@ -51,31 +51,6 @@ class S3Bucket(): _bucket_name: str = None -class UltmarcBucket(S3Bucket): - _bucket_name = 
environment.ULTMARC_DATA_BUCKET - _folder = environment.ULTMARC_DATA_FOLDER - - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def download_dat_file(self, dat_filename: str): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, dat_filename, f) - f.seek(0) - return temporary_file_path - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - ultmarc_backup_bucket = UltmarcBackupBucket() - backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) - # コピー元のファイルを削除 - self._s3_client.delete_file(self._bucket_name, dat_file_key) - - class ConfigBucket(S3Bucket): _bucket_name = environment.JSKULT_CONFIG_BUCKET diff --git a/ecs/jskult-batch-monthly/src/batch/batch_functions.py b/ecs/jskult-batch-monthly/src/batch/batch_functions.py index 27aac450..40cf84f2 100644 --- a/ecs/jskult-batch-monthly/src/batch/batch_functions.py +++ b/ecs/jskult-batch-monthly/src/batch/batch_functions.py @@ -5,23 +5,21 @@ from datetime import datetime from src.db.database import Database from src.error.exceptions import BatchOperationException, DBException -from src.system_var import constants -def get_batch_statuses() -> tuple[str, str, str]: +def get_batch_statuses() -> tuple[str, str]: """日付テーブルから、以下を取得して返す。 - バッチ処理中フラグ - - dump取得状況区分 - 処理日(YYYY/MM/DD) Raises: BatchOperationException: 日付テーブルが取得できないとき、何らかのエラーが発生したとき Returns: - tuple[str, str]: [0]バッチ処理中フラグ、dump取得状況区分 + tuple[str, str]: [0]バッチ処理中フラグ,[1]処理日 """ db = Database.get_instance() - sql = 'SELECT bch_actf, dump_sts_kbn, src05.get_syor_date() AS syor_date FROM src05.hdke_tbl' + sql = 'SELECT bch_actf, src05.get_syor_date() AS syor_date FROM src05.hdke_tbl' try: db.connect() hdke_tbl_result = db.execute_select(sql) @@ -36,59 +34,11 @@ def get_batch_statuses() -> tuple[str, str, str]: # 必ず1件取れる hdke_tbl_record = hdke_tbl_result[0] batch_processing_flag = hdke_tbl_record['bch_actf'] - dump_status_kbn = hdke_tbl_record['dump_sts_kbn'] syor_date = hdke_tbl_record['syor_date'] # 処理日を文字列に変換する syor_date_str = datetime.strftime(syor_date, '%Y/%m/%d') - return batch_processing_flag, dump_status_kbn, syor_date_str - - -def update_batch_processing_flag_in_processing() -> None: - """バッチ処理中フラグを処理中に更新する - - Raises: - BatchOperationException: DB操作の何らかのエラー - """ - db = Database.get_instance() - sql = 'UPDATE src05.hdke_tbl SET bch_actf = :in_processing' - try: - db.connect() - db.execute(sql, {'in_processing': constants.BATCH_ACTF_BATCH_IN_PROCESSING}) - except DBException as e: - raise BatchOperationException(e) - finally: - db.disconnect() - - return - - -def update_batch_process_complete() -> None: - """バッチ処理を完了とし、処理日、バッチ処理中フラグ、dump処理状態区分を更新する - - Raises: - BatchOperationException: DB操作の何らかのエラー - """ - db = Database.get_instance() - sql = """\ - UPDATE src05.hdke_tbl - SET - bch_actf = :batch_complete, - dump_sts_kbn = :dump_unprocessed, - syor_date = DATE_FORMAT((src05.get_syor_date() + interval 1 day), '%Y%m%d') -- +1日 - """ - try: - db.connect() - db.execute(sql, { - 'batch_complete': constants.BATCH_ACTF_BATCH_UNPROCESSED, - 'dump_unprocessed': constants.DUMP_STATUS_KBN_UNPROCESSED - }) - except DBException as e: - raise 
BatchOperationException(e) - finally: - db.disconnect() - - return + return batch_processing_flag, syor_date_str def logging_sql(logger: logging.Logger, sql: str) -> None: diff --git a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py index 8b76415a..6a05a423 100644 --- a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py @@ -1,10 +1,9 @@ class BatchContext: __instance = None - __syor_date: str # 処理日(yyyy/mm/dd形式) - __is_not_business_monthly: bool # 月次バッチ起動日フラグ + __is_arisj_output_day: bool # 月次バッチ起動日フラグ def __init__(self) -> None: - self.__is_not_business_monthly = False + self.__is_arisj_output_day = False @classmethod def get_instance(cls): @@ -13,25 +12,9 @@ class BatchContext: return cls.__instance @property - def syor_date(self): - return self.__syor_date + def is_arisj_output_day(self): + return self.__is_arisj_output_day - @syor_date.setter - def syor_date(self, syor_date_str: str): - self.__syor_date = syor_date_str - - @property - def is_not_business_monthly(self): - return self.__is_not_business_monthly - - @is_not_business_monthly.setter - def is_not_business_monthly(self, flag: bool): - self.__is_not_business_monthly = flag - - @property - def is_ultmarc_imported(self): - return self.__is_ultmarc_imported - - @is_ultmarc_imported.setter - def is_ultmarc_imported(self, flag: bool): - self.__is_ultmarc_imported = flag + @is_arisj_output_day.setter + def is_arisj_output_day(self, flag: bool): + self.__is_arisj_output_day = flag diff --git a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py b/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py deleted file mode 100644 index 2a8d4c06..00000000 --- a/ecs/jskult-batch-monthly/src/batch/jskult_batch_monthly.py +++ /dev/null @@ -1,278 +0,0 @@ - -from datetime import datetime - -from src.db.database import Database -from src.error.exceptions import BatchOperationException -from src.logging.get_logger import get_logger -import os -import tempfile -import os.path as path -import boto3 - -logger = get_logger('実消化&アルトマーク月次バッチ') - -# WKテーブルの過去分削除SQL -PHYSICAL_NORMAL_DELETE_QUERY = """\ - DELETE FROM src05.wk_inst_aris_if -""" - -# 正常系データを取得しWKテーブルに保存SQL -NORMAL_INSERT_SELECT_QUERY = """\ - INSERT src05.wk_inst_aris_if - SELECT - TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form - ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd - ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name - ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd - ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address - ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no - ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) - ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) - ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date - ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date - ,sysdate() - FROM src05.com_inst ci - LEFT JOIN src05.mst_prefc cp - ON ci.prefc_cd = cp.prefc_cd - LEFT JOIN src05.mst_city cc - ON ci.prefc_cd = cc.prefc_cd - AND ci.city_cd = cc.city_cd - LEFT OUTER JOIN 
src05.com_inst_div cd - ON ci.inst_div_cd = cd.inst_div_cd - WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' - AND ci.dcf_dsf_inst_cd IS NOT NULL - AND ci.form_inst_name_kanji IS NOT NULL - AND ci.prefc_cd IS NOT NULL - AND cp.prefc_name IS NOT NULL - AND cc.city_name IS NOT NULL - AND ci.inst_addr IS NOT NULL - ORDER BY ci.dcf_dsf_inst_cd -""" - -# 正常系データの件数を取得SQL -NORMAL_COUNT_QUERY = """\ - SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if -""" - -# 異常系WKテーブルの過去分削除SQL -PHYSICAL_ABNORMAL_DELETE_QUERY = """\ - DELETE FROM src05.wk_inst_aris_if_wrn -""" - -# 異常系データを取得しWKテーブルに保存SQL -ABNORMAL_INSERT_SELECT_QUERY = """\ - INSERT src05.wk_inst_aris_if_wrn - SELECT - TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form - ,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd - ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name - ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd - ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name - ,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address - ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) - ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no - ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) - ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) - ,DATE_FORMAT(ci.sys_update_date,'%y%m%d') AS update_date - ,DATE_FORMAT(ci.abolish_ymd,'%y%m%d') AS delete_date - ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd - ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form - ,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd - ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name - ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name - ,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address - ,sysdate() - FROM src05.com_inst ci - LEFT JOIN src05.mst_prefc cp - ON ci.prefc_cd = cp.prefc_cd - LEFT JOIN src05.mst_city cc - ON ci.prefc_cd = cc.prefc_cd - AND ci.city_cd = cc.city_cd - LEFT OUTER JOIN src05.com_inst_div cd - ON ci.inst_div_cd = cd.inst_div_cd - WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' - AND( ci.dcf_dsf_inst_cd IS NULL - OR ci.form_inst_name_kanji IS NULL - OR ci.prefc_cd IS NULL - OR cp.prefc_name IS NULL - OR cc.city_name IS NULL - OR ci.inst_addr IS NULL) - ORDER BY ci.dcf_dsf_inst_cd -""" - -# 正常系データの件数を取得SQL -ABNORMAL_COUNT_QUERY = """\ - SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn -""" -# CSVファイルの作成用のSQL -SELECT_QUERY = """\ - SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name, - postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd, - '', inst_delete_date - FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd -""" - -create_date = datetime.now().strftime('%Y%m%d%H%M%S') -create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') -aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' -res_log = f'D0004{create_date}.log' -prg_id = 'PrgId:BI0402' -head_str = 'TC_HOSPITAL,TJ_HOSPITAL,TJ_HOSPITALSHORT,TK_HOSPITAL,TC_PREFECTURE,TJ_PREFECTURE,TJ_ZIPCODE,TJ_CITY,TJ_ADDRESS,\ -TJ_DEPARTMENT,TJ_TELEPHONENUMBER,TC_HOSPITALCAT,TC_HOSPITALTYPE,TS_UPDATE, TD_UPDATE' -start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。" -err_end_msg = "MsgID:BI0000009998 
Message:バッチ処理を異常終了しました。" -csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。" -cnt_msg = "MsgID: Message: LogText:" -move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。" - - -def exec(): - """ 実消化&アルトマーク月次バッチ """ - try: - # 実行ログに書き込む - resLog = make_log_data() - resLog_f = resLog[0] - log_file_path = resLog[1] - resLog_f.write(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}\n') - logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') - - db = Database.get_instance() - # DB接続 - db.connect() - # トランザクションの開始 - db.begin() - - # 正常系データの反映 - # 過去分は不要のため、デリート - db.execute(PHYSICAL_NORMAL_DELETE_QUERY) - - # 正常系データを取得しWKテーブルに保存する。 - db.execute(NORMAL_INSERT_SELECT_QUERY) - - # 正常系データの件数を取得 - record_count = db.execute_select(NORMAL_COUNT_QUERY) - suc_count = record_count[0]['countNum'] - - # 警告系データの反映 - # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 - db.execute(PHYSICAL_ABNORMAL_DELETE_QUERY) - - # 異常系データを取得しWKテーブルに保存する。 - db.execute(ABNORMAL_INSERT_SELECT_QUERY) - - # 異常系データの件数を取得 - record_count = db.execute_select(ABNORMAL_COUNT_QUERY) - wrn_count = record_count[0]['countNum'] - - # CSVファイルの作成用のSQL実行 - record_csv = db.execute_select(SELECT_QUERY) - - # CSVファイル作成 - csv_file_path = make_csv_data(record_csv, resLog_f) - - # トランザクションの終了 - db.commit() - - # 実行ログファイルの追記 - # 実行ログに処理件数を書き込む。 - sum_count = suc_count + wrn_count - resLog_f.write(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})\n') - logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') - - # CSVファイル移動処理 - s3_csv_upload_data(csv_file_path, resLog_f) - - # 実行ログファイルクローズ - resLog_f.close() - - # logファイル移動処理 - s3_log_upload_data(log_file_path) - - logger.info('実消化&アルトマーク月次バッチ処理: 終了') - except Exception as e: - logger.info(f'{create_date_format}[DWH][5][INFO]{e.message}') - raise BatchOperationException(e) - - finally: - # 終了時に必ずコミットする - db.commit() - db.disconnect() - - -def make_csv_data(record_csv: list, resLog_f): - # 一時ファイルとして保存する(CSVファイル) - try: - - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, aris_create_csv) - - # ヘッダ行書き込み - fp = open(csv_file_path, mode='w') - fp.write(f'{head_str}\n') - - # データ部分書き込み - for record_data in record_csv: - record_value = list(record_data.values()) - record_value = ['' if n is None else n for n in record_value] - csv_data = ",".join(map(str, record_value)) - fp.write(f'{csv_data}\n') - - # ファイルクローズ - fp.close() - except Exception as e: - resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}\n') - resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}\n') - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') - raise e - - return csv_file_path - - -def make_log_data(): - # 一時ファイルとして保存する(ログファイル) - temporary_dir = tempfile.mkdtemp() - log_file_path = path.join(temporary_dir, res_log) - fp = open(log_file_path, mode='w') - return fp, log_file_path - - -def s3_csv_upload_data(csv_file_path, resLog_f): - # s3にログファイルとCSVファイルをUPする - Bucket = os.environ['ARISJ_DATA_BUCKET'] - folder = os.environ['ARISJ_DATA_FOLDER'] - csv_file_name = f'{folder}/{aris_create_csv}' - s3_client = boto3.client('s3') - - try: - s3_client.upload_file(csv_file_path, Bucket, csv_file_name) - except Exception as e: - resLog_f.write(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}\n') - 
logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') - raise e - - return - - -def s3_log_upload_data(log_file_path): - # s3にログファイルとCSVファイルをUPする - Bucket = os.environ['ARISJ_DATA_BUCKET'] - folder = os.environ['ARISJ_DATA_FOLDER'] - log_file_name = f'{folder}/{res_log}' - s3_client = boto3.client('s3') - - try: - s3_client.upload_file(log_file_path, Bucket, log_file_name) - except Exception as e: - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') - raise e - - return diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py new file mode 100644 index 00000000..0deec661 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py @@ -0,0 +1,361 @@ + +from datetime import datetime + +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.aws.s3 import S3Client +from src.logging.get_logger import get_logger +import tempfile +import os +import os.path as path +import logging +import csv + +logger = get_logger('実消化&アルトマーク月次バッチ') + +create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') +prg_id = 'PrgId:BI0402' +create_date = datetime.now().strftime('%Y%m%d%H%M%S') +aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' +res_log = f'D0004{create_date}.log' +sql_err_msg = "MsgID:999999000002 Message:SQL実行エラーです。" +move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。" + + +def exec(): + """ 実消化&アルトマーク月次バッチ """ + try: + + start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。" + cnt_msg = "MsgID: Message: LogText:" + + # 実行ログに書き込む + resLog, log_file_path = make_log_data() + resLog.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') + logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') + + db = Database.get_instance() + # DB接続 + db.connect() + # トランザクションの開始 + db.begin() + + # 正常系データの反映 + # 過去分は不要のため、デリート + physical_normal_delete(db) + + # 正常系データを取得しWKテーブルに保存する。 + normal_insert_into(db) + + # 正常系データの件数を取得 + suc_count = normal_count(db) + + # 警告系データの反映 + # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 + physical_abnormal_delete(db) + + # 異常系データを取得しWKテーブルに保存する。 + abnormal_insert_into(db) + + # 異常系データの件数を取得 + wrn_count = abnormal_count(db) + + # CSVファイルの作成用のSQL実行 + record_csv = csv_data_select(db) + + # CSVファイル作成 + csv_file_path = make_csv_data(record_csv, resLog) + + # トランザクションの終了 + db.commit() + + # 実行ログファイルの追記 + # 実行ログに処理件数を書き込む。 + sum_count = suc_count + wrn_count + resLog.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') + logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') + + # CSVファイル移動処理 + s3_csv_upload_data(csv_file_path, resLog) + + # logファイル移動処理 + s3_log_upload_data(log_file_path) + + logger.info('実消化&アルトマーク月次バッチ処理: 終了') + except Exception as e: + logger.info(f'{create_date_format}[DWH][5][INFO]') + raise BatchOperationException(e) + + finally: + # 終了時に必ずコミットする + db.commit() + db.disconnect() + + +def physical_normal_delete(db): + # 過去分は不要のため、デリート + try: + # WKテーブルの過去分削除SQL + sql = """\ + DELETE FROM src05.wk_inst_aris_if + """ + db.execute(sql) + return + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def normal_insert_into(db): + # 正常系データを取得しWKテーブルに保存する。 + try: + # 正常系データを取得しWKテーブルに保存SQL + sql = """\ + INSERT src05.wk_inst_aris_if + SELECT + 
TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND ci.dcf_dsf_inst_cd IS NOT NULL + AND ci.form_inst_name_kanji IS NOT NULL + AND ci.prefc_cd IS NOT NULL + AND cp.prefc_name IS NOT NULL + AND cc.city_name IS NOT NULL + AND ci.inst_addr IS NOT NULL + ORDER BY ci.dcf_dsf_inst_cd + """ + + db.execute(sql) + return + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def normal_count(db): + # 正常系データの件数を取得 + try: + # 正常系データの件数を取得SQL + sql = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if + """ + record_count = db.execute_select(sql) + return record_count[0]['countNum'] + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def physical_abnormal_delete(db): + # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 + try: + # 異常系WKテーブルの過去分削除SQL + sql = """\ + DELETE FROM src05.wk_inst_aris_if_wrn + """ + + db.execute(sql) + return + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def abnormal_insert_into(db): + # 異常系データを取得しWKテーブルに保存する。 + try: + # 異常系データを取得しWKテーブルに保存SQL + sql = """\ + INSERT src05.wk_inst_aris_if_wrn + SELECT + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date + ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd + ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form + 
,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd + ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name + ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name + ,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND( ci.dcf_dsf_inst_cd IS NULL + OR ci.form_inst_name_kanji IS NULL + OR ci.prefc_cd IS NULL + OR cp.prefc_name IS NULL + OR cc.city_name IS NULL + OR ci.inst_addr IS NULL) + ORDER BY ci.dcf_dsf_inst_cd + """ + + db.execute(sql) + return + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def abnormal_count(db): + # 異常系データの件数を取得 + try: + # 異常系データの件数を取得SQL + sql = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn + """ + + record_count = db.execute_select(sql) + + return record_count[0]['countNum'] + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def csv_data_select(db): + # CSVファイルの作成用のSQL実行 + try: + # CSVファイルの作成用のSQL + sql = """\ + SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name, + postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd, + '', inst_delete_date + FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd + """ + + return db.execute_select(sql) + except Exception as e: + logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + raise e + + +def make_csv_data(record_csv: list, resLog): + # 一時ファイルとして保存する(CSVファイル) + try: + err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。" + csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。" + + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, aris_create_csv) + + head_str = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TJ_HOSPITALSHORT', 'TK_HOSPITAL', + 'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT', + 'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', ' TD_UPDATE'] + + # Shift-JIS、CRLF、価囲いありで書き込む + with open(csv_file_path, mode='w', encoding='cp932') as csv_file: + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + # ヘッダ行書き込み + writer.writerow(head_str) + # データ部分書き込み + for record_data in record_csv: + record_value = list(record_data.values()) + csv_data = ['' if n is None else n for n in record_value] + writer.writerow(csv_data) + + except Exception as e: + resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') + resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') + raise e + + return csv_file_path + + +def make_log_data(): + # 一時ファイルとして保存する(ログファイル) + temporary_dir = tempfile.mkdtemp() + log_file_path = path.join(temporary_dir, res_log) + + # ロガーの生成 + resLog = logging.getLogger('resLog') + # 出力レベルの設定 + resLog.setLevel(logging.INFO) + # ハンドラの生成 + resLog_handler = logging.FileHandler(log_file_path) + # ロガーにハンドラを登録 + resLog.addHandler(resLog_handler) + # フォーマッタの生成 + fmt = logging.Formatter('%(message)s') + # ハンドラにフォーマッタを登録 + 
resLog_handler.setFormatter(fmt) + + return resLog, log_file_path + + +def s3_csv_upload_data(csv_file_path, resLog): + # s3にCSVファイルをUPする + Bucket = os.environ['ARISJ_DATA_BUCKET'] + folder = os.environ['ARISJ_DATA_FOLDER'] + csv_file_name = f'{folder}/{aris_create_csv}' + s3_client = S3Client() + + try: + s3_client.upload_file(csv_file_path, Bucket, csv_file_name) + except Exception as e: + resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') + raise e + + return + + +def s3_log_upload_data(log_file_path): + # s3にログファイルをUPする + Bucket = os.environ['ARISJ_DATA_BUCKET'] + folder = os.environ['ARISJ_DATA_FOLDER'] + log_file_name = f'{folder}/{res_log}' + s3_client = S3Client() + + try: + s3_client.upload_file(log_file_path, Bucket, log_file_name) + except Exception as e: + logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') + raise e + + return diff --git a/ecs/jskult-batch-monthly/src/batch/parallel_processes.py b/ecs/jskult-batch-monthly/src/batch/parallel_processes.py deleted file mode 100644 index 0fb2d715..00000000 --- a/ecs/jskult-batch-monthly/src/batch/parallel_processes.py +++ /dev/null @@ -1,32 +0,0 @@ -"""並列処理""" - -import concurrent.futures - -from src.batch.bio_sales import create_bio_sales_lot -from src.batch.laundering import sales_laundering -from src.error.exceptions import BatchOperationException - - -def exec(): - # 並列処理を開始 - with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: - - # 実績更新 - future_sales_laundering = executor.submit(sales_laundering.exec) - # 生物由来ロット分解 - future_create_bio_sales_lot = executor.submit(create_bio_sales_lot.exec) - - # 両方の処理が完了するまで待つ - concurrent.futures.wait([future_sales_laundering, future_create_bio_sales_lot]) - - # エラーがあれば呼び出し元でキャッチする - sales_laundering_exc = future_sales_laundering.exception() - create_bio_sales_lot_exc = future_create_bio_sales_lot.exception() - - # いずれかにエラーが発生していれば、1つのエラーとして返す。 - if sales_laundering_exc is not None or create_bio_sales_lot_exc is not None: - sales_laundering_exc_message = str(sales_laundering_exc) if sales_laundering_exc is not None else '' - create_bio_sales_lot_exc_message = str(create_bio_sales_lot_exc) if create_bio_sales_lot_exc is not None else '' - raise BatchOperationException(f'並列処理中にエラーが発生しました。実績更新="{sales_laundering_exc_message}", 生物由来ロット分解={create_bio_sales_lot_exc_message}') - - return diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py index fdb7d0c1..dba5fac4 100644 --- a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -1,15 +1,13 @@ """実消化&アルトマーク 月次バッチ処理""" from src.aws.s3 import ConfigBucket -from src.batch.batch_functions import ( - get_batch_statuses, update_batch_process_complete, - update_batch_processing_flag_in_processing) +from src.batch.batch_functions import get_batch_statuses from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants -from src.batch import jskult_batch_monthly +from src.batch import output_arisj_file_process logger = get_logger('月次処理コントロール') @@ -22,64 +20,42 @@ def exec(): logger.info('月次バッチ:開始') try: # 月次バッチ処置中フラグ、dump処理状態区分、処理日を取得 - batch_processing_flag, dump_status_kbn, syor_date = get_batch_statuses() + 
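
For reference, the CSV writing in make_csv_data above boils down to a handful of csv.writer settings: cp932 (Shift_JIS) encoding, every field quoted, '\n' line endings, and NULL column values mapped to empty strings (the fix from patch 067). A small self-contained sketch of the same settings, assuming dict rows of the shape execute_select returns; the output file name and sample data are illustrative only.

import csv
import os.path as path
import tempfile


def write_aris_style_csv(rows, header):
    """Write rows with the same csv.writer options as make_csv_data."""
    out_path = path.join(tempfile.mkdtemp(), 'example.csv')  # illustrative name
    with open(out_path, mode='w', encoding='cp932', newline='') as f:
        writer = csv.writer(f, delimiter=',', lineterminator='\n',
                            quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL,
                            strict=True)
        writer.writerow(header)
        for row in rows:
            # None (NULL) columns become empty strings before writing
            writer.writerow(['' if v is None else v for v in row.values()])
    return out_path


# hypothetical usage
print(write_aris_style_csv([{'dcf_inst_cd': '1234567', 'inst_name_form': None}],
                           ['TC_HOSPITAL', 'TJ_HOSPITAL']))
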
batch_processing_flag, syor_date = get_batch_statuses() except BatchOperationException as e: logger.exception(f'日付テーブル取得(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS - # 月次バッチ処理中の場合、後続の処理は行わない + # 日次バッチ処理中の場合、後続の処理は行わない if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING: - logger.error('バッチ処理中のため、月次バッチ処理を終了します。') - return constants.BATCH_EXIT_CODE_SUCCESS - - # dump取得が正常終了していない場合、後続の処理は行わない - if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: - logger.error('dump取得が正常終了していないため、月次バッチ処理を終了します。') + logger.error('日次バッチ処理中のため、月次バッチ処理を終了します。') return constants.BATCH_EXIT_CODE_SUCCESS logger.info(f'処理日={syor_date}') - # バッチ共通設定に処理日を追加 - batch_context.syor_date = syor_date # 稼働日かかどうかを、実消化&アルトマーク月次バッチ稼働日ファイルをダウンロードして判定 try: arisj_output_day_list_file_path = ConfigBucket().download_arisj_output_day_list() arisj_output_day_calendar = CalendarFile(arisj_output_day_list_file_path) - batch_context.is_not_business_monthly = arisj_output_day_calendar.compare_date(syor_date) + batch_context.is_arisj_output_day = arisj_output_day_calendar.compare_date(syor_date) except Exception as e: logger.exception(f'実消化&アルトマーク月次バッチ稼働日ファイルの読み込みに失敗しました。{e}') return constants.BATCH_EXIT_CODE_SUCCESS - # 調査目的でV実消化稼働日かどうかをログ出力 - logger.debug(f'本日は{"実消化&アルトマーク月次バッチ稼働日です。" if batch_context.is_not_business_monthly else "実消化&アルトマーク月次バッチ非稼働日です。"}') - - # バッチ処理中に更新 - try: - update_batch_processing_flag_in_processing() - except BatchOperationException as e: - logger.exception(f'処理フラグ更新(未処理→処理中) エラー(異常終了){e}') + # 調査目的で実消化&アルトマーク月次バッチ稼働日かどうかをログ出力 + if batch_context.is_arisj_output_day: + logger.info('本日は実消化&アルトマーク月次バッチ稼働日です。') + else: + logger.info('月次バッチは行われませんでした。') return constants.BATCH_EXIT_CODE_SUCCESS try: logger.info('月次バッチ:起動') - jskult_batch_monthly.exec() + output_arisj_file_process.exec() logger.info('月次バッチ:終了') except BatchOperationException as e: logger.exception(f'月次バッチ処理エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS - # 調査目的で月次バッチが行われたかどうかをログ出力 - logger.debug(f'{"月次バッチが行われました。" if batch_context.is_not_business_monthly else "月次バッチが行われませんでした。"}') - - # バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 - logger.info('業務日付更新・バッチステータスリフレッシュ:起動') - try: - update_batch_process_complete() - except BatchOperationException as e: - logger.exception(f'業務日付更新・バッチステータスリフレッシュ エラー(異常終了){e}') - return constants.BATCH_EXIT_CODE_SUCCESS - logger.info('業務日付更新・バッチステータスリフレッシュ:終了') - # 正常終了を保守ユーザーに通知 logger.info('[NOTICE]月次バッチ:終了(正常終了)') return constants.BATCH_EXIT_CODE_SUCCESS diff --git a/ecs/jskult-batch-monthly/src/system_var/constants.py b/ecs/jskult-batch-monthly/src/system_var/constants.py index 8a0ccbb3..aaa8a3c0 100644 --- a/ecs/jskult-batch-monthly/src/system_var/constants.py +++ b/ecs/jskult-batch-monthly/src/system_var/constants.py @@ -1,17 +1,8 @@ # バッチ正常終了コード BATCH_EXIT_CODE_SUCCESS = 0 -# バッチ処理中フラグ:未処理 -BATCH_ACTF_BATCH_UNPROCESSED = '0' # バッチ処理中フラグ:処理中 BATCH_ACTF_BATCH_IN_PROCESSING = '1' -# dump取得状態区分:未処理 -DUMP_STATUS_KBN_UNPROCESSED = '0' -# dump取得状態区分:dump取得正常終了 -DUMP_STATUS_KBN_COMPLETE = '2' # カレンダーファイルのコメントシンボル CALENDAR_COMMENT_SYMBOL = '#' - -# 月曜日(datetime.weekday()で月曜日を表す数字) -WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-monthly/src/system_var/environment.py b/ecs/jskult-batch-monthly/src/system_var/environment.py index 25afc294..c98503dd 100644 --- a/ecs/jskult-batch-monthly/src/system_var/environment.py +++ b/ecs/jskult-batch-monthly/src/system_var/environment.py @@ -8,14 +8,14 @@ DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] # AWS -ULTMARC_DATA_BUCKET 
= os.environ['ULTMARC_DATA_BUCKET'] -ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] +ARISJ_DATA_BUCKET = os.environ['ARISJ_DATA_BUCKET'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] -ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +ARISJ_DATA_FOLDER = os.environ['ARISJ_DATA_FOLDER'] +ARISJ_BACKUP_FOLDER = os.environ['ARISJ_BACKUP_FOLDER'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME'] +ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') From 2d56d8242013621b95dc6f86484e313bf42370a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 29 Jun 2023 10:53:04 +0900 Subject: [PATCH 070/103] =?UTF-8?q?feat:=20=E4=B8=8D=E5=85=B7=E5=90=88?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=E3=80=81=E5=BC=95=E6=95=B0=E5=87=BA=E5=8A=9B?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=E3=80=81=E4=BE=8B=E5=A4=96=E5=87=A6=E7=90=86?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../stored_procedure/src05/hco_to_mdb_laundering.sql | 4 ++-- .../stored_procedure/src05/inst_merge_laundering.sql | 4 ++-- rds_mysql/stored_procedure/src05/sales_lau_delete.sql | 8 +++++--- rds_mysql/stored_procedure/src05/sales_lau_upsert.sql | 9 +++++---- .../stored_procedure/src05/v_inst_merge_laundering.sql | 6 ++---- rds_mysql/stored_procedure/src05/whs_org_laundering.sql | 4 ++-- 6 files changed, 18 insertions(+), 17 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index 8201f3bc..eacd56e9 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 05908ca1..39f6e431 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), 
'...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index 8b312d6f..39e8f065 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -7,7 +7,8 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table, + 'laundering_period_year', laundering_period_year); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +19,7 @@ BEGIN 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); @@ -39,7 +40,8 @@ BEGIN "; SET @delete_data = REPLACE(@delete_data, "$$target_table$$", target_table); PREPARE delete_data_stmt from @delete_data; - EXECUTE delete_data_stmt USING @laundering_period_year; + SET @interval_year = laundering_period_year; + EXECUTE delete_data_stmt USING @interval_year; CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 72a86310..56758f18 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -1,6 +1,6 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date date, - extract_to_date date) +CREATE PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date DATE, + extract_to_date DATE) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -8,7 +8,8 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table, 'extract_from_date', + extract_from_date, 'extract_to_date', extract_to_date); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -19,7 +20,7 @@ BEGIN 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 06af3867..db50980a 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); @@ -43,8 +43,6 @@ BEGIN prft_cd FROM 
internal05.v_inst_merge_t - WHERE - (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' ) AS vimt, $$target_table$$ AS tt SET diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index e183d0c8..aad11100 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'whs_org_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); From ccd4ff79a9eb8033abd0aefe59dda2b727a1f9ed Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 16:13:04 +0900 Subject: [PATCH 071/103] =?UTF-8?q?fix:=20=E3=83=A1=E3=83=B3=E3=83=86?= =?UTF-8?q?=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0=E3=82=A4?= =?UTF-8?q?=E3=83=B3=E5=A4=B1=E6=95=97=E6=99=82=E3=81=AB=E3=82=A8=E3=83=A9?= =?UTF-8?q?=E3=83=BC=E9=80=9A=E7=9F=A5=E3=81=8C=E5=87=BA=E3=81=A6=E3=81=97?= =?UTF-8?q?=E3=81=BE=E3=81=86=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3=E3=80=82?= =?UTF-8?q?SSO=E3=81=AF=E9=80=9A=E7=9F=A5=E3=81=95=E3=81=9B=E3=82=8B?= =?UTF-8?q?=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 09032af5..6d867645 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -69,10 +69,10 @@ def login( try: jwt_token = login_service.login(request.username, request.password) except NotAuthorizeException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) verified_token = jwt_token.verify_token() @@ -126,7 +126,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(e) + logger.exception(f'SSOログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) # トークンからユーザーIDを取得 From d76b8d1c9332550239effe6e0c6d5c88b0aed986 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 16:39:12 +0900 Subject: [PATCH 072/103] =?UTF-8?q?fix:=20=E3=83=A6=E3=83=BC=E3=82=B6?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AB=E3=83=AC=E3=82=B3=E3=83=BC?= =?UTF-8?q?=E3=83=89=E3=81=8C=E5=AD=98=E5=9C=A8=E3=81=97=E3=81=AA=E3=81=84?= =?UTF-8?q?=E5=A0=B4=E5=90=88=E3=81=AB=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AB?= =?UTF-8?q?=E3=81=AA=E3=82=8B=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 9 +++++++++ 1 file 
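
The CASE expression repeated in the stored-procedure patches above caps @error_msg at 128 characters by keeping the first 125 and appending a literal '...'. Written out in Python purely to make the boundary behaviour explicit (not project code, and it ignores that SQL LENGTH() counts bytes rather than characters for multibyte text):

def truncate_error_msg(msg: str, limit: int = 128) -> str:
    """Messages longer than `limit` keep the first `limit - 3` characters plus '...'."""
    if len(msg) > limit:
        return msg[:limit - 3] + '...'
    return msg


assert truncate_error_msg('x' * 200) == 'x' * 125 + '...'   # 128 chars total
assert truncate_error_msg('short') == 'short'
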
changed, 9 insertions(+) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 6d867645..00e5cb75 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -79,6 +79,10 @@ def login( # 普通の認証だと、`cognito:username`に入る。 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') @@ -132,6 +136,11 @@ def sso_authorize( # トークンからユーザーIDを取得 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') From 535ae8fa482d601da6d7e6154edecc2aeda37d65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 30 Jun 2023 15:15:55 +0900 Subject: [PATCH 073/103] =?UTF-8?q?feat:=20=E4=BB=95=E6=A7=98=E5=A4=89?= =?UTF-8?q?=E6=9B=B4=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 34 +++++++++++++------ 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index ab4d107a..816a0545 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -55,12 +55,14 @@ def _set_disabled_dct_inst_merge(db: Database): _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') -def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: - # 納入先処方元マスタから、DCF施設コードに対応した領域コードの取得 +def _select_ult_ident_presc_dcf_inst_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 納入先処方元マスタから、DCF施設コードに対応したレコードの取得 try: sql = """ SELECT - ta_cd + ta_cd, + ult_ident_cd, + ratio FROM src05.ult_ident_presc WHERE @@ -70,9 +72,9 @@ def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: params = {'dcf_inst_cd': dcf_inst_cd} ult_ident_presc_ta_cd_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('納入先処方元マスタから領域コードの取得に成功') + logger.info('納入先処方元マスタからDCF施設コードに対応したレコードの取得に成功') except Exception as e: - logger.debug('納入先処方元マスタから領域コードの取得に失敗') + logger.debug('納入先処方元マスタからDCF施設コードに対応したレコードの取得に失敗') raise e return ult_ident_presc_ta_cd_records @@ -83,11 +85,12 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict logger.info('納入先処方元マスタの登録 開始') for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - ult_ident_presc_ta_cd_records = _select_ult_ident_presc_ta_cd(db, enabled_merge_record['dcf_inst_cd']) - for ult_ident_presc_ta_cd_record in 
ult_ident_presc_ta_cd_records: - ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + ult_ident_presc_source_records = _select_ult_ident_presc_dcf_inst_cd(db, enabled_merge_record['dcf_inst_cd']) + for ult_ident_presc_source_record in ult_ident_presc_source_records: + ult_ident_presc_records = _select_ult_ident_presc(db, + enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'], - ult_ident_presc_ta_cd_record['ta_cd']) + ult_ident_presc_source_record) for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 @@ -554,7 +557,8 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: return emp_chg_inst_records -def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, + ult_ident_presc_row: dict) -> list[dict]: # ult_ident_prescからSELECT try: sql = """ @@ -572,6 +576,8 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_ WHERE uipopp.presc_cd = :dup_opp_cd AND uipopp.ta_cd = :ta_cd + AND uipopp.ult_ident_cd = :ult_ident_cd + AND uipopp.ratio = :ratio ) AS opp_count FROM src05.ult_ident_presc AS uip @@ -580,7 +586,13 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_ AND uip.ta_cd = :ta_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ - params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} + params = { + 'dcf_inst_cd': dcf_inst_cd, + 'dup_opp_cd': dup_opp_cd, + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'] + } ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('納入先処方元マスタの取得 成功') From ef6a079467681146d777700ff88f7630593c2c9b Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 16:34:56 +0900 Subject: [PATCH 074/103] =?UTF-8?q?fix:=20=E3=83=A1=E3=83=B3=E3=83=86?= =?UTF-8?q?=E3=83=8A=E3=83=B3=E3=82=B9=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC?= =?UTF-8?q?=E3=81=AE=E3=83=AD=E3=82=B0=E3=82=A4=E3=83=B3=E6=99=82=E3=80=81?= =?UTF-8?q?=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=81=8C=E8=A6=8B=E3=81=A4?= =?UTF-8?q?=E3=81=8B=E3=82=89=E3=81=AA=E3=81=84=E3=81=A8=E3=81=8D=E3=81=AE?= =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=82=A2=E3=82=A6=E3=83=88=E7=94=BB=E9=9D=A2?= =?UTF-8?q?=E3=81=8B=E3=82=89=E3=83=A1=E3=83=B3=E3=83=86=E3=83=8A=E3=83=B3?= =?UTF-8?q?=E3=82=B9=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A4=E3=83=B3=E7=94=BB=E9=9D=A2=E3=81=AB=E6=88=BB=E3=82=8C?= =?UTF-8?q?=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/logout.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/controller/logout.py b/ecs/jskult-webapp/src/controller/logout.py index 79de281c..4f30d802 100644 --- a/ecs/jskult-webapp/src/controller/logout.py +++ b/ecs/jskult-webapp/src/controller/logout.py @@ -22,11 +22,15 @@ def logout_view( reason: Optional[str] = None, session: Union[UserSession, None] = Depends(verify_session) ): + # どういうルートでログインしたかを判断するため、refererを取得 + referer = request.headers.get('referer', '') + redirect_to = '/login/userlogin' link_text = 'MeDaCA機能メニューへ' - 
if session is not None and session.user_flg == '1': + if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'): redirect_to = '/login/maintlogin' link_text = 'Login画面に戻る' + logout = LogoutViewModel() logout.redirect_to = redirect_to logout.reason = constants.LOGOUT_REASON_MESSAGE_MAP.get(reason, '') From 7ba0ec99422fe137b8a51dc356695285df355976 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 16:36:10 +0900 Subject: [PATCH 075/103] =?UTF-8?q?style:=20=E3=82=B3=E3=83=A1=E3=83=B3?= =?UTF-8?q?=E3=83=88=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/logout.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ecs/jskult-webapp/src/controller/logout.py b/ecs/jskult-webapp/src/controller/logout.py index 4f30d802..c841e48c 100644 --- a/ecs/jskult-webapp/src/controller/logout.py +++ b/ecs/jskult-webapp/src/controller/logout.py @@ -27,6 +27,7 @@ def logout_view( redirect_to = '/login/userlogin' link_text = 'MeDaCA機能メニューへ' + # セッションが切れておらず、メンテユーザである、またはメンテログイン画面から遷移した場合、メンテログイン画面に戻す if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'): redirect_to = '/login/maintlogin' link_text = 'Login画面に戻る' From 3bf951c632da508bbfdf8435bf5cf71c2cc6e137 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 30 Jun 2023 16:55:31 +0900 Subject: [PATCH 076/103] =?UTF-8?q?feat:=20=E4=BE=8B=E5=A4=96=E5=87=A6?= =?UTF-8?q?=E7=90=86=E4=BF=AE=E6=AD=A3=E3=81=AE=E6=A8=AA=E5=B1=95=E9=96=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src05/inst_merge_t_create.sql | 118 ++++++++-------- .../src05/v_inst_merge_t_create.sql | 130 +++++++++--------- 2 files changed, 130 insertions(+), 118 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql b/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql index 74472fcc..b825ac53 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql @@ -2,69 +2,75 @@ CREATE PROCEDURE src05.inst_merge_t_create() SQL SECURITY INVOKER BEGIN - -- スキーマ名 - DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); - -- プロシージャ名 - DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_t_create'; - -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_t_create'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); - -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION - BEGIN - GET DIAGNOSTICS CONDITION 1 - @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, - 'inst_merge_t_createでエラーが発生', @error_state, @error_msg); - SIGNAL SQLSTATE '45000' - SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; - END; + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'inst_merge_t_createでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') + ELSE @error_msg + END + ); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = 
@error_state, MESSAGE_TEXT = @error_msg; + END; - SET @error_state = NULL, @error_msg = NULL; + SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 開始' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 開始' + ); - TRUNCATE TABLE internal05.inst_merge_t; + TRUNCATE TABLE internal05.inst_merge_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 終了' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 終了' + ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 開始' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 開始' + ); - INSERT INTO - internal05.inst_merge_t ( - dcf_dsf_inst_cd, - dup_opp_cd, - form_inst_name_kanji, - form_inst_name_kana, - inst_addr, - prefc_cd - ) - SELECT - dim.dcf_inst_cd, - dim.dcf_inst_cd_new, - ci.form_inst_name_kanji, - ci.form_inst_name_kana, - ci.inst_addr, - ci.prefc_cd - FROM - src05.dcf_inst_merge dim - LEFT OUTER JOIN src05.com_inst ci - ON dim.dcf_inst_cd_new = ci.dcf_dsf_inst_cd - AND ci.delete_flg = '0' - WHERE - dim.muko_flg = '0' - AND dim.dcf_inst_cd_new IS NOT NULL - AND dim.enabled_flg = 'Y' - AND src05.to_date_yyyymm01(dim.tekiyo_month) <= src05.get_syor_date(); + INSERT INTO + internal05.inst_merge_t ( + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + ) + SELECT + dim.dcf_inst_cd, + dim.dcf_inst_cd_new, + ci.form_inst_name_kanji, + ci.form_inst_name_kana, + ci.inst_addr, + ci.prefc_cd + FROM + src05.dcf_inst_merge AS dim + LEFT OUTER JOIN src05.com_inst AS ci + ON dim.dcf_inst_cd_new = ci.dcf_dsf_inst_cd + AND ci.delete_flg = '0' + WHERE + dim.muko_flg = '0' + AND dim.dcf_inst_cd_new IS NOT NULL + AND dim.enabled_flg = 'Y' + AND src05.to_date_yyyymm01(dim.tekiyo_month) <= src05.get_syor_date(); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 終了' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 終了' + ); END diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql index 6a6f9dd7..1c75c4ea 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql @@ -2,75 +2,81 @@ CREATE PROCEDURE src05.v_inst_merge_t_create() SQL SECURITY INVOKER BEGIN - -- スキーマ名 - DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); - -- プロシージャ名 - DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_t_create'; - -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_t_create'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); - -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION - BEGIN - GET DIAGNOSTICS CONDITION 1 - @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, - 'v_inst_merge_t_createでエラーが発生', @error_state, @error_msg); - SIGNAL SQLSTATE '45000' - SET 
MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; - END; + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'v_inst_merge_t_createでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') + ELSE @error_msg + END + ); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; - SET @error_state = NULL, @error_msg = NULL; + SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成① 開始'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成① 開始'); - TRUNCATE TABLE internal05.v_inst_merge_t; + TRUNCATE TABLE internal05.v_inst_merge_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成① 終了'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成① 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成② 開始'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成② 開始'); - INSERT INTO - internal05.v_inst_merge_t ( - v_inst_cd, - v_inst_cd_merge, - fcl_name, - fcl_kn_name, - fmt_addr, - prft_cd, - fcl_type - ) - SELECT - vhmv.v_inst_cd, - vhmv.v_inst_cd_merg, - fmv.fcl_name, - fmv.fcl_kn_name, - fmv.fmt_addr, - fmv.prft_cd, - fmv.fcl_type - FROM - src05.vop_hco_merge_v vhmv, - src05.fcl_mst_v fmv - INNER JOIN ( - SELECT - v_inst_cd, - MAX(sub_num) AS sno - FROM - src05.fcl_mst_v - GROUP BY - v_inst_cd - ) max_sno_fmv - ON fmv.v_inst_cd = max_sno_fmv.v_inst_cd - AND fmv.sub_num = max_sno_fmv.sno - WHERE - vhmv.v_inst_cd_merg = fmv.v_inst_cd - AND STR_TO_DATE(vhmv.apply_dt, '%Y-%m-%d') <= src05.get_syor_date() - AND fmv.rec_sts_kbn != '9'; + INSERT INTO + internal05.v_inst_merge_t ( + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd, + fcl_type + ) + SELECT + vhmv.v_inst_cd, + vhmv.v_inst_cd_merg, + fmv.fcl_name, + fmv.fcl_kn_name, + fmv.fmt_addr, + fmv.prft_cd, + fmv.fcl_type + FROM + src05.vop_hco_merge_v AS vhmv, + src05.fcl_mst_v AS fmv + INNER JOIN ( + SELECT + v_inst_cd, + MAX(sub_num) AS sno + FROM + src05.fcl_mst_v + GROUP BY + v_inst_cd + ) AS max_sno_fmv + ON fmv.v_inst_cd = max_sno_fmv.v_inst_cd + AND fmv.sub_num = max_sno_fmv.sno + WHERE + vhmv.v_inst_cd_merg = fmv.v_inst_cd + AND STR_TO_DATE(vhmv.apply_dt, '%Y-%m-%d') <= src05.get_syor_date() + AND fmv.rec_sts_kbn != '9'; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成② 終了' ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成② 終了' ); END From b77eab5e7c82fbc0b2a4f8d9e9439aa703059724 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 18:27:11 +0900 Subject: [PATCH 077/103] =?UTF-8?q?NEWDWH2021-1130=20LOAD=E6=96=87?= =?UTF-8?q?=E3=81=AEWarning=E3=81=AF=E3=82=82=E3=81=86=E8=A6=8B=E3=81=AA?= =?UTF-8?q?=E3=81=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff 
--git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index fa82805d..b345b0d8 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -23,6 +23,7 @@ class VjskDataLoadManager: try: db.connect() + db.execute("SET SESSION sql_mode = 'TRADITIONAL';") # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") @@ -37,18 +38,6 @@ class VjskDataLoadManager: """ db.begin() result = db.execute(sql, {"src_file_name": src_file_name}) - logger.debug(sql) - result_w = db.execute("SHOW WARNINGS;") - has_mysql_warnings = False - for row in result_w.fetchall(): - # 例外スロー対象から除外 : Warning(1261) Row {ROW NUMBER} doesn't contain data for all columns - if len(row) >= 2 and row[0] == "Warning" and row[1] == 1261: - logger.info(f"SHOW WARNINGS (SKIP) : {row}") - continue - has_mysql_warnings = True - logger.info(f"SHOW WARNINGS : {row}") - if has_mysql_warnings: - raise Exception("LOAD文実行時にWARNINGが発生しました。") logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') db.commit() From 9e6403dea521cfc95ced01e5ef3f6231d6ed6630 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 19:23:42 +0900 Subject: [PATCH 078/103] =?UTF-8?q?NEWDWH2021-1130=20tsv=E3=83=88=E3=83=81?= =?UTF-8?q?=E5=88=87=E3=82=8C=E5=88=A4=E5=AE=9A=E3=82=92LOAD=E5=AE=9F?= =?UTF-8?q?=E8=A1=8C=E5=89=8D=E3=81=AB=E8=A6=8B=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 27 +++++++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 19 +++++++++++++ 2 files changed, 46 insertions(+) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index b345b0d8..3ef87186 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -60,6 +60,26 @@ class VjskDataLoadManager: logger.debug("_import_to_db done") return + def _get_tsv_last_row_tab_count(src_file_name: str) -> int: + # memo: tsvファイルが数百MBに及ぶことを想定して、末尾から1行分を参照する + # memo: 前提1 行区切りは LF('\n') + buf_count = 0 + + # バイナリモードでファイルオープン + with open(src_file_name, 'rb') as file: + # ファイルの末尾から2バイト手前に移動 + file.seek(-2, 2) + # 改行文字を見つけるまで逆方向に読み進める + while file.read(1) != b'\n': + # 1バイト戻って再度読み込み + file.seek(-2, 1) + # 末尾行を抽出 + last_line = file.readline().decode().rstrip('\n') + # 末尾行に含まれるタブ文字の数を抽出 + buf_count = last_line.count('\t') + + return buf_count + @classmethod def load(self, target: dict): logger.debug(f'load start target:{target}') @@ -67,6 +87,13 @@ class VjskDataLoadManager: # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] + # tsvファイル末尾行のTABの数が総定数と一致しない場合は例外をスロー + tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) + expect_tabs = mapper.get_file_column_separators(target["condkey"]) + if tsv_tabs != expect_tabs: + msg = f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" + raise BatchOperationException(msg) + # データベース登録 self._import_to_db(local_file_name, target["condkey"]) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 352e2f91..19f70067 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ 
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -21,6 +21,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME = "data_name" _KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" + _KEY_FILE_COLUMN_SEPARATORS = "file_column_separators" _KEY_ORG_TABLE = "org_table" _KEY_SRC_TABLE = "src_table" _KEY_UPSERT_SQL = "upsert_sql" @@ -30,6 +31,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "82", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -299,6 +301,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "10", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -352,6 +355,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "15", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -420,6 +424,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "45", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -578,6 +583,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "10", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -631,6 +637,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "8", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -678,6 +685,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "23", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -770,6 +778,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "27", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -874,6 +883,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "9", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -924,6 +934,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "3", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -956,6 +967,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "16", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1027,6 +1039,7 @@ class 
VjskReceiveFileMapper: _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "7", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1071,6 +1084,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "28", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1178,6 +1192,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "77", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1432,6 +1447,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "5", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1481,6 +1497,9 @@ class VjskReceiveFileMapper: def get_file_suffix(self, condkey: str) -> str: return self._get_interface_property(condkey, self._KEY_FILE_SUFFIX) + def get_file_column_separators(self, condkey: str) -> int: + return int(self._get_interface_property(condkey, self._KEY_FILE_COLUMN_SEPARATORS)) + def get_org_table(self, condkey: str) -> str: return self._get_interface_property(condkey, self._KEY_ORG_TABLE) From 2e6937eb71275d43c66172cd696efdce1a212410 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 11:11:06 +0900 Subject: [PATCH 079/103] =?UTF-8?q?fix:=20=E3=82=BB=E3=83=83=E3=82=B7?= =?UTF-8?q?=E3=83=A7=E3=83=B3=E5=88=87=E3=82=8C=E3=81=AE=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A2=E3=82=A6=E3=83=88=E6=99=82=E3=80=81=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A2=E3=82=A6=E3=83=88=E7=94=BB=E9=9D=A2=E3=81=AB=E4=BD=95?= =?UTF-8?q?=E3=82=82=E5=87=BA=E3=81=95=E3=81=AA=E3=81=84=E7=94=A8=E3=81=AB?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 4 ++-- ecs/jskult-webapp/src/router/session_router.py | 3 +-- ecs/jskult-webapp/src/system_var/constants.py | 2 -- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 09032af5..412ba068 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -73,7 +73,7 @@ def login( raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: logger.exception(e) - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) verified_token = jwt_token.verify_token() # 普通の認証だと、`cognito:username`に入る。 @@ -127,7 +127,7 @@ def sso_authorize( verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: logger.exception(e) - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) # トークンからユーザーIDを取得 user_id = verified_token.user_id diff --git a/ecs/jskult-webapp/src/router/session_router.py b/ecs/jskult-webapp/src/router/session_router.py index 
90f3a5c9..324c777f 100644 --- a/ecs/jskult-webapp/src/router/session_router.py +++ b/ecs/jskult-webapp/src/router/session_router.py @@ -90,8 +90,7 @@ class BeforeCheckSessionRoute(MeDaCaRoute): verified_session = verify_session(checked_session) # セッションが有効でない場合、エラーにする if verified_session is None: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, - detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) scope = request.scope scope['session'] = verified_session session_request = Request(receive=request.receive, scope=scope) diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index 899c19e5..2ea9454f 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -113,7 +113,6 @@ LOGOUT_REASON_LOGIN_ERROR = 'login_error' LOGOUT_REASON_BATCH_PROCESSING = 'batch_processing' LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE = 'batch_processing_ult' LOGOUT_REASON_NOT_LOGIN = 'not_login' -LOGOUT_REASON_SESSION_EXPIRED = 'session_expired' LOGOUT_REASON_DB_ERROR = 'db_error' LOGOUT_REASON_UNEXPECTED = 'unexpected' @@ -123,7 +122,6 @@ LOGOUT_REASON_MESSAGE_MAP = { LOGOUT_REASON_BATCH_PROCESSING: '日次バッチ処理中なので、
生物由来データ参照は使用出来ません。', LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE: '日次バッチ処理中のため、
マスタ-メンテは使用出来ません。', LOGOUT_REASON_NOT_LOGIN: 'Loginしてからページにアクセスしてください。', - LOGOUT_REASON_SESSION_EXPIRED: 'セッションが切れています。
再度Loginしてください。', LOGOUT_REASON_DB_ERROR: 'DB接続に失敗しました。
再度Loginするか、
管理者にお問い合わせください。', LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。
再度Loginするか、
管理者に問い合わせてください。' } From b326bf65a9a1c44b9ea1bdd57f045671a1df2492 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 11:38:18 +0900 Subject: [PATCH 080/103] =?UTF-8?q?style:=20=E3=83=86=E3=83=BC=E3=83=96?= =?UTF-8?q?=E3=83=AB=E3=81=AE=E3=83=95=E3=82=A9=E3=83=B3=E3=83=88=E3=82=B5?= =?UTF-8?q?=E3=82=A4=E3=82=BA=E8=AA=BF=E6=95=B4=208pt=E2=86=9212pt?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/static/css/ultStyle.css | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index 8d018b01..382ef4fd 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -633,14 +633,14 @@ table{ table.tablesorter { font-family:arial; background-color: #CDCDCD; - font-size: 8pt; + font-size: 12pt; text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { background-color: #e6EEEE; border: 0.1px solid silver; - font-size: 8pt; + font-size: 12pt; padding: 4px; padding-right: 20px; } From 46304d5b2c47d45aa260922b1effef88c359c0d6 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 11:41:32 +0900 Subject: [PATCH 081/103] =?UTF-8?q?style:=20=E4=BB=A5=E4=B8=8B=E3=82=92?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=20=E3=83=BB=E3=80=8CPrev=E3=80=8D=E3=80=8CNe?= =?UTF-8?q?xt=E3=80=8D=E3=83=9C=E3=82=BF=E3=83=B3=E3=81=8C=E4=B8=80?= =?UTF-8?q?=E8=A6=A7=E3=81=AE=E9=A0=85=E7=9B=AE=E5=90=8D=E3=81=A8=E9=87=8D?= =?UTF-8?q?=E3=81=AA=E3=81=A3=E3=81=A6=E3=81=84=E3=82=8B=20=E3=83=BB?= =?UTF-8?q?=E4=B8=80=E8=A6=A7=E3=82=92=E3=82=B9=E3=82=AF=E3=83=AD=E3=83=BC?= =?UTF-8?q?=E3=83=AB=E3=81=99=E3=82=8B=E3=81=A8=E3=83=81=E3=82=A7=E3=83=83?= =?UTF-8?q?=E3=82=AF=E3=83=9C=E3=83=83=E3=82=AF=E3=82=B9=E3=81=AE=E5=B9=85?= =?UTF-8?q?=E3=81=8C=E5=A4=89=E3=82=8F=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/static/css/bioStyle.css | 9 +++++++-- ecs/jskult-webapp/src/static/css/ultStyle.css | 10 ++++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-webapp/src/static/css/bioStyle.css b/ecs/jskult-webapp/src/static/css/bioStyle.css index 8bc72999..c0ab2ca5 100644 --- a/ecs/jskult-webapp/src/static/css/bioStyle.css +++ b/ecs/jskult-webapp/src/static/css/bioStyle.css @@ -1,3 +1,9 @@ +/* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ +/* @see https://bootstrap-guide.com/content/reboot#page-defaults */ +*, ::after, ::before { + box-sizing: initial; +} + body { white-space: nowrap; background-color: LightCyan; @@ -75,7 +81,7 @@ table{ .bioScroll_div { overflow: auto; - padding-top: 10px; + margin-top: 1%; height: 250px; width: 1132px; } @@ -215,7 +221,6 @@ table{ .result_tr{ overflow-y: scroll; overflow-x: scroll; - } .result_data{ diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index 382ef4fd..e39fa143 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -1,3 +1,9 @@ +/* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ +/* @see https://bootstrap-guide.com/content/reboot#page-defaults */ +*, ::after, ::before { + box-sizing: initial; +} + body { background-color: LightCyan; font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; @@ 
-39,8 +45,8 @@ table{ .scroll_table{ overflow: auto; white-space: nowrap; - margin-bottom: 2%; - /*スクロール時カラムが動く問題の修正 width: 100%;をコメントアウト*/ + margin-top: 1%; + margin-bottom: 1%; width: 100%; height: 250px; } From 0250080bc6831b804983b121e49ec2606dc3d146 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Mon, 3 Jul 2023 13:47:19 +0900 Subject: [PATCH 082/103] =?UTF-8?q?=E5=87=BA=E5=8A=9B=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E7=84=A1=E3=81=97=E3=83=90=E3=83=BC=E3=82=B8=E3=83=A7=E3=83=B3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/.env.example | 1 - ecs/jskult-batch-monthly/src/aws/s3.py | 29 ++- .../src/batch/common/batch_context.py | 9 + .../src/batch/output_arisj_file_process.py | 168 ++++++------------ .../src/jobctrl_monthly.py | 33 ++-- .../src/system_var/environment.py | 1 - .../calendar/jskult_arisj_output_day_list.txt | 106 +---------- 7 files changed, 120 insertions(+), 227 deletions(-) diff --git a/ecs/jskult-batch-monthly/.env.example b/ecs/jskult-batch-monthly/.env.example index 6bda1a69..d1f67281 100644 --- a/ecs/jskult-batch-monthly/.env.example +++ b/ecs/jskult-batch-monthly/.env.example @@ -7,7 +7,6 @@ DB_SCHEMA=src05 ARISJ_DATA_BUCKET=mbj-newdwh2021-staging-jskult-arisj JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config -ULTMARC_BACKUP_FOLDER=************ LOG_LEVEL=INFO ARISJ_DATA_FOLDER=DATA diff --git a/ecs/jskult-batch-monthly/src/aws/s3.py b/ecs/jskult-batch-monthly/src/aws/s3.py index ed337407..8804bba9 100644 --- a/ecs/jskult-batch-monthly/src/aws/s3.py +++ b/ecs/jskult-batch-monthly/src/aws/s3.py @@ -65,9 +65,34 @@ class ConfigBucket(S3Bucket): return temporary_file_path +class ArisjBucket(S3Bucket): + _bucket_name = environment.ARISJ_DATA_BUCKET + _folder = environment.ARISJ_BACKUP_FOLDER + + def list_dat_file(self): + return self._s3_client.list_objects(self._bucket_name, self._folder) + + def s3_arisj_csv_upload(self, arisj_create_csv: str, csv_file_path: str): + # s3にCSVファイルをUPする + Bucket = environment.ARISJ_DATA_BUCKET + folder = environment.ARISJ_DATA_FOLDER + csv_file_name = f'{folder}/{arisj_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, Bucket, csv_file_name) + return + + def backup_dat_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + arisj_backup_bucket = ArisjBackupBucket() + folder = environment.ARISJ_DATA_FOLDER + dat_file_key = f'{folder}/{dat_file_key}' + backup_key = f'{arisj_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_file_key, arisj_backup_bucket._bucket_name, backup_key) + + class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET -class UltmarcBackupBucket(JskUltBackupBucket): - _folder = environment.ULTMARC_BACKUP_FOLDER +class ArisjBackupBucket(JskUltBackupBucket): + _folder = environment.ARISJ_BACKUP_FOLDER diff --git a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py index 6a05a423..8c8c12fb 100644 --- a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py @@ -1,5 +1,6 @@ class BatchContext: __instance = None + __syor_date: str # 処理日(yyyy/mm/dd形式) __is_arisj_output_day: bool # 月次バッチ起動日フラグ def __init__(self) -> None: @@ -11,6 +12,14 @@ class BatchContext: cls.__instance = cls() return 
cls.__instance + @property + def syor_date(self): + return self.__syor_date + + @syor_date.setter + def syor_date(self, syor_date_str: str): + self.__syor_date = syor_date_str + @property def is_arisj_output_day(self): return self.__is_arisj_output_day diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py index 0deec661..b8645039 100644 --- a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py +++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py @@ -3,87 +3,88 @@ from datetime import datetime from src.db.database import Database from src.error.exceptions import BatchOperationException -from src.aws.s3 import S3Client from src.logging.get_logger import get_logger +from src.aws.s3 import ArisjBucket +from src.batch.common.batch_context import BatchContext import tempfile -import os import os.path as path -import logging import csv -logger = get_logger('実消化&アルトマーク月次バッチ') +logger = get_logger('ARIS-J連携データ出力') -create_date_format = datetime.now().strftime('%Y-%m-%d %H:%M:%S') -prg_id = 'PrgId:BI0402' create_date = datetime.now().strftime('%Y%m%d%H%M%S') -aris_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' -res_log = f'D0004{create_date}.log' -sql_err_msg = "MsgID:999999000002 Message:SQL実行エラーです。" -move_err_msg = "MsgID:BI0000000041 Message:S3バケットARISへのCSVデータ、実行ログ移動できませんでした。" +arisj_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' +sql_err_msg = "SQL実行エラーです。" def exec(): """ 実消化&アルトマーク月次バッチ """ try: + logger.info('バッチ処理を開始しました。') - start_msg = "MsgID:BI0000000001 Message:バッチ処理を開始しました。" - cnt_msg = "MsgID: Message: LogText:" + try: + db = Database.get_instance() + # DB接続 + db.connect() + except Exception as e: + logger.info('DB接続エラーです') + raise e - # 実行ログに書き込む - resLog, log_file_path = make_log_data() - resLog.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') - logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {start_msg}') - - db = Database.get_instance() - # DB接続 - db.connect() # トランザクションの開始 db.begin() # 正常系データの反映 # 過去分は不要のため、デリート - physical_normal_delete(db) + physical_wk_inst_aris_if_delete(db) # 正常系データを取得しWKテーブルに保存する。 - normal_insert_into(db) + wk_inst_aris_if_insert_into(db) # 正常系データの件数を取得 - suc_count = normal_count(db) + suc_count = wk_inst_aris_if_count(db) # 警告系データの反映 # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 - physical_abnormal_delete(db) + physical_wk_inst_aris_if_wrn_delete(db) # 異常系データを取得しWKテーブルに保存する。 - abnormal_insert_into(db) + wk_inst_aris_if_wrn_insert_into(db) # 異常系データの件数を取得 - wrn_count = abnormal_count(db) + wrn_count = wk_inst_aris_if_wrn_count(db) # CSVファイルの作成用のSQL実行 record_csv = csv_data_select(db) # CSVファイル作成 - csv_file_path = make_csv_data(record_csv, resLog) + csv_file_path = make_csv_data(record_csv) # トランザクションの終了 db.commit() - # 実行ログファイルの追記 - # 実行ログに処理件数を書き込む。 + # ログに処理件数を出力 sum_count = suc_count + wrn_count - resLog.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') - logger.info(f'{create_date_format}[DWH][3][INFO]{prg_id} {cnt_msg}(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') + logger.info(f'(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') # CSVファイル移動処理 - s3_csv_upload_data(csv_file_path, resLog) + try: + ArisjBucket().s3_arisj_csv_upload(arisj_create_csv, csv_file_path) + except Exception as e: + logger.info('S3バケットArisjへのCSVデータ、移動できませんでした。') + raise e - # logファイル移動処理 - s3_log_upload_data(log_file_path) + # 処理後ファイルをバックアップ + try: + arisj_bucket = 
ArisjBucket() + batch_context = BatchContext.get_instance() + arisj_bucket.backup_dat_file(arisj_create_csv, batch_context.syor_date) + except Exception as e: + logger.info('S3バケットArisjバックアップへCSVデータ、コピーできませんでした。') + raise e + + logger.info('バッチ処理を終了しました。') - logger.info('実消化&アルトマーク月次バッチ処理: 終了') except Exception as e: - logger.info(f'{create_date_format}[DWH][5][INFO]') raise BatchOperationException(e) finally: @@ -92,7 +93,7 @@ def exec(): db.disconnect() -def physical_normal_delete(db): +def physical_wk_inst_aris_if_delete(db): # 過去分は不要のため、デリート try: # WKテーブルの過去分削除SQL @@ -102,11 +103,11 @@ def physical_normal_delete(db): db.execute(sql) return except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def normal_insert_into(db): +def wk_inst_aris_if_insert_into(db): # 正常系データを取得しWKテーブルに保存する。 try: # 正常系データを取得しWKテーブルに保存SQL @@ -150,11 +151,11 @@ def normal_insert_into(db): db.execute(sql) return except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def normal_count(db): +def wk_inst_aris_if_count(db): # 正常系データの件数を取得 try: # 正常系データの件数を取得SQL @@ -164,11 +165,11 @@ def normal_count(db): record_count = db.execute_select(sql) return record_count[0]['countNum'] except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def physical_abnormal_delete(db): +def physical_wk_inst_aris_if_wrn_delete(db): # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 try: # 異常系WKテーブルの過去分削除SQL @@ -179,11 +180,11 @@ def physical_abnormal_delete(db): db.execute(sql) return except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def abnormal_insert_into(db): +def wk_inst_aris_if_wrn_insert_into(db): # 異常系データを取得しWKテーブルに保存する。 try: # 異常系データを取得しWKテーブルに保存SQL @@ -233,11 +234,11 @@ def abnormal_insert_into(db): db.execute(sql) return except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def abnormal_count(db): +def wk_inst_aris_if_wrn_count(db): # 異常系データの件数を取得 try: # 異常系データの件数を取得SQL @@ -249,7 +250,7 @@ def abnormal_count(db): return record_count[0]['countNum'] except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e @@ -266,22 +267,19 @@ def csv_data_select(db): return db.execute_select(sql) except Exception as e: - logger.debug(f'{create_date_format}:{prg_id} {sql_err_msg}') + logger.debug(f'{sql_err_msg}') raise e -def make_csv_data(record_csv: list, resLog): +def make_csv_data(record_csv: list): # 一時ファイルとして保存する(CSVファイル) try: - err_end_msg = "MsgID:BI0000009998 Message:バッチ処理を異常終了しました。" - csv_err_msg = "MsgID:BI0000000040 Message:ワークデータの作成に失敗しました。" - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, aris_create_csv) + csv_file_path = path.join(temporary_dir, arisj_create_csv) head_str = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TJ_HOSPITALSHORT', 'TK_HOSPITAL', 'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT', - 'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', ' TD_UPDATE'] + 'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', 'TD_UPDATE'] # Shift-JIS、CRLF、価囲いありで書き込む with open(csv_file_path, mode='w', encoding='cp932') as csv_file: @@ -298,64 +296,8 @@ def make_csv_data(record_csv: list, resLog): 
writer.writerow(csv_data) except Exception as e: - resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') - resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {csv_err_msg}') - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {err_end_msg}') + logger.info('ワークデータの作成に失敗しました。') + logger.info('バッチ処理を異常終了しました。') raise e return csv_file_path - - -def make_log_data(): - # 一時ファイルとして保存する(ログファイル) - temporary_dir = tempfile.mkdtemp() - log_file_path = path.join(temporary_dir, res_log) - - # ロガーの生成 - resLog = logging.getLogger('resLog') - # 出力レベルの設定 - resLog.setLevel(logging.INFO) - # ハンドラの生成 - resLog_handler = logging.FileHandler(log_file_path) - # ロガーにハンドラを登録 - resLog.addHandler(resLog_handler) - # フォーマッタの生成 - fmt = logging.Formatter('%(message)s') - # ハンドラにフォーマッタを登録 - resLog_handler.setFormatter(fmt) - - return resLog, log_file_path - - -def s3_csv_upload_data(csv_file_path, resLog): - # s3にCSVファイルをUPする - Bucket = os.environ['ARISJ_DATA_BUCKET'] - folder = os.environ['ARISJ_DATA_FOLDER'] - csv_file_name = f'{folder}/{aris_create_csv}' - s3_client = S3Client() - - try: - s3_client.upload_file(csv_file_path, Bucket, csv_file_name) - except Exception as e: - resLog.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') - raise e - - return - - -def s3_log_upload_data(log_file_path): - # s3にログファイルをUPする - Bucket = os.environ['ARISJ_DATA_BUCKET'] - folder = os.environ['ARISJ_DATA_FOLDER'] - log_file_name = f'{folder}/{res_log}' - s3_client = S3Client() - - try: - s3_client.upload_file(log_file_path, Bucket, log_file_name) - except Exception as e: - logger.info(f'{create_date_format}[DWH][5][INFO]{prg_id} {move_err_msg}') - raise e - - return diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py index dba5fac4..0534808d 100644 --- a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py +++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py @@ -1,6 +1,7 @@ """実消化&アルトマーク 月次バッチ処理""" from src.aws.s3 import ConfigBucket +from src.aws.s3 import ArisjBackupBucket from src.batch.batch_functions import get_batch_statuses from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile @@ -9,28 +10,34 @@ from src.logging.get_logger import get_logger from src.system_var import constants from src.batch import output_arisj_file_process -logger = get_logger('月次処理コントロール') +logger = get_logger('月次処理コントロール(ARIS-J)') # バッチ共通設定を取得 batch_context = BatchContext.get_instance() +arisj_bucket = ArisjBackupBucket() def exec(): try: logger.info('月次バッチ:開始') try: - # 月次バッチ処置中フラグ、dump処理状態区分、処理日を取得 + logger.info('処理日取得') + # 月次バッチ処置中フラグ、処理日を取得 batch_processing_flag, syor_date = get_batch_statuses() except BatchOperationException as e: - logger.exception(f'日付テーブル取得(異常終了){e}') + logger.exception(f'日次ジョブ取得エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS # 日次バッチ処理中の場合、後続の処理は行わない + logger.info('日次ジョブ処理中判定') if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING: - logger.error('日次バッチ処理中のため、月次バッチ処理を終了します。') + logger.error('日次ジョブ処理中エラー(異常終了)') return constants.BATCH_EXIT_CODE_SUCCESS - logger.info(f'処理日={syor_date}') + # バッチ共通設定に処理日を追加 + batch_context.syor_date = syor_date + + logger.info(f'処理日取得={syor_date}') # 稼働日かかどうかを、実消化&アルトマーク月次バッチ稼働日ファイルをダウンロードして判定 try: @@ -38,22 +45,22 @@ def exec(): arisj_output_day_calendar = 
CalendarFile(arisj_output_day_list_file_path) batch_context.is_arisj_output_day = arisj_output_day_calendar.compare_date(syor_date) except Exception as e: - logger.exception(f'実消化&アルトマーク月次バッチ稼働日ファイルの読み込みに失敗しました。{e}') + logger.exception(f'処理日取得エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS # 調査目的で実消化&アルトマーク月次バッチ稼働日かどうかをログ出力 - if batch_context.is_arisj_output_day: - logger.info('本日は実消化&アルトマーク月次バッチ稼働日です。') - else: - logger.info('月次バッチは行われませんでした。') + if not batch_context.is_arisj_output_day: + logger.info('ARIS-J連携データ出力日でない為、処理終了') return constants.BATCH_EXIT_CODE_SUCCESS + logger.info('ARIS-J連携データ出力日です') + try: - logger.info('月次バッチ:起動') + logger.info('ARIS-J連携データ出力:起動') output_arisj_file_process.exec() - logger.info('月次バッチ:終了') + logger.info('ARIS-J連携データ出力:終了') except BatchOperationException as e: - logger.exception(f'月次バッチ処理エラー(異常終了){e}') + logger.exception(f'ARIS-J連携データ出力(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS # 正常終了を保守ユーザーに通知 diff --git a/ecs/jskult-batch-monthly/src/system_var/environment.py b/ecs/jskult-batch-monthly/src/system_var/environment.py index c98503dd..08d6cd16 100644 --- a/ecs/jskult-batch-monthly/src/system_var/environment.py +++ b/ecs/jskult-batch-monthly/src/system_var/environment.py @@ -15,7 +15,6 @@ ARISJ_DATA_FOLDER = os.environ['ARISJ_DATA_FOLDER'] ARISJ_BACKUP_FOLDER = os.environ['ARISJ_BACKUP_FOLDER'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME'] -ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') diff --git a/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt b/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt index 2d5f42c0..fe095c8a 100644 --- a/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt +++ b/s3/config/jskult/calendar/jskult_arisj_output_day_list.txt @@ -1,100 +1,12 @@ -2023/06/23 -2023/06/24 -2023/06/25 -2023/06/26 -2023/06/27 -2023/06/28 -2023/06/29 -2023/06/30 -2023/07/01 -2023/07/02 +2023/01/05 +2023/02/01 +2023/03/01 +2023/04/03 +2023/05/01 +2023/06/01 2023/07/03 -2023/07/04 -2023/07/05 -2023/07/06 -2023/07/07 -2023/07/08 -2023/07/09 -2023/07/10 -2023/07/11 -2023/07/12 -2023/07/13 -2023/07/14 -2023/07/15 -2023/07/16 -2023/07/17 -2023/07/18 -2023/07/19 -2023/07/20 -2023/07/21 -2023/07/22 -2023/07/23 -2023/07/24 -2023/07/25 -2023/07/26 -2023/07/27 -2023/07/28 -2023/07/29 -2023/07/30 -2023/07/31 2023/08/01 -2023/08/02 -2023/08/03 -2023/08/04 -2023/08/05 -2023/08/06 -2023/08/07 -2023/08/08 -2023/08/09 -2023/08/10 -2023/08/11 -2023/08/12 -2023/08/13 -2023/08/14 -2023/08/15 -2023/08/16 -2023/08/17 -2023/08/18 -2023/08/19 -2023/08/20 -2023/08/21 -2023/08/22 -2023/08/23 -2023/08/24 -2023/08/25 -2023/08/26 -2023/08/27 -2023/08/28 -2023/08/29 -2023/08/30 -2023/08/31 2023/09/01 -2023/09/02 -2023/09/03 -2023/09/04 -2023/09/05 -2023/09/06 -2023/09/07 -2023/09/08 -2023/09/09 -2023/09/10 -2023/09/11 -2023/09/12 -2023/09/13 -2023/09/14 -2023/09/15 -2023/09/16 -2023/09/17 -2023/09/18 -2023/09/19 -2023/09/20 -2023/09/21 -2023/09/22 -2023/09/23 -2023/09/24 -2023/09/25 -2023/09/26 -2023/09/27 -2023/09/28 -2023/09/29 -2023/09/30 +2023/10/02 +2023/11/01 +2023/12/01 \ No newline at end of file From cca854be178336cc13422e4562ec3f38db130d7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Tue, 4 Jul 2023 15:24:05 +0900 Subject: [PATCH 083/103] =?UTF-8?q?=E4=BB=AE=E5=AE=8C=E6=88=90?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 3 + ecs/jskult-batch-daily/src/aws/s3.py | 26 ++ .../batch/ultmarc/export_vjsk_csv_process.py | 270 ++++++++++++++++++ .../src/batch/ultmarc/ultmarc_process.py | 13 +- ecs/jskult-batch-daily/src/jobctrl_daily.py | 8 +- .../src/system_var/environment.py | 3 + 6 files changed, 318 insertions(+), 5 deletions(-) create mode 100644 ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..4a0eefbe 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -11,3 +11,6 @@ ULTMARC_BACKUP_FOLDER=ultmarc JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +VJSK_BACKUP_FOLDER=vjsk +VJSK_DATA_SEND_FOLDER=send +VJSK_DATA_BUCKET=************* \ No newline at end of file diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 68ed0a7c..fab7e222 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -76,6 +76,28 @@ class UltmarcBucket(S3Bucket): self._s3_client.delete_file(self._bucket_name, dat_file_key) +class VjskBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _folder = environment.VJSK_DATA_SEND_FOLDER + + def list_dat_file(self): + return self._s3_client.list_objects(self._bucket_name, self._folder) + + def upload_dat_file(self, vjsk_create_csv: str, csv_file_path: str): + # S3バケットにファイルを移動 + csv_file_name = f'{self._folder}/{vjsk_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + return + + def backup_dat_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + vjsk_backup_bucket = VjskBackupBucket() + dat_key = f'{self._folder}/{dat_file_key}' + backup_key = f'{vjsk_backup_bucket._folder}/{self._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) + + class ConfigBucket(S3Bucket): _bucket_name = environment.JSKULT_CONFIG_BUCKET @@ -96,3 +118,7 @@ class JskUltBackupBucket(S3Bucket): class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py new file mode 100644 index 00000000..27243e4a --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py @@ -0,0 +1,270 @@ +"""アルトマークデータ処理""" + +from src.aws.s3 import UltmarcBucket, VjskBucket +from src.batch.common.batch_context import BatchContext + +from src.db.database import Database +from src.logging.get_logger import get_logger +import tempfile +import os.path as path +import csv + +logger = get_logger('V実用消化施設データ作成処理') +ultmarc_bucket = UltmarcBucket() +batch_context = BatchContext.get_instance() + +sql_err_msg = "SQL実行エラーです。" +vjsk_csv_file_name = 'ComInst.csv' + + +def exec(): + db = Database.get_instance() + try: + logger.info('バッチ処理を開始しました。') + + try: + # DB接続 + db.connect() + # ファイル単位でトランザクションを行う + db.begin() + except Exception as e: + logger.info('DB接続エラーです。') + raise e + + # 
CSVファイルの作成用のSQL実行(施設) + record_inst_csv = csv_data_inst_select(db) + # CSVファイルの作成用のSQL実行(薬局) + record_pharm_csv = csv_data_pharm_select(db) + # CSVファイル作成 + csv_file_path = make_csv_data(record_inst_csv, record_pharm_csv) + + vjsk_bucket = VjskBucket() + try: + # s3へデータ移動 + vjsk_bucket.upload_dat_file(vjsk_csv_file_name, csv_file_path) + except Exception as e: + logger.info('S3バケットDWHへCSVデータを作成できませんでした。') + raise e + + try: + # 処理後ファイルをバックアップ + batch_context = BatchContext.get_instance() + vjsk_bucket.backup_dat_file(vjsk_csv_file_name, batch_context.syor_date) + except Exception as e: + logger.info('バックアップバケットへCSVデータをコピーできませんでした。') + raise e + + csv_count = len(record_inst_csv) + len(record_pharm_csv) + logger.info(f'CSV出力件数: {csv_count}。') + logger.info('バッチ処理を正常に終了しました。') + except Exception as e: + raise e + finally: + # 終了時に必ずコミットする + db.commit() + db.disconnect() + return + + +def csv_data_inst_select(db): + # CSVファイルの作成用のSQL実行(施設) + try: + # 施設テーブル検索SQL + sql = """\ + SELECT dcf_dsf_inst_cd, + inst_div_cd, + addr_unknown_reason_cd, + form_inst_name_kana, + inst_name_kana, + form_inst_name_kanji, + inst_name_kanji, + rltd_univ_prnt_cd, + bed_num, + close_flg, + estab_sche_flg, + close_start_ym, + estab_sche_ym, + ward_abolish_flg, + inst_repre_cd, + inst_repre_kana, + inst_repre, + phone_number_non_flg, + unconf_flg, + inst_phone_number, + inst_addr_kana, + inst_addr, + postal_number, + village_cd, + prefc_cd, + city_cd, + addr_display_number, + addr_cnt_kana, + addr_cnt, + manage_cd, + delete_sche_reason_cd, + hp_assrt_cd, + dup_opp_cd, + insp_item_micrb, + insp_item_serum, + insp_item_blood, + insp_item_patho, + insp_item_paras, + insp_item_biochem, + insp_item_ri, + re_exam_cd, + prmit_bed_num_other, + prmit_bed_num_mental, + prmit_bed_num_tuber, + prmit_bed_num_infection, + prmit_bed_num_sum, + prmit_bed_num_gen, + prmit_bed_num_rcup, + prmit_bed_maint_ymd, + inst_pharm_div, + abolish_ymd, + delete_flg, + filler_1, + filler_2, + filler_3, + filler_4, + filler_5, + regist_date, + create_user, + update_date, + update_user, + sys_regist_date, + regist_prgm_id, + sys_update_date, + update_prgm_id + FROM src05.com_inst ORDER BY dcf_dsf_inst_cd + """ + return db.execute_select(sql) + except Exception as e: + logger.debug(f'{sql_err_msg}') + raise e + + +def csv_data_pharm_select(db): + # CSVファイルの作成用のSQL実行(薬局) + try: + # 薬局テーブル検索SQL + sql = """\ + SELECT dcf_dsf_inst_cd, + inst_div_cd, + addr_unknown_reason_cd, + form_inst_name_kana, + inst_name_kana, + form_inst_name_kanji, + inst_name_kanji, + '', + '', + close_flg, + estab_sche_flg, + close_start_ym, + estab_sche_ym, + '', + '', + inst_repre_kana, + inst_repre, + phone_number_non_flg, + unconf_flg, + inst_phone_number, + inst_addr_kana, + inst_addr, + postal_number, + village_cd, + prefc_cd, + city_cd, + addr_display_number, + addr_cnt_kana, + addr_cnt, + manage_cd, + delete_sche_reason_cd, + '', + dup_opp_cd, + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + inst_pharm_div, + abolish_ymd, + delete_flg, + filler_1, + filler_2, + filler_3, + filler_4, + filler_5, + regist_date, + create_user, + update_date, + update_user, + sys_regist_date, + regist_prgm_id, + sys_update_date, + update_prgm_id + FROM src05.com_pharm ORDER BY dcf_dsf_inst_cd + """ + return db.execute_select(sql) + except Exception as e: + logger.debug(f'{sql_err_msg}') + raise e + + +def make_csv_data(record_inst_csv: list, record_pharm_cs: list): + # 一時ファイルとして保存する(CSVファイル) + try: + + temporary_dir = tempfile.mkdtemp() + 
csv_file_path = path.join(temporary_dir, vjsk_csv_file_name) + + head_str = ['DCF_DSF_INST_CD', 'INST_DIV_CD', 'ADDR_UNKNOWN_REASON_CD', 'FORM_INST_NAME_KANA', 'INST_NAME_KANA', + 'FORM_INST_NAME_KANJI', 'INST_NAME_KANJI', 'RLTD_UNIV_PRNT_CD', 'BED_NUM', 'CLOSE_FLG', 'ESTAB_SCHE_FLG', + 'CLOSE_START_YM', 'ESTAB_SCHE_YM', 'WARD_ABOLISH_FLG', 'INST_REPRE_CD', 'INST_REPRE_KANA', 'INST_REPRE', + 'PHONE_NUMBER_NON_FLG', 'UNCONF_FLG', 'INST_PHONE_NUMBER', 'INST_ADDR_KANA', 'INST_ADDR', 'POSTAL_NUMBER', + 'VILLAGE_CD', 'PREFC_CD', 'CITY_CD', 'ADDR_DISPLAY_NUMBER', 'ADDR_CNT_KANA', 'ADDR_CNT', 'MANAGE_CD', + 'DELETE_SCHE_REASON_CD', 'HP_ASSRT_CD', 'DUP_OPP_CD', 'INSP_ITEM_MICRB', 'INSP_ITEM_SERUM', 'INSP_ITEM_BLOOD', + 'INSP_ITEM_PATHO', 'INSP_ITEM_PARAS', 'INSP_ITEM_BIOCHEM', 'INSP_ITEM_RI', 'RE_EXAM_CD', 'PRMIT_BED_NUM_OTHER', + 'PRMIT_BED_NUM_MENTAL', 'PRMIT_BED_NUM_TUBER', 'PRMIT_BED_NUM_INFECTION', 'PRMIT_BED_NUM_SUM', 'PRMIT_BED_NUM_GEN', + 'PRMIT_BED_NUM_RCUP', 'PRMIT_BED_MAINT_YMD', 'INST_PHARM_DIV', 'ABOLISH_YMD', 'DELETE_FLG', 'FILLER_1', 'FILLER_2', + 'FILLER_3', 'FILLER_4', 'FILLER_5', 'REGIST_DATE', 'CREATE_USER', 'UPDATE_DATE', 'UPDATE_USER', 'SYS_REGIST_DATE', + 'REGIST_PRGM_ID', 'SYS_UPDATE_DATE', 'UPDATE_PRGM_ID'] + + # Shift-JIS、CRLF、価囲いありで書き込む + with open(csv_file_path, mode='w', encoding='cp932') as csv_file: + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + # ヘッダ行書き込み + writer.writerow(head_str) + + # データ部分書き込み(施設) + for record_inst_data in record_inst_csv: + record_inst_value = list(record_inst_data.values()) + csv_data = ['' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + + # データ部分書き込み(薬局) + for record_pharm_data in record_pharm_cs: + record_pharm_value = list(record_pharm_data.values()) + csv_data = ['' if n is None else n for n in record_pharm_value] + writer.writerow(csv_data) + + except Exception as e: + logger.info('ワークデータの作成に失敗しました。') + logger.info('バッチ処理を異常終了しました。') + raise e + + return csv_file_path diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py index b511a9c8..e0e6ba22 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py @@ -5,6 +5,7 @@ from datetime import datetime from src.aws.s3 import UltmarcBucket from src.batch.common.batch_context import BatchContext from src.batch.ultmarc.datfile import DatFile +from src.batch.ultmarc import export_vjsk_csv_process from src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory import \ UltmarcTableMapperFactory from src.db.database import Database @@ -61,11 +62,19 @@ def exec_import(): def exec_export(): - """V実消化用施設・薬局薬店データ作成処理""" + """V実消化用施設データ作成処理""" if not batch_context.is_ultmarc_imported: - logger.info('アルトマーク取込が行われていないため、V実消化用施設・薬局薬店データ作成処理をスキップします。') + logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') return + try: + logger.info('V実用消化施設データ作成処理: 開始') + export_vjsk_csv_process.exec() + logger.info('V実用消化施設データ作成処理: 終了') + except Exception as e: + raise BatchOperationException(e) + return + def _import_to_ultmarc_table(dat_file: DatFile): db = Database.get_instance() diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index a98c0d16..d77216ff 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -75,11 +75,13 
@@ def exec(): logger.debug(f'{"アルトマーク取込が行われました。" if batch_context.is_ultmarc_imported else "アルトマーク取込が行われませんでした。"}') try: - logger.info('V実消化用施設・薬局薬店データ作成処理:起動') + logger.info('V実消化用施設データ作成処理:起動') +# ***********************実行させるためにtrueにしておく(後で消す)*********************** + batch_context.is_ultmarc_imported = True ultmarc_process.exec_export() - logger.info('V実消化用施設・薬局薬店データ作成処理:終了') + logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: - logger.exception(f'V実消化用施設・薬局薬店データ作成処理エラー(異常終了){e}') + logger.exception(f'V実消化用施設データ作成処理エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS logger.info('日次処理(V実消化)') diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..4144319f 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -15,6 +15,9 @@ ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] +VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] +VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') From 92c14d2fa0f2fb499dcdd91b40e0c4d061a549a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Tue, 4 Jul 2023 15:25:21 +0900 Subject: [PATCH 084/103] =?UTF-8?q?=E4=BB=AE=E5=AE=8C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/jobctrl_daily.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index d77216ff..45e297e9 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -76,8 +76,6 @@ def exec(): try: logger.info('V実消化用施設データ作成処理:起動') -# ***********************実行させるためにtrueにしておく(後で消す)*********************** - batch_context.is_ultmarc_imported = True ultmarc_process.exec_export() logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: From b9a2a0c2f7f2545dc7ad3683ce35644e06437615 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 09:05:50 +0900 Subject: [PATCH 085/103] =?UTF-8?q?=E6=8C=87=E6=91=98=E4=BA=8B=E9=A0=85?= =?UTF-8?q?=E5=8F=8D=E6=98=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/output_arisj_file_process.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py index b8645039..197db381 100644 --- a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py +++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py @@ -283,12 +283,12 @@ def make_csv_data(record_csv: list): # Shift-JIS、CRLF、価囲いありで書き込む with open(csv_file_path, mode='w', encoding='cp932') as csv_file: + # ヘッダ行書き込み + csv_file.write(f"{','.join(head_str)}\n") writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, strict=True ) - # ヘッダ行書き込み - writer.writerow(head_str) # データ部分書き込み for record_data in record_csv: record_value = 
list(record_data.values()) From f5de058284bdaea1d842435f217500ee5a9e9b99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 09:45:58 +0900 Subject: [PATCH 086/103] =?UTF-8?q?=E4=BB=AE=E5=AE=8C=E6=88=90=EF=BC=93?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/ultmarc/export_vjsk_csv_process.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py index 27243e4a..a1b2ff23 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py @@ -243,12 +243,12 @@ def make_csv_data(record_inst_csv: list, record_pharm_cs: list): # Shift-JIS、CRLF、価囲いありで書き込む with open(csv_file_path, mode='w', encoding='cp932') as csv_file: + # ヘッダ行書き込み + csv_file.write(f"{','.join(head_str)}\n") writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, strict=True ) - # ヘッダ行書き込み - writer.writerow(head_str) # データ部分書き込み(施設) for record_inst_data in record_inst_csv: From 0c2b3d22f4cf7e2542c137989e66231c7a7bfd8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 10:45:31 +0900 Subject: [PATCH 087/103] =?UTF-8?q?pip=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/Pipfile.lock | 235 ++++++-------------------- 1 file changed, 55 insertions(+), 180 deletions(-) diff --git a/ecs/jskult-batch-monthly/Pipfile.lock b/ecs/jskult-batch-monthly/Pipfile.lock index 3e58b727..10d05022 100644 --- a/ecs/jskult-batch-monthly/Pipfile.lock +++ b/ecs/jskult-batch-monthly/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "0b1dbc40a5069476aa66f172175ae24ffae385c335ff8e4794c1b25a111b9e43" + "sha256": "a2be870e254760b62220c10400b05fa66d24b2cc1bcd6f21044735e320a62e53" }, "pipfile-spec": 6, "requires": { @@ -18,19 +18,19 @@ "default": { "boto3": { "hashes": [ - "sha256:7694df61bd6d253d6d9db34adbcd218b8efbe7f894a4a51611f7e0587ae33218", - "sha256:fe49f91e057b241b23a58c74c2f22654216788052ce95b73439fdb18bfd0e155" + "sha256:908f9c277325d68963dfcfce963a05336f0eb19505fc239c0ab9d01f4cba0296", + "sha256:e1e535e9fb23977252f13652ed2fa9b4f2d59a53b04a5f2fad3ee415b6a3b2b0" ], "index": "pypi", - "version": "==1.26.159" + "version": "==1.27.0" }, "botocore": { "hashes": [ - "sha256:86fe4641fd32dc6a5be4a289e00dc180448fc7bc37abac21bd624656985eef62", - "sha256:da1c61757d466b82cc89f379a50662064bcb0beb67cc6efa1fbfc9a341bd08b0" + "sha256:b9cb5b78a289f0615a48d85066f01869029aa41b95993f2c0c55003df037c23f", + "sha256:cac1333f41ec98e6f75bbba3f2c74b9e76aa3847469ecea6e7773a0af0049bee" ], "markers": "python_version >= '3.7'", - "version": "==1.29.159" + "version": "==1.30.0" }, "greenlet": { "hashes": [ @@ -107,11 +107,11 @@ }, "pymysql": { "hashes": [ - "sha256:766b72e4370aba94e6266a4dbd62c51fbc6a894c38de25a41a8a01f0461a2387", - "sha256:aade29b861e81a3c68a9e90d43f3db257940c0208983a0128b82f1a4cef639aa" + "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96", + "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7" ], "index": "pypi", - "version": "==1.1.0rc2" + "version": "==1.1.0" }, "python-dateutil": { "hashes": [ @@ -139,50 +139,50 @@ }, "sqlalchemy": { "hashes": [ - 
"sha256:0db6734cb5644c55d0262a813b764c6e2cda1e66e939a488b3d6298cdc7344c2", - "sha256:0e4645b260cfe375a0603aa117f0a47680864cf37833129da870919e88b08d8f", - "sha256:131f0c894c6572cb1bdcf97c92d999d3128c4ff1ca13061296057072f61afe13", - "sha256:1e2caba78e7d1f5003e88817b7a1754d4e58f4a8f956dc423bf8e304c568ab09", - "sha256:2de1477af7f48c633b8ecb88245aedd811dca88e88aee9e9d787b388abe74c44", - "sha256:2f3b6c31b915159b96b68372212fa77f69230b0a32acab40cf539d2823954f5a", - "sha256:3ef876615ff4b53e2033022195830ec4941a6e21068611f8d77de60203b90a98", - "sha256:43e69c8c1cea0188b7094e22fb93ae1a1890aac748628b7e925024a206f75368", - "sha256:53081c6fce0d49bb36d05f12dc87e008c9b0df58a163b792c5fc4ac638925f98", - "sha256:5a934eff1a2882137be3384826f997db8441d43b61fda3094923e69fffe474be", - "sha256:5e8522b49e0e640287308b68f71cc338446bbe1c226c8f81743baa91b0246e92", - "sha256:61f2035dea56ff1a429077e481496f813378beb02b823d2e3e7eb05bc1a7a8ca", - "sha256:63ea36c08792a7a8a08958bc806ecff6b491386feeaf14607c3d9d2d9325e67f", - "sha256:6e85e315725807c127ad8ba3d628fdb861cf9ebfb0e10c39a97c01e257cdd71b", - "sha256:7641f6ed2682de84d77c4894cf2e43700f3cf7a729361d7f9cac98febf3d8614", - "sha256:7be04dbe3470fe8dd332fdb48c979887c381ef6c635eddf2dec43d2766111be4", - "sha256:81d867c1be5abd49f7e547c108391f371a9d980ba7ec34666c50d683f782b754", - "sha256:8544c6e62eacb77d5106e2055ef10f2407fc0dbd547e879f8745b2032eefd2bc", - "sha256:8d3cbdb2f07fb0e4b897dc1df39166735e194fb946f28f26f4c9f9801c8b24f7", - "sha256:8d6ef848e5afcd1bda3e9a843751f845c0ca888b61e669237680e913d84ec206", - "sha256:8e2569dac4e3cb85365b91ab569d06a221e0e17e65ce59949d00c3958946282b", - "sha256:90d320fde566b864adbc19abb40ecb80f4e25d6f084639969bb972d5cca16858", - "sha256:91eb8f89fcce8f709f8a4d65d265bc48a80264ee14c7c9e955f3222f19b4b39c", - "sha256:a08a791c75d6154d46914d1e23bd81d9455f2950ec1de81f2723848c593d2c8b", - "sha256:a2e9f50a906d0b81292576a9fb458f8cace904c81a67088f4a2ca9ff2856f55d", - "sha256:a5a2856e12cf5f54301ddf043bcbf0552561d61555e1bcf348b63f42b8e1eec2", - "sha256:b2801f85c5c0293aa710f8aa5262c707a83c1c203962ae5a22b4d9095e71aa9d", - "sha256:b72f4e4def50414164a1d899f2ce4e782a029fad0ed5585981d1611e8ae29a74", - "sha256:bdaf89dd82f4a0e1b8b5ffc9cdc0c9551be6175f7eee5af6a838e92ba2e57100", - "sha256:c5e333b81fe10d14efebd4e9429b7bb865ed9463ca8bef07a7136dfa1fd4a37b", - "sha256:ce1fc3f64fd42d5f763d6b83651471f32920338a1ba107a3186211474861af57", - "sha256:d0c96592f54edd571e00ba6b1ed5df8263328ca1da9e78088c0ebc93c2e6562c", - "sha256:dc97238fa44be86971270943a0c21c19ce18b8d1596919048e57912e8abc02cc", - "sha256:e19546924f0cf2ec930d1faf318b7365e5827276410a513340f31a2b423e96a4", - "sha256:f2938edc512dd1fa48653e14c1655ab46144d4450f0e6b33da7acd8ba77fbfd7", - "sha256:f387b496a4c9474d8580195bb2660264a3f295a04d3a9d00f4fa15e9e597427e", - "sha256:f409f35a0330ab0cb18ece736b86d8b8233c64f4461fcb10993f67afc0ac7e5a", - "sha256:f662cf69484c59f8a3435902c40dfc34d86050bdb15e23d437074ce9f153306b", - "sha256:fbcc51fdbc89fafe4f4fe66f59372a8be88ded04de34ef438ab04f980beb12d4", - "sha256:fc1dae11bd5167f9eb53b3ccad24a79813004612141e76de21cf4c028dc30b34", - "sha256:ff6496ad5e9dc8baeb93a151cc2f599d01e5f8928a2aaf0b09a06428fdbaf553" + "sha256:04383f1e3452f6739084184e427e9d5cb4e68ddc765d52157bf5ef30d5eca14f", + "sha256:125f9f7e62ddf8b590c069729080ffe18b68a20d9882eb0947f72e06274601d7", + "sha256:1822620c89779b85f7c23d535c8e04b79c517739ae07aaed48c81e591ed5498e", + "sha256:21583808d37f126a647652c90332ac1d3a102edf3c94bcc3319edcc0ea2300cc", + "sha256:218fb20c01e95004f50a3062bf4c447dcb360cab8274232f31947e254f118298", + 
"sha256:2269b1f9b8be47e52b70936069a25a3771eff53367aa5cc59bb94f28a6412e13", + "sha256:234678ed6576531b8e4be255b980f20368bf07241a2e67b84e6b0fe679edb9c4", + "sha256:28da17059ecde53e2d10ba813d38db942b9f6344360b2958b25872d5cb729d35", + "sha256:2c6ff5767d954f6091113fedcaaf49cdec2197ae4c5301fe83d5ae4393c82f33", + "sha256:36a87e26fe8fa8c466fae461a8fcb780d0a1cbf8206900759fc6fe874475a3ce", + "sha256:394ac3adf3676fad76d4b8fcecddf747627f17f0738dc94bac15f303d05b03d4", + "sha256:40a3dc52b2b16f08b5c16b9ee7646329e4b3411e9280e5e8d57b19eaa51cbef4", + "sha256:48111d56afea5699bab72c38ec95561796b81befff9e13d1dd5ce251ab25f51d", + "sha256:48b40dc2895841ea89d89df9eb3ac69e2950a659db20a369acf4259f68e6dc1f", + "sha256:513411d73503a6fc5804f01fae3b3d44f267c1b3a06cfeac02e9286a7330e857", + "sha256:51736cfb607cf4e8fafb693906f9bc4e5ee55be0b096d44bd7f20cd8489b8571", + "sha256:5f40e3a7d0a464f1c8593f2991e5520b2f5b26da24e88000bbd4423f86103d4f", + "sha256:6150560fcffc6aee5ec9a97419ac768c7a9f56baf7a7eb59cb4b1b6a4d463ad9", + "sha256:724355973297bbe547f3eb98b46ade65a67a3d5a6303f17ab59a2dc6fb938943", + "sha256:74ddcafb6488f382854a7da851c404c394be3729bb3d91b02ad86c5458140eff", + "sha256:7830e01b02d440c27f2a5be68296e74ccb55e6a5b5962ffafd360b98930b2e5e", + "sha256:7f31d4e7ca1dd8ca5a27fd5eaa0f9e2732fe769ff7dd35bf7bba179597e4df07", + "sha256:8741d3d401383e54b2aada37cbd10f55c5d444b360eae3a82f74a2be568a7710", + "sha256:910d45bf3673f0e4ef13858674bd23cfdafdc8368b45b948bf511797dbbb401d", + "sha256:aa995b21f853864996e4056d9fde479bcecf8b7bff4beb3555eebbbba815f35d", + "sha256:af7e2ba75bf84b64adb331918188dda634689a2abb151bc1a583e488363fd2f8", + "sha256:b0eaf82cc844f6b46defe15ad243ea00d1e39ed3859df61130c263dc7204da6e", + "sha256:b114a16bc03dfe20b625062e456affd7b9938286e05a3f904a025b9aacc29dd4", + "sha256:b47be4c6281a86670ea5cfbbbe6c3a65366a8742f5bc8b986f790533c60b5ddb", + "sha256:ba03518e64d86f000dc24ab3d3a1aa876bcbaa8aa15662ac2df5e81537fa3394", + "sha256:cc9c2630c423ac4973492821b2969f5fe99d9736f3025da670095668fbfcd4d5", + "sha256:cf07ff9920cb3ca9d73525dfd4f36ddf9e1a83734ea8b4f724edfd9a2c6e82d9", + "sha256:cf175d26f6787cce30fe6c04303ca0aeeb0ad40eeb22e3391f24b32ec432a1e1", + "sha256:d0aeb3afaa19f187a70fa592fbe3c20a056b57662691fd3abf60f016aa5c1848", + "sha256:e186e9e95fb5d993b075c33fe4f38a22105f7ce11cecb5c17b5618181e356702", + "sha256:e2d5c3596254cf1a96474b98e7ce20041c74c008b0f101c1cb4f8261cb77c6d3", + "sha256:e3189432db2f5753b4fde1aa90a61c69976f4e7e31d1cf4611bfe3514ed07478", + "sha256:e3a6b2788f193756076061626679c5c5a6d600ddf8324f986bc72004c3e9d92e", + "sha256:ead58cae2a089eee1b0569060999cb5f2b2462109498a0937cc230a7556945a1", + "sha256:f2f389f77c68dc22cb51f026619291c4a38aeb4b7ecb5f998fd145b2d81ca513", + "sha256:f593170fc09c5abb1205a738290b39532f7380094dc151805009a07ae0e85330" ], "index": "pypi", - "version": "==2.0.16" + "version": "==2.0.17" }, "tenacity": { "hashes": [ @@ -194,11 +194,11 @@ }, "typing-extensions": { "hashes": [ - "sha256:16224afa8cc2b3679dd9e9a1efe719dd2e20a03f0cc2e4cc4c97870ae9622532", - "sha256:3c2c2cd887648efa0ea8f8ba4260a1213058e8e4a25a6a6f4e084740b2c858e2" + "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36", + "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2" ], "markers": "python_version >= '3.7'", - "version": "==4.7.0rc1" + "version": "==4.7.1" }, "urllib3": { "hashes": [ @@ -218,91 +218,6 @@ "index": "pypi", "version": "==2.0.2" }, - "colorama": { - "hashes": [ - "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", - 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" - ], - "markers": "sys_platform == 'win32'", - "version": "==0.4.6" - }, - "coverage": { - "extras": [ - "toml" - ], - "hashes": [ - "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", - "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2", - "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a", - "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", - "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", - "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6", - "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7", - "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f", - "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02", - "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c", - "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063", - "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", - "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5", - "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959", - "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", - "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", - "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", - "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9", - "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5", - "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f", - "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", - "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", - "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9", - "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f", - "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", - "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb", - "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1", - "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb", - "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250", - "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e", - "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", - "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5", - "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", - "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2", - "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", - "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", - "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", - "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", - "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9", - "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", - "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0", - "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9", - "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", - 
"sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050", - "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d", - "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6", - "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353", - "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb", - "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e", - "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8", - "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495", - "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2", - "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd", - "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27", - "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1", - "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818", - "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", - "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e", - "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850", - "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3" - ], - "markers": "python_version >= '3.7'", - "version": "==7.2.7" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, "flake8": { "hashes": [ "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7", @@ -311,14 +226,6 @@ "index": "pypi", "version": "==6.0.0" }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, "mccabe": { "hashes": [ "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", @@ -327,22 +234,6 @@ "markers": "python_version >= '3.6'", "version": "==0.7.0" }, - "packaging": { - "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1" - }, - "pluggy": { - "hashes": [ - "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", - "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3" - ], - "markers": "python_version >= '3.7'", - "version": "==1.2.0" - }, "pycodestyle": { "hashes": [ "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053", @@ -359,22 +250,6 @@ "markers": "python_version >= '3.6'", "version": "==3.0.1" }, - "pytest": { - "hashes": [ - "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295", - "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b" - ], - "index": "pypi", - "version": "==7.3.2" - }, - "pytest-cov": { - "hashes": [ - "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", - "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a" - ], - "index": "pypi", - "version": "==4.1.0" - }, "tomli": { "hashes": [ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", From 3fa372b5da8c56adc2a7f785374aad8c6f63d42a 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 11:24:53 +0900 Subject: [PATCH 088/103] =?UTF-8?q?=E3=82=B3=E3=83=A1=E3=83=B3=E3=83=88?= =?UTF-8?q?=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/output_arisj_file_process.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py index 197db381..9d7d5587 100644 --- a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py +++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py @@ -281,10 +281,11 @@ def make_csv_data(record_csv: list): 'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT', 'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', 'TD_UPDATE'] - # Shift-JIS、CRLF、価囲いありで書き込む with open(csv_file_path, mode='w', encoding='cp932') as csv_file: - # ヘッダ行書き込み + # ヘッダ行書き込み(くくり文字をつけない為にwriteしています) csv_file.write(f"{','.join(head_str)}\n") + + # Shift-JIS、CRLF、価囲いありで書き込む writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, strict=True From 6df0c43e78a68ff53184b17c841ef8b52105e34c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 11:45:29 +0900 Subject: [PATCH 089/103] =?UTF-8?q?ultmarc=5Fprocess=E3=82=92=E4=BD=BF?= =?UTF-8?q?=E3=82=8F=E3=81=AA=E3=81=84=E3=82=88=E3=81=86=E3=81=AB=E3=81=97?= =?UTF-8?q?=E3=81=9F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/ultmarc/export_vjsk_csv_process.py | 5 +++-- .../src/batch/ultmarc/ultmarc_process.py | 17 +---------------- ecs/jskult-batch-daily/src/jobctrl_daily.py | 6 +++++- 3 files changed, 9 insertions(+), 19 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py index a1b2ff23..ddd2a0ae 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py @@ -241,10 +241,11 @@ def make_csv_data(record_inst_csv: list, record_pharm_cs: list): 'FILLER_3', 'FILLER_4', 'FILLER_5', 'REGIST_DATE', 'CREATE_USER', 'UPDATE_DATE', 'UPDATE_USER', 'SYS_REGIST_DATE', 'REGIST_PRGM_ID', 'SYS_UPDATE_DATE', 'UPDATE_PRGM_ID'] - # Shift-JIS、CRLF、価囲いありで書き込む with open(csv_file_path, mode='w', encoding='cp932') as csv_file: - # ヘッダ行書き込み + # ヘッダ行書き込み(くくり文字を加えない為にwriteで出力する) csv_file.write(f"{','.join(head_str)}\n") + + # Shift-JIS、CRLF、価囲いありで書き込む writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, strict=True diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py index e0e6ba22..0e2ffe6a 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py @@ -5,7 +5,7 @@ from datetime import datetime from src.aws.s3 import UltmarcBucket from src.batch.common.batch_context import BatchContext from src.batch.ultmarc.datfile import DatFile -from src.batch.ultmarc import export_vjsk_csv_process + from src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory import \ UltmarcTableMapperFactory from src.db.database import Database 
@@ -61,21 +61,6 @@ def exec_import(): raise BatchOperationException(e) -def exec_export(): - """V実消化用施設データ作成処理""" - if not batch_context.is_ultmarc_imported: - logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') - return - - try: - logger.info('V実用消化施設データ作成処理: 開始') - export_vjsk_csv_process.exec() - logger.info('V実用消化施設データ作成処理: 終了') - except Exception as e: - raise BatchOperationException(e) - return - - def _import_to_ultmarc_table(dat_file: DatFile): db = Database.get_instance() try: diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 45e297e9..3b314b26 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -13,6 +13,7 @@ from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants +from src.batch.ultmarc import export_vjsk_csv_process logger = get_logger('日次処理コントロール') @@ -75,8 +76,11 @@ def exec(): logger.debug(f'{"アルトマーク取込が行われました。" if batch_context.is_ultmarc_imported else "アルトマーク取込が行われませんでした。"}') try: + if not batch_context.is_ultmarc_imported: + logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') + return logger.info('V実消化用施設データ作成処理:起動') - ultmarc_process.exec_export() + export_vjsk_csv_process.exec() logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: logger.exception(f'V実消化用施設データ作成処理エラー(異常終了){e}') From 819c1543f8c01b4d784b3da2548a7924f34e6a22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 13:54:11 +0900 Subject: [PATCH 090/103] =?UTF-8?q?=E6=8C=87=E6=91=98=E4=BA=8B=E9=A0=85?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-monthly/src/aws/s3.py | 7 +-- .../src/batch/output_arisj_file_process.py | 51 ++++++++++--------- 2 files changed, 28 insertions(+), 30 deletions(-) diff --git a/ecs/jskult-batch-monthly/src/aws/s3.py b/ecs/jskult-batch-monthly/src/aws/s3.py index 8804bba9..17ed0fe9 100644 --- a/ecs/jskult-batch-monthly/src/aws/s3.py +++ b/ecs/jskult-batch-monthly/src/aws/s3.py @@ -69,10 +69,7 @@ class ArisjBucket(S3Bucket): _bucket_name = environment.ARISJ_DATA_BUCKET _folder = environment.ARISJ_BACKUP_FOLDER - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def s3_arisj_csv_upload(self, arisj_create_csv: str, csv_file_path: str): + def upload_arisj_csv_file(self, arisj_create_csv: str, csv_file_path: str): # s3にCSVファイルをUPする Bucket = environment.ARISJ_DATA_BUCKET folder = environment.ARISJ_DATA_FOLDER @@ -81,7 +78,7 @@ class ArisjBucket(S3Bucket): s3_client.upload_file(csv_file_path, Bucket, csv_file_name) return - def backup_dat_file(self, dat_file_key: str, datetime_key: str): + def backup_arisj_csv_file(self, dat_file_key: str, datetime_key: str): # バックアップバケットにコピー arisj_backup_bucket = ArisjBackupBucket() folder = environment.ARISJ_DATA_FOLDER diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py index 9d7d5587..d3f81597 100644 --- a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py +++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py @@ -12,13 +12,14 @@ import csv logger = get_logger('ARIS-J連携データ出力') -create_date = datetime.now().strftime('%Y%m%d%H%M%S') -arisj_create_csv = f'D0004_ARIS_M_DCF_{create_date}.csv' 
sql_err_msg = "SQL実行エラーです。" def exec(): """ 実消化&アルトマーク月次バッチ """ + create_date = datetime.now().strftime('%Y%m%d%H%M%S') + arisj_csv_file_name = f'D0004_ARIS_M_DCF_{create_date}.csv' + try: logger.info('バッチ処理を開始しました。') @@ -34,30 +35,30 @@ def exec(): db.begin() # 正常系データの反映 - # 過去分は不要のため、デリート - physical_wk_inst_aris_if_delete(db) + # 前回保管した施設IFワークを削除する + delete_previous_wk_inst_aris_if_record(db) # 正常系データを取得しWKテーブルに保存する。 - wk_inst_aris_if_insert_into(db) + insert_normal_record_into_wk_inst_aris_if(db) # 正常系データの件数を取得 - suc_count = wk_inst_aris_if_count(db) + suc_count = count_wk_inst_aris_if_record(db) # 警告系データの反映 - # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 - physical_wk_inst_aris_if_wrn_delete(db) + # 前回保管した施設IF警告ワークを削除する + delete_previous_wk_inst_aris_if_wrn_record(db) # 異常系データを取得しWKテーブルに保存する。 - wk_inst_aris_if_wrn_insert_into(db) + insert_abnormal_record_into_wk_inst_aris_if_wrn(db) # 異常系データの件数を取得 - wrn_count = wk_inst_aris_if_wrn_count(db) + wrn_count = count_wk_inst_aris_if_wrn_record(db) # CSVファイルの作成用のSQL実行 record_csv = csv_data_select(db) # CSVファイル作成 - csv_file_path = make_csv_data(record_csv) + csv_file_path = make_csv_data(record_csv, arisj_csv_file_name) # トランザクションの終了 db.commit() @@ -66,18 +67,18 @@ def exec(): sum_count = suc_count + wrn_count logger.info(f'(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})') + arisj_bucket = ArisjBucket() # CSVファイル移動処理 try: - ArisjBucket().s3_arisj_csv_upload(arisj_create_csv, csv_file_path) + arisj_bucket.upload_arisj_csv_file(arisj_csv_file_name, csv_file_path) except Exception as e: logger.info('S3バケットArisjへのCSVデータ、移動できませんでした。') raise e # 処理後ファイルをバックアップ try: - arisj_bucket = ArisjBucket() batch_context = BatchContext.get_instance() - arisj_bucket.backup_dat_file(arisj_create_csv, batch_context.syor_date) + arisj_bucket.backup_arisj_csv_file(arisj_csv_file_name, batch_context.syor_date) except Exception as e: logger.info('S3バケットArisjバックアップへCSVデータ、コピーできませんでした。') raise e @@ -93,8 +94,8 @@ def exec(): db.disconnect() -def physical_wk_inst_aris_if_delete(db): - # 過去分は不要のため、デリート +def delete_previous_wk_inst_aris_if_record(db): + # 前回保管した施設IFワークを削除する try: # WKテーブルの過去分削除SQL sql = """\ @@ -107,7 +108,7 @@ def physical_wk_inst_aris_if_delete(db): raise e -def wk_inst_aris_if_insert_into(db): +def insert_normal_record_into_wk_inst_aris_if(db): # 正常系データを取得しWKテーブルに保存する。 try: # 正常系データを取得しWKテーブルに保存SQL @@ -155,7 +156,7 @@ def wk_inst_aris_if_insert_into(db): raise e -def wk_inst_aris_if_count(db): +def count_wk_inst_aris_if_record(db): # 正常系データの件数を取得 try: # 正常系データの件数を取得SQL @@ -169,8 +170,8 @@ def wk_inst_aris_if_count(db): raise e -def physical_wk_inst_aris_if_wrn_delete(db): - # 過去分は不要のため、DWH.WK_INST_ARIS_IF_WRNをデリートする。 +def delete_previous_wk_inst_aris_if_wrn_record(db): + # 前回保管した施設IF警告ワークを削除する try: # 異常系WKテーブルの過去分削除SQL sql = """\ @@ -184,7 +185,7 @@ def physical_wk_inst_aris_if_wrn_delete(db): raise e -def wk_inst_aris_if_wrn_insert_into(db): +def insert_abnormal_record_into_wk_inst_aris_if_wrn(db): # 異常系データを取得しWKテーブルに保存する。 try: # 異常系データを取得しWKテーブルに保存SQL @@ -238,7 +239,7 @@ def wk_inst_aris_if_wrn_insert_into(db): raise e -def wk_inst_aris_if_wrn_count(db): +def count_wk_inst_aris_if_wrn_record(db): # 異常系データの件数を取得 try: # 異常系データの件数を取得SQL @@ -271,18 +272,18 @@ def csv_data_select(db): raise e -def make_csv_data(record_csv: list): +def make_csv_data(record_csv: list, arisj_csv_file_name: str): # 一時ファイルとして保存する(CSVファイル) try: temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, arisj_create_csv) + csv_file_path = 
path.join(temporary_dir, arisj_csv_file_name) head_str = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TJ_HOSPITALSHORT', 'TK_HOSPITAL', 'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT', 'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', 'TD_UPDATE'] with open(csv_file_path, mode='w', encoding='cp932') as csv_file: - # ヘッダ行書き込み(くくり文字をつけない為にwriteしています) + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) csv_file.write(f"{','.join(head_str)}\n") # Shift-JIS、CRLF、価囲いありで書き込む From 552f2475e3d96846d5973bf834a2cb6e08798270 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 14:10:41 +0900 Subject: [PATCH 091/103] =?UTF-8?q?=E3=83=8D=E3=83=BC=E3=83=9F=E3=83=B3?= =?UTF-8?q?=E3=82=B0=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...rt_vjsk_csv_process.py => vjsk_process.py} | 31 +++++++++---------- ecs/jskult-batch-daily/src/jobctrl_daily.py | 4 +-- 2 files changed, 17 insertions(+), 18 deletions(-) rename ecs/jskult-batch-daily/src/batch/ultmarc/{export_vjsk_csv_process.py => vjsk_process.py} (91%) diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py similarity index 91% rename from ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py rename to ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py index ddd2a0ae..9c0b83a1 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/export_vjsk_csv_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py @@ -1,6 +1,6 @@ """アルトマークデータ処理""" -from src.aws.s3 import UltmarcBucket, VjskBucket +from src.aws.s3 import VjskBucket from src.batch.common.batch_context import BatchContext from src.db.database import Database @@ -10,14 +10,13 @@ import os.path as path import csv logger = get_logger('V実用消化施設データ作成処理') -ultmarc_bucket = UltmarcBucket() -batch_context = BatchContext.get_instance() sql_err_msg = "SQL実行エラーです。" -vjsk_csv_file_name = 'ComInst.csv' def exec(): + vjsk_csv_file_name = 'ComInst.csv' + db = Database.get_instance() try: logger.info('バッチ処理を開始しました。') @@ -32,11 +31,11 @@ def exec(): raise e # CSVファイルの作成用のSQL実行(施設) - record_inst_csv = csv_data_inst_select(db) + record_inst = select_inst_record(db) # CSVファイルの作成用のSQL実行(薬局) - record_pharm_csv = csv_data_pharm_select(db) + record_pharm = select_pharm_record(db) # CSVファイル作成 - csv_file_path = make_csv_data(record_inst_csv, record_pharm_csv) + csv_file_path = make_csv_data(record_inst, record_pharm, vjsk_csv_file_name) vjsk_bucket = VjskBucket() try: @@ -54,7 +53,7 @@ def exec(): logger.info('バックアップバケットへCSVデータをコピーできませんでした。') raise e - csv_count = len(record_inst_csv) + len(record_pharm_csv) + csv_count = len(record_inst) + len(record_pharm) logger.info(f'CSV出力件数: {csv_count}。') logger.info('バッチ処理を正常に終了しました。') except Exception as e: @@ -66,8 +65,8 @@ def exec(): return -def csv_data_inst_select(db): - # CSVファイルの作成用のSQL実行(施設) +def select_inst_record(db): + # CSVファイル作成用のSQL実行(施設) try: # 施設テーブル検索SQL sql = """\ @@ -144,8 +143,8 @@ def csv_data_inst_select(db): raise e -def csv_data_pharm_select(db): - # CSVファイルの作成用のSQL実行(薬局) +def select_pharm_record(db): + # CSVファイル作成用のSQL実行(薬局) try: # 薬局テーブル検索SQL sql = """\ @@ -222,7 +221,7 @@ def csv_data_pharm_select(db): raise e -def make_csv_data(record_inst_csv: list, record_pharm_cs: list): +def make_csv_data(record_inst: list, record_pharm: list, vjsk_csv_file_name: str): # 一時ファイルとして保存する(CSVファイル) try: @@ 
-242,7 +241,7 @@ def make_csv_data(record_inst_csv: list, record_pharm_cs: list): 'REGIST_PRGM_ID', 'SYS_UPDATE_DATE', 'UPDATE_PRGM_ID'] with open(csv_file_path, mode='w', encoding='cp932') as csv_file: - # ヘッダ行書き込み(くくり文字を加えない為にwriteで出力する) + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) csv_file.write(f"{','.join(head_str)}\n") # Shift-JIS、CRLF、価囲いありで書き込む @@ -252,13 +251,13 @@ def make_csv_data(record_inst_csv: list, record_pharm_cs: list): ) # データ部分書き込み(施設) - for record_inst_data in record_inst_csv: + for record_inst_data in record_inst: record_inst_value = list(record_inst_data.values()) csv_data = ['' if n is None else n for n in record_inst_value] writer.writerow(csv_data) # データ部分書き込み(薬局) - for record_pharm_data in record_pharm_cs: + for record_pharm_data in record_pharm: record_pharm_value = list(record_pharm_data.values()) csv_data = ['' if n is None else n for n in record_pharm_value] writer.writerow(csv_data) diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 3b314b26..0e92bd1b 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -13,7 +13,7 @@ from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants -from src.batch.ultmarc import export_vjsk_csv_process +from src.batch.ultmarc import vjsk_process logger = get_logger('日次処理コントロール') @@ -80,7 +80,7 @@ def exec(): logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') return logger.info('V実消化用施設データ作成処理:起動') - export_vjsk_csv_process.exec() + vjsk_process.exec() logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: logger.exception(f'V実消化用施設データ作成処理エラー(異常終了){e}') From c1a0f9a11e0cafa9ffe75886e633d1c11a933bef Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 16:29:04 +0900 Subject: [PATCH 092/103] =?UTF-8?q?=E4=BB=96=E3=82=BF=E3=82=B9=E3=82=AF?= =?UTF-8?q?=E3=81=AB=E5=BD=B1=E9=9F=BF=E3=81=8C=E3=81=82=E3=82=8B=E3=81=AE?= =?UTF-8?q?=E3=81=A7develop=E3=83=9E=E3=83=BC=E3=82=B8=E3=82=92=E5=84=AA?= =?UTF-8?q?=E5=85=88=E3=81=99=E3=82=8B=E3=81=9F=E3=82=81=E3=80=81=E6=9C=AA?= =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=81=AE=E3=83=AD=E3=82=B8=E3=83=83?= =?UTF-8?q?=E3=82=AF=E3=82=92=E4=B8=80=E6=97=A6=E3=82=B3=E3=83=A1=E3=83=B3?= =?UTF-8?q?=E3=83=88=E3=82=A2=E3=82=A6=E3=83=88=E3=81=97=E3=81=A6=E5=8B=95?= =?UTF-8?q?=E4=BD=9C=E3=81=AB=E5=BD=B1=E9=9F=BF=E3=81=97=E3=81=AA=E3=81=84?= =?UTF-8?q?=E3=82=88=E3=81=86=E3=81=AB=E3=81=99=E3=82=8B=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 3ef87186..cef4e1ec 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -88,11 +88,13 @@ class VjskDataLoadManager: local_file_name = target["src_file_path"] # tsvファイル末尾行のTABの数が総定数と一致しない場合は例外をスロー - tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) - expect_tabs = mapper.get_file_column_separators(target["condkey"]) - if tsv_tabs != expect_tabs: - msg = f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" - raise BatchOperationException(msg) + # 
TODO: ↓↓↓developへのマージを優先させたいので、未テストのロジックはコメントアウトする + # tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) + # expect_tabs = mapper.get_file_column_separators(target["condkey"]) + # if tsv_tabs != expect_tabs: + # msg = f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" + # raise BatchOperationException(msg) + # TODO: ↑↑↑developへのマージを優先させたいので、未テストのロジックはコメントアウトする # データベース登録 self._import_to_db(local_file_name, target["condkey"]) From 449dedd3a5d5ce3a6caa99df3f0e19484c8b393f Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 16:30:35 +0900 Subject: [PATCH 093/103] =?UTF-8?q?fix:=20=E3=83=AD=E3=82=B0=E3=82=A4?= =?UTF-8?q?=E3=83=B3=E6=99=82=E3=81=AE=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AF?= =?UTF-8?q?ERROR=E3=83=AD=E3=82=B0=E3=81=A7=E5=87=BA=E3=81=95=E3=81=AA?= =?UTF-8?q?=E3=81=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 5659c993..c8a5663c 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -130,7 +130,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(f'SSOログイン失敗:{e}') + logger.info(f'SSOログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) # トークンからユーザーIDを取得 From afd5db75e5e2bac621e0925d35869357cf2eb08c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 16:32:49 +0900 Subject: [PATCH 094/103] =?UTF-8?q?=E6=8C=87=E6=91=98=E4=BA=8B=E9=A0=85?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...cess.py => output_vjsk_inst_pharm_data.py} | 69 ++++++++++--------- ecs/jskult-batch-daily/src/jobctrl_daily.py | 7 +- 2 files changed, 38 insertions(+), 38 deletions(-) rename ecs/jskult-batch-daily/src/batch/ultmarc/{vjsk_process.py => output_vjsk_inst_pharm_data.py} (84%) diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py similarity index 84% rename from ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py rename to ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py index 9c0b83a1..1fb338b7 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/vjsk_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py @@ -1,4 +1,4 @@ -"""アルトマークデータ処理""" +"""output_vjsk_inst_pharm_data""" from src.aws.s3 import VjskBucket from src.batch.common.batch_context import BatchContext @@ -9,7 +9,7 @@ import tempfile import os.path as path import csv -logger = get_logger('V実用消化施設データ作成処理') +logger = get_logger('V実消化用施設データ作成処理') sql_err_msg = "SQL実行エラーです。" @@ -17,15 +17,20 @@ sql_err_msg = "SQL実行エラーです。" def exec(): vjsk_csv_file_name = 'ComInst.csv' + # バッチ共通設定を取得 + batch_context = BatchContext.get_instance() + + if not batch_context.is_ultmarc_imported: + logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') + return + db = Database.get_instance() try: - logger.info('バッチ処理を開始しました。') + logger.info('処理開始') try: # DB接続 db.connect() - # ファイル単位でトランザクションを行う - db.begin() except Exception as e: logger.info('DB接続エラーです。') raise e @@ -42,7 +47,7 @@ def exec(): # s3へデータ移動 vjsk_bucket.upload_dat_file(vjsk_csv_file_name, 
csv_file_path) except Exception as e: - logger.info('S3バケットDWHへCSVデータを作成できませんでした。') + logger.info('S3バケットにCSVデータを作成できませんでした。') raise e try: @@ -54,13 +59,11 @@ def exec(): raise e csv_count = len(record_inst) + len(record_pharm) - logger.info(f'CSV出力件数: {csv_count}。') - logger.info('バッチ処理を正常に終了しました。') + logger.info(f'CSV出力件数: {csv_count}') + logger.info('正常終了') except Exception as e: raise e finally: - # 終了時に必ずコミットする - db.commit() db.disconnect() return @@ -155,14 +158,14 @@ def select_pharm_record(db): inst_name_kana, form_inst_name_kanji, inst_name_kanji, - '', - '', + '' AS rltd_univ_prnt_cd, + '' AS bed_num, close_flg, estab_sche_flg, close_start_ym, estab_sche_ym, - '', - '', + '' AS ward_abolish_flg, + '' AS inst_repre_cd, inst_repre_kana, inst_repre, phone_number_non_flg, @@ -179,24 +182,24 @@ def select_pharm_record(db): addr_cnt, manage_cd, delete_sche_reason_cd, - '', + '' AS hp_assrt_cd, dup_opp_cd, - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', - '', + '' AS insp_item_micrb, + '' AS insp_item_serum, + '' AS insp_item_blood, + '' AS insp_item_patho, + '' AS insp_item_paras, + '' AS insp_item_biochem, + '' AS insp_item_ri, + '' AS re_exam_cd, + '' AS prmit_bed_num_other, + '' AS prmit_bed_num_mental, + '' AS prmit_bed_num_tuber, + '' AS prmit_bed_num_infection, + '' AS prmit_bed_num_sum, + '' AS prmit_bed_num_gen, + '' AS prmit_bed_num_rcup, + '' AS prmit_bed_maint_ymd, inst_pharm_div, abolish_ymd, delete_flg, @@ -240,7 +243,7 @@ def make_csv_data(record_inst: list, record_pharm: list, vjsk_csv_file_name: str 'FILLER_3', 'FILLER_4', 'FILLER_5', 'REGIST_DATE', 'CREATE_USER', 'UPDATE_DATE', 'UPDATE_USER', 'SYS_REGIST_DATE', 'REGIST_PRGM_ID', 'SYS_UPDATE_DATE', 'UPDATE_PRGM_ID'] - with open(csv_file_path, mode='w', encoding='cp932') as csv_file: + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) csv_file.write(f"{','.join(head_str)}\n") @@ -264,7 +267,7 @@ def make_csv_data(record_inst: list, record_pharm: list, vjsk_csv_file_name: str except Exception as e: logger.info('ワークデータの作成に失敗しました。') - logger.info('バッチ処理を異常終了しました。') + logger.info('CSVデータの作成に失敗しました。') raise e return csv_file_path diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 0e92bd1b..e6751a47 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -13,7 +13,7 @@ from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants -from src.batch.ultmarc import vjsk_process +from src.batch.ultmarc import output_vjsk_inst_pharm_data logger = get_logger('日次処理コントロール') @@ -76,11 +76,8 @@ def exec(): logger.debug(f'{"アルトマーク取込が行われました。" if batch_context.is_ultmarc_imported else "アルトマーク取込が行われませんでした。"}') try: - if not batch_context.is_ultmarc_imported: - logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') - return logger.info('V実消化用施設データ作成処理:起動') - vjsk_process.exec() + output_vjsk_inst_pharm_data.exec() logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: logger.exception(f'V実消化用施設データ作成処理エラー(異常終了){e}') From ac5ee0e20bc8bc8bfa5567532f7bff5159ba3793 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 17:04:54 +0900 Subject: [PATCH 095/103] =?UTF-8?q?feat:=20=E3=81=82=E3=82=8B=E3=81=A8?= 
=?UTF-8?q?=E3=83=9E=E3=83=BC=E3=82=AF=E3=81=AE=E3=83=89=E3=83=AD=E3=83=83?= =?UTF-8?q?=E3=83=97=E3=83=80=E3=82=A6=E3=83=B3=E3=81=AE=E9=AB=98=E3=81=95?= =?UTF-8?q?=E3=82=92=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/static/css/ultStyle.css | 1 + 1 file changed, 1 insertion(+) diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index e39fa143..c31553f1 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -32,6 +32,7 @@ table{ .search_dropdown{ width: 100%; + height: 1.8em; } .search_longtextbox{ From a30577289111705726e673a0b631fff1e527af3d Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 18:03:03 +0900 Subject: [PATCH 096/103] =?UTF-8?q?feat:=20=E3=83=9A=E3=83=BC=E3=82=B8?= =?UTF-8?q?=E3=83=8D=E3=83=BC=E3=82=B7=E3=83=A7=E3=83=B3=E3=81=AE=E3=83=87?= =?UTF-8?q?=E3=82=B6=E3=82=A4=E3=83=B3=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/static/css/pagenation.css | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/ecs/jskult-webapp/src/static/css/pagenation.css b/ecs/jskult-webapp/src/static/css/pagenation.css index 3edbf3b2..72e4aecc 100644 --- a/ecs/jskult-webapp/src/static/css/pagenation.css +++ b/ecs/jskult-webapp/src/static/css/pagenation.css @@ -3,10 +3,6 @@ padding-top: 10px; } -/* .paginationjs { - width: 100%; -} */ - .paginationjs > .paginationjs-nav.J-paginationjs-nav{ position: absolute; right: 0; @@ -19,10 +15,12 @@ div.paginationjs-pages ul { } .paginationjs-pages > ul > li > a { - padding: 6px 18px; + padding: 6px 9px; color: white; - background-color: gainsboro; - border: 1px solid; + background-color: whitesmoke; + border: 1px solid #bbb; + border-radius: 3px; + box-shadow: 0 1px 2px rgba(0, 0, 0, 0.2) } .paginationjs-pages > ul > li > a:hover { color: black; @@ -31,27 +29,38 @@ div.paginationjs-pages ul { } .paginationjs-pages > ul > li.active > a { color: white; - background-color: gray; + background-color: #666666; + box-shadow: none; } .paginationjs-pages > ul > li.active > a:hover { color: white; - background-color: gray; + background-color: #666666; cursor: text; } .paginationjs-pages > ul > li.disabled > a { color: white; - background-color: gray; - cursor: text; + background-color: #666666; + box-shadow: none; } .paginationjs-pages > ul > li.disabled > a:hover { color: white; - background-color: gray; + background-color: #666666; cursor: text; } -.paginationjs-page { +.paginationjs-page,.paginationjs-prev,.paginationjs-next { + text-decoration: underline; margin: 0 4px; } + +.paginationjs-page:hover,.paginationjs-prev:hover,.paginationjs-next:hover { + text-decoration: none; +} + +.paginationjs-page.active,.paginationjs-prev.disabled,.paginationjs-next.disabled { + text-decoration: none; +} + .paginationjs-pages > ul { display: flex; align-items: baseline; From 518c84695e59af7fecfdf50f9244b53834809065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=87=8E=E9=96=93?= Date: Wed, 5 Jul 2023 18:05:44 +0900 Subject: [PATCH 097/103] =?UTF-8?q?=E5=90=8D=E7=A7=B0=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 44 +++++++++---------- .../ultmarc/output_vjsk_inst_pharm_data.py | 8 ++-- 2 files changed, 24 insertions(+), 28 deletions(-) 
diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 8cf6c3cf..86ffc169 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,4 +1,3 @@ -import io import os import os.path as path import tarfile @@ -80,28 +79,6 @@ class UltmarcBucket(S3Bucket): self._s3_client.delete_file(self._bucket_name, dat_file_key) -class VjskBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _folder = environment.VJSK_DATA_SEND_FOLDER - - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def upload_dat_file(self, vjsk_create_csv: str, csv_file_path: str): - # S3バケットにファイルを移動 - csv_file_name = f'{self._folder}/{vjsk_create_csv}' - s3_client = S3Client() - s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) - return - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - vjsk_backup_bucket = VjskBackupBucket() - dat_key = f'{self._folder}/{dat_file_key}' - backup_key = f'{vjsk_backup_bucket._folder}/{self._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) - - class ConfigBucket(S3Bucket): _bucket_name = environment.JSKULT_CONFIG_BUCKET @@ -139,7 +116,7 @@ class VjskBackupBucket(JskUltBackupBucket): class VjskReceiveBucket(S3Bucket): - _bucket_name = environment.JSKULT_DATA_BUCKET + _bucket_name = environment.VJSK_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV _s3_file_list = None @@ -176,3 +153,22 @@ class VjskReceiveBucket(S3Bucket): self._s3_client.copy(self._bucket_name, backup_from_file_path, jskult_backup_bucket._bucket_name, backup_key) self._s3_client.delete_file(self._bucket_name, backup_from_file_path) + + +class VjskSendBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _send_folder = environment.VJSK_DATA_SEND_FOLDER + + def upload_vjsk_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + # S3バケットにファイルを移動 + csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + return + + def backup_vjsk_csv_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + vjsk_backup_bucket = VjskBackupBucket() + dat_key = f'{self._send_folder}/{dat_file_key}' + backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py index 1fb338b7..f72e8d92 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py @@ -1,6 +1,6 @@ """output_vjsk_inst_pharm_data""" -from src.aws.s3 import VjskBucket +from src.aws.s3 import VjskSendBucket from src.batch.common.batch_context import BatchContext from src.db.database import Database @@ -42,10 +42,10 @@ def exec(): # CSVファイル作成 csv_file_path = make_csv_data(record_inst, record_pharm, vjsk_csv_file_name) - vjsk_bucket = VjskBucket() + vjsk_bucket = VjskSendBucket() try: # s3へデータ移動 - vjsk_bucket.upload_dat_file(vjsk_csv_file_name, csv_file_path) + vjsk_bucket.upload_vjsk_csv_file(vjsk_csv_file_name, csv_file_path) 
except Exception as e: logger.info('S3バケットにCSVデータを作成できませんでした。') raise e @@ -53,7 +53,7 @@ def exec(): try: # 処理後ファイルをバックアップ batch_context = BatchContext.get_instance() - vjsk_bucket.backup_dat_file(vjsk_csv_file_name, batch_context.syor_date) + vjsk_bucket.backup_vjsk_csv_file(vjsk_csv_file_name, batch_context.syor_date) except Exception as e: logger.info('バックアップバケットへCSVデータをコピーできませんでした。') raise e From 4427e9362e2c80681ff8fdda5e0fb4fb847f5488 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 18:16:08 +0900 Subject: [PATCH 098/103] =?UTF-8?q?fix:=20=E4=B8=8D=E8=A6=81=E3=81=AAJS?= =?UTF-8?q?=E9=96=A2=E6=95=B0=E3=82=92=E6=95=B4=E7=90=86=E3=81=97=E3=81=9F?= =?UTF-8?q?=E3=81=AA=E3=81=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../static/function/businessLogicScript.js | 72 ------------------- .../src/templates/docSearch.html | 16 +++-- .../src/templates/instSearch.html | 16 +++-- 3 files changed, 22 insertions(+), 82 deletions(-) diff --git a/ecs/jskult-webapp/src/static/function/businessLogicScript.js b/ecs/jskult-webapp/src/static/function/businessLogicScript.js index 36171aad..f410d5ab 100644 --- a/ecs/jskult-webapp/src/static/function/businessLogicScript.js +++ b/ecs/jskult-webapp/src/static/function/businessLogicScript.js @@ -128,28 +128,6 @@ function autoModifyDate($this){ $this.value = strFormat; } -// 他のページで共通化しよう -// ページが読み込まれたときにsendクラスのボタンを押せないようにする -// 初期値をdisabledにしときゃいい -$(function(){ - $(".send").prop('disabled',true); -}); - -// 検索結果のところのボタンをチェックが1個でも付いたら押せるようにして、チェックがなければ押せないようにする関数 -// 条件:チェックボックスのクラス名に"selectedページ数"というのがついていること -// 条件:ボタンにクラス名 send がついていること -function resultBtDisablead(){ - var selected = ".selected" + tableCurrentPage; - var cnt1 = $(selected + ' :checked').length; - selected += " input.checkbox"; - - if(cnt1 == 0) { - $(".send").prop('disabled',true); - } - else { - $(".send").prop('disabled',false); - } -} // 前のスペースを許さない入力チェック function checkSpaceForm($this) @@ -186,13 +164,6 @@ function checkPassForm($this) $this.value=str; } -// 廃止予定 -function DisplayErrorDialog(strMesssage) { - $("#errorTxt").html(strMesssage); - $("#error").dialog("open"); -} - -/* ult.jsから移植 */ // チェックボックス全選択関数 // 条件:チェックボックスのクラス名に"selected"というのがついていること // 条件:ボタンにクラス名 send がついていること @@ -208,46 +179,3 @@ function allOff(){ $(".selected").prop("checked", false); $(".send").prop('disabled',true); } - -// 検索結果のところのボタンをチェックが1個でも付いたら押せるようにして、チェックがなければ押せないようにする関数 -// 条件:チェックボックスのクラス名に"selected"というのがついていること -// 条件:ボタンにクラス名 send がついていること -function resultBtDisablead(){ - var cnt1 = $('.checkNum input:checkbox:checked').length; - console.log(cnt1); - if(cnt1 == 0) { - $(".send").prop('disabled',true); - } - else { - $(".send").prop('disabled',false); - } -} - -// Enter押下時にsubmitさせなくする -$(function() { - $(document).on("keypress", "input:not(.allow_submit)", function(event) { - return event.which !== 13; - }); -}); - -// 数字-以外を許さない入力チェック -function checkNumberForm($this) -{ - var str=$this.value; - while(str.match(/[^\d\-]/)) - { - str=str.replace(/[^\d\-]/,""); - } - $this.value=str; -} - -// 数字以外を許さない入力チェック -function checkNumberOnlyForm($this) -{ - var str=$this.value; - while(str.match(/[^\d]/)) - { - str=str.replace(/[^\d]/,""); - } - $this.value=str; -} \ No newline at end of file diff --git a/ecs/jskult-webapp/src/templates/docSearch.html b/ecs/jskult-webapp/src/templates/docSearch.html index 365e2745..292dd409 100644 --- a/ecs/jskult-webapp/src/templates/docSearch.html +++ 
b/ecs/jskult-webapp/src/templates/docSearch.html @@ -12,6 +12,12 @@ FixedMidashi.create(); // ボタン、テキストボックス初期化 formBtDisabled(); + // Enter押下時にsubmitさせなくする + $(function() { + $(document).on("keypress", "input:not(.allow_submit)", function(event) { + return event.which !== 13; + }); + }); } @@ -112,8 +118,8 @@
- - + + @@ -231,7 +237,7 @@ return `
-
${td} @@ -241,8 +247,8 @@ }) } - // チェックボックスのチェックされている場合、施設情報ボタンを活性化させる - function resultBtDisablead(){ + // チェックボックスのチェックされている場合、医師情報ボタンを活性化させる + function resultBtDisabled(){ var checkboxes = $('input[name="data"]:checked').length; if(checkboxes == 0) { $(".info_bt").prop('disabled',true); diff --git a/ecs/jskult-webapp/src/templates/instSearch.html b/ecs/jskult-webapp/src/templates/instSearch.html index 361977ae..8a866777 100644 --- a/ecs/jskult-webapp/src/templates/instSearch.html +++ b/ecs/jskult-webapp/src/templates/instSearch.html @@ -12,6 +12,12 @@ FixedMidashi.create(); // ボタン、テキストボックス初期化 formBtDisabled(); + // Enter押下時にsubmitさせなくする + $(function() { + $(document).on("keypress", "input:not(.allow_submit)", function(event) { + return event.which !== 13; + }); + }); } @@ -112,8 +118,8 @@ - - + + @@ -228,7 +234,7 @@ return `
-
${td} @@ -238,7 +244,7 @@ } // チェックボックスのチェックされている場合、施設情報ボタンを活性化させる - function resultBtDisablead(){ + function resultBtDisabled(){ var checkboxes = $('input[name="data"]:checked').length; if(checkboxes == 0) { $(".info_bt").prop('disabled',true); @@ -248,7 +254,7 @@ } } - // // 検索結果のうち、チェックされている行のデータを非表示項目に詰め込む + // 検索結果のうち、チェックされている行のデータを非表示項目に詰め込む function CheckBoxListProcessing() { let vals = []; // 配列を定義 From 5595717d0c47f2ca951b37deb6efb9f9b98ef59f Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 19:06:25 +0900 Subject: [PATCH 099/103] =?UTF-8?q?style:=20=E3=83=9A=E3=83=BC=E3=82=B8?= =?UTF-8?q?=E3=83=8D=E3=83=BC=E3=82=B7=E3=83=A7=E3=83=B3=E3=81=A8=E3=83=AA?= =?UTF-8?q?=E3=82=B9=E3=83=88=E3=81=AE=E9=96=93=E3=82=92=E9=96=8B=E3=81=91?= =?UTF-8?q?=E3=81=9F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/static/css/bioStyle.css | 6 ---- ecs/jskult-webapp/src/static/css/ultStyle.css | 29 ------------------- .../src/templates/docSearch.html | 2 +- 3 files changed, 1 insertion(+), 36 deletions(-) diff --git a/ecs/jskult-webapp/src/static/css/bioStyle.css b/ecs/jskult-webapp/src/static/css/bioStyle.css index c0ab2ca5..26bf416e 100644 --- a/ecs/jskult-webapp/src/static/css/bioStyle.css +++ b/ecs/jskult-webapp/src/static/css/bioStyle.css @@ -113,12 +113,6 @@ table{ width : 450px; } -.docSearchScroll_div { - overflow: auto; - height: 200px; - width: 1132px; -} - .transition{ text-align: right; margin-right: 60px; diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index c31553f1..af29f38a 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -403,29 +403,6 @@ table{ width: 100%; } -.docSearchScroll{ - overflow: auto; - white-space: nowrap; - margin-bottom: 2%; - width: 100%; - height: 270px; -} - -.docSearchScroll::-webkit-scrollbar { - height: 5px; - width: 10px; -} - -.docSearchScroll::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; -} - -.docSearchScroll::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; -} - .allOnOffButton{ width: 6%; } @@ -558,12 +535,6 @@ table{ width : 450px; } -.docSearchScroll_div { - overflow: auto; - height: 200px; - width: 1132px; -} - /*共通:施設詳細、医師詳細*/ .transition{ text-align: right; diff --git a/ecs/jskult-webapp/src/templates/docSearch.html b/ecs/jskult-webapp/src/templates/docSearch.html index 292dd409..21da4a9c 100644 --- a/ecs/jskult-webapp/src/templates/docSearch.html +++ b/ecs/jskult-webapp/src/templates/docSearch.html @@ -127,7 +127,7 @@ -
+
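The hunks in the next commit (PATCH 100/103) all converge on one repository shape: a module-level logger obtained from src.logging.get_logger replaces the old print() error output, exceptions are logged with logger.exception() and re-raised, and the connection is always released in a finally block. A minimal sketch of that shape, assuming the tree's get_logger, BaseRepository and self._database exactly as they appear in the hunks below; the SampleRepository name, the logger label and the SQL are illustrative placeholders, not code from the repository:

from src.logging.get_logger import get_logger
from src.repositories.base_repository import BaseRepository

# placeholder label; the real loggers name the entity being fetched, e.g. 'COM_学会取得'
logger = get_logger('COM_サンプル取得')


class SampleRepository(BaseRepository):
    # placeholder query; the real repositories keep their SQL in a FETCH_SQL constant the same way
    FETCH_SQL = """\
        SELECT sample_name
        FROM src05.sample
        WHERE sample_cd = :id
    """

    def fetch_many(self, id):
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_SQL, {'id': id})
            models = [dict(r) for r in result]  # the real code builds typed DB model objects here
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # the PATCH 100 pattern: log with traceback via logger.exception, then re-raise unchanged
            logger.exception(f"DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()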
From a114aacf9510e75fce2fd2bf4e21adaaa1d89cf2 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 6 Jul 2023 10:41:16 +0900 Subject: [PATCH 100/103] =?UTF-8?q?feat:=20=E3=83=AA=E3=83=9D=E3=82=B8?= =?UTF-8?q?=E3=83=88=E3=83=AA=E3=82=AF=E3=83=A9=E3=82=B9=E3=81=AE=E3=82=A8?= =?UTF-8?q?=E3=83=A9=E3=83=BC=E3=83=AD=E3=82=B0=E5=87=BA=E5=8A=9B=E3=82=92?= =?UTF-8?q?logger=E3=81=AB=E7=B5=B1=E4=B8=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/model/db/ultmarc_trt_course.py | 2 +- .../model/view/ultmarc_doctor_info_view_model.py | 4 ++-- .../src/repositories/inst_master_repository.py | 7 ++++--- .../src/repositories/prefc_master_repository.py | 7 ++++--- .../repositories/ultmarc_doctor_repository.py | 16 +++++++++------- .../ultmarc_dr_wrkplace_his_repository.py | 6 ++++-- .../ultmarc_dr_wrkplace_repository.py | 9 +++++---- .../src/repositories/ultmarc_inst_repository.py | 14 ++++++++------ .../ultmarc_inst_trt_course_repository.py | 6 ++++-- .../repositories/ultmarc_sosiety_repository.py | 6 ++++-- .../ultmarc_specialist_license_repository.py | 6 ++++-- .../ultmarc_trt_course_repository.py | 14 ++++++++------ .../src/services/ultmarc_view_service.py | 6 +++--- 13 files changed, 60 insertions(+), 43 deletions(-) diff --git a/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py b/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py index 788d9748..12ced11c 100644 --- a/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py +++ b/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py @@ -5,5 +5,5 @@ from src.util.sanitize import sanitize @sanitize -class UltmarcTrtCourseDBModel(BaseDBModel): +class UltmarcDrTrtCourseDBModel(BaseDBModel): trt_course_name: Optional[str] diff --git a/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py b/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py index 5de18e11..f517e4a3 100644 --- a/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py +++ b/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py @@ -10,7 +10,7 @@ from src.model.db.ultmarc_doctor_wrkplace_his import \ from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel from src.model.db.ultmarc_specialist_license import \ UltmarcSpecialistLicenseDBModel -from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel +from src.model.db.ultmarc_trt_course import UltmarcDrTrtCourseDBModel from src.system_var import environment @@ -18,7 +18,7 @@ class UltmarcDoctorInfoViewModel(BaseModel): subtitle: str = '医師情報' is_batch_processing: Optional[bool] doctor_info_data: Optional[UltmarcDoctorInfoDBModel] - trt_coursed_data: Optional[list[UltmarcTrtCourseDBModel]] + trt_coursed_data: Optional[list[UltmarcDrTrtCourseDBModel]] sosiety_data: Optional[list[UltmarcSosietyDBModel]] specialist_license_data: Optional[list[UltmarcSpecialistLicenseDBModel]] doctor_wrkplace_data: Optional[list[UltmarcDoctorWrkplaceDBModel]] diff --git a/ecs/jskult-webapp/src/repositories/inst_master_repository.py b/ecs/jskult-webapp/src/repositories/inst_master_repository.py index d4b18505..4de6b732 100644 --- a/ecs/jskult-webapp/src/repositories/inst_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/inst_master_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.inst_div_master import InstDivMasterModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_施設区分取得') + class InstDivMasterRepository(BaseRepository): @@ -21,9 
+24,7 @@ class InstDivMasterRepository(BaseRepository): models = [InstDivMasterModel(**r) for r in result_data] return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] getOroshiData DB Error. ") - print(f"[ERROR] ErrorMessage: {e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/prefc_master_repository.py b/ecs/jskult-webapp/src/repositories/prefc_master_repository.py index c304f59f..e83e5d78 100644 --- a/ecs/jskult-webapp/src/repositories/prefc_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/prefc_master_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.prefc_master import PrefcMasterModel from src.repositories.base_repository import BaseRepository +logger = get_logger('都道府県マスタ取得') + class PrefcMasterRepository(BaseRepository): @@ -23,9 +26,7 @@ class PrefcMasterRepository(BaseRepository): models = [PrefcMasterModel(**r) for r in result_data] return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] getOroshiData DB Error. ") - print(f"[ERROR] ErrorMessage: {e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py index 901078ab..03ee8271 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py @@ -1,11 +1,15 @@ +import mojimoji + from src.db import sql_condition as condition from src.db.sql_condition import SQLCondition +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor import UltmarcDoctorDBModel from src.model.db.ultmarc_doctor_info import UltmarcDoctorInfoDBModel from src.model.request.ultmarc_doctor import UltmarcDoctorSearchModel from src.repositories.base_repository import BaseRepository from src.util.string_util import is_not_empty -import mojimoji + +logger = get_logger('COM_医師取得') class UltmarcDoctorRepository(BaseRepository): @@ -56,8 +60,7 @@ class UltmarcDoctorRepository(BaseRepository): return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -79,7 +82,7 @@ class UltmarcDoctorRepository(BaseRepository): if is_not_empty(parameter.dr_name_kana): # 必ず部分一致で検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.dr_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -101,7 +104,7 @@ class UltmarcDoctorRepository(BaseRepository): if is_not_empty(parameter.form_inst_name_kana): # 必ず部分一致で検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.form_inst_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -178,8 +181,7 @@ class UltmarcDoctorRepository(BaseRepository): return None return 
models[0] except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py index 1fab4d45..daa82533 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py @@ -1,7 +1,10 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor_wrkplace_his import \ UltmarcDoctorWrkplaceHisDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師勤務先履歴取得') + class UltmarcDoctorWrkplaceHisRepository(BaseRepository): @@ -35,8 +38,7 @@ class UltmarcDoctorWrkplaceHisRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py index b05ce6fd..4798c467 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py @@ -1,8 +1,11 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor_wrkplace import UltmarcDoctorWrkplaceDBModel from src.model.db.ultmarc_doctor_wrkplace_count import \ UltmarcDoctorWrkplaceCountDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師勤務先取得') + class UltmarcDoctorWrkplaceRepository(BaseRepository): @@ -34,8 +37,7 @@ class UltmarcDoctorWrkplaceRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -56,8 +58,7 @@ class UltmarcDoctorWrkplaceRepository(BaseRepository): return 0 return models[0].count except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py index 2522006f..ef1cab76 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py @@ -1,11 +1,15 @@ +import mojimoji + from src.db import sql_condition as condition from src.db.sql_condition import SQLCondition +from src.logging.get_logger import get_logger from src.model.db.ultmarc_inst import UltmarcInstDBModel from src.model.db.ultmarc_inst_info import UltmarcInstInfoDBModel from src.model.request.ultmarc_inst import UltmarcInstSearchModel from src.repositories.base_repository import BaseRepository from src.util.string_util import is_not_empty -import mojimoji + +logger = get_logger('COM_施設取得') class UltmarcInstRepository(BaseRepository): @@ -43,8 +47,7 @@ class UltmarcInstRepository(BaseRepository): return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : 
Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -72,7 +75,7 @@ class UltmarcInstRepository(BaseRepository): if is_not_empty(parameter.form_inst_name_kana): # 部分一致検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.form_inst_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -187,8 +190,7 @@ class UltmarcInstRepository(BaseRepository): return None return models[0] except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py index eaee391a..4b3c0385 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_inst_trt_course import UltmarcInstTrtCourseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_施設診療科目取得') + class UltmarcInstTrtCourseRepository(BaseRepository): @@ -24,8 +27,7 @@ class UltmarcInstTrtCourseRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py index e3c9ac13..0cd55e8c 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_学会取得') + class UltmarcSosietyRepository(BaseRepository): @@ -23,8 +26,7 @@ class UltmarcSosietyRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py index a4927b44..7024b616 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py @@ -1,7 +1,10 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_specialist_license import \ UltmarcSpecialistLicenseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_専門医資格取得') + class UltmarcSpecialistLicenseRepository(BaseRepository): @@ -25,8 +28,7 @@ class UltmarcSpecialistLicenseRepository(BaseRepository): return None return models except Exception as e: - # TODO: 
ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py index c76032a3..a5a1c9f4 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py @@ -1,8 +1,11 @@ -from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel +from src.logging.get_logger import get_logger +from src.model.db.ultmarc_trt_course import UltmarcDrTrtCourseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師診療科目取得') -class UltmarcTrtCourseRepository(BaseRepository): + +class UltmarcDrTrtCourseRepository(BaseRepository): FETCH_SQL = """\ SELECT trt_course_name @@ -13,19 +16,18 @@ class UltmarcTrtCourseRepository(BaseRepository): ORDER BY com_trt_course.trt_course_cd """ - def fetch_many(self, id) -> list[UltmarcTrtCourseDBModel]: + def fetch_many(self, id) -> list[UltmarcDrTrtCourseDBModel]: try: self._database.connect() query = self.FETCH_SQL result = self._database.execute_select(query, {'id': id}) - models = [UltmarcTrtCourseDBModel(**r) for r in result] + models = [UltmarcDrTrtCourseDBModel(**r) for r in result] if len(models) == 0: return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/services/ultmarc_view_service.py b/ecs/jskult-webapp/src/services/ultmarc_view_service.py index 5c441421..24cf1d75 100644 --- a/ecs/jskult-webapp/src/services/ultmarc_view_service.py +++ b/ecs/jskult-webapp/src/services/ultmarc_view_service.py @@ -23,7 +23,7 @@ from src.repositories.ultmarc_sosiety_repository import \ from src.repositories.ultmarc_specialist_license_repository import \ UltmarcSpecialistLicenseRepository from src.repositories.ultmarc_trt_course_repository import \ - UltmarcTrtCourseRepository + UltmarcDrTrtCourseRepository from src.services.base_service import BaseService @@ -33,7 +33,7 @@ class UltmarcViewService(BaseService): 'prefc_repository': PrefcMasterRepository, 'inst_div_repository': InstDivMasterRepository, 'ultmarc_inst_repository': UltmarcInstRepository, - 'ultmarc_trt_course_repository': UltmarcTrtCourseRepository, + 'ultmarc_trt_course_repository': UltmarcDrTrtCourseRepository, 'ultmarc_inst_trt_course_repository': UltmarcInstTrtCourseRepository, 'ultmarc_sosiety_repository': UltmarcSosietyRepository, 'ultmarc_doctor_wrkplace_repository': UltmarcDoctorWrkplaceRepository, @@ -45,7 +45,7 @@ class UltmarcViewService(BaseService): prefc_repository: PrefcMasterRepository inst_div_repository: InstDivMasterRepository ultmarc_inst_repository: UltmarcInstRepository - ultmarc_trt_course_repository: UltmarcTrtCourseRepository + ultmarc_trt_course_repository: UltmarcDrTrtCourseRepository ultmarc_inst_trt_course_repository: UltmarcInstTrtCourseRepository ultmarc_sosiety_repository: UltmarcSosietyRepository ultmarc_doctor_wrkplace_repository: UltmarcDoctorWrkplaceRepository From 7fa640a4c1dd61dfef37c1dda977c6ebe6929053 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 6 Jul 2023 11:42:20 +0900 Subject: [PATCH 101/103] =?UTF-8?q?style:=20=E3=82=BF=E3=83=96=E3=82=A4?= 
=?UTF-8?q?=E3=83=B3=E3=83=87=E3=83=B3=E3=83=88=E2=86=92=E3=82=B9=E3=83=9A?= =?UTF-8?q?=E3=83=BC=E3=82=B9=E3=82=A4=E3=83=B3=E3=83=87=E3=83=B3=E3=83=88?= =?UTF-8?q?=E3=81=AB=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../ultmarc_trt_course_repository.py | 2 +- ecs/jskult-webapp/src/static/css/bioStyle.css | 244 ++++----- .../src/static/css/masterMainte.css | 146 ++--- .../src/static/css/menuStyle.css | 44 +- ecs/jskult-webapp/src/static/css/ultStyle.css | 484 ++++++++-------- .../static/function/businessLogicScript.js | 20 +- ecs/jskult-webapp/src/templates/docInfo.html | 422 +++++++------- .../src/templates/docSearch.html | 424 +++++++------- ecs/jskult-webapp/src/templates/instInfo.html | 518 +++++++++--------- .../src/templates/instSearch.html | 388 ++++++------- 10 files changed, 1346 insertions(+), 1346 deletions(-) diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py index a5a1c9f4..a88ffbbe 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py @@ -11,7 +11,7 @@ class UltmarcDrTrtCourseRepository(BaseRepository): SELECT trt_course_name FROM src05.com_dr LEFT JOIN src05.com_dr_trt_course ON com_dr.dcf_pcf_dr_cd = com_dr_trt_course.dcf_pcf_dr_cd - LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd + LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd WHERE com_dr.dcf_pcf_dr_cd = :id ORDER BY com_trt_course.trt_course_cd """ diff --git a/ecs/jskult-webapp/src/static/css/bioStyle.css b/ecs/jskult-webapp/src/static/css/bioStyle.css index 26bf416e..7ecde3c5 100644 --- a/ecs/jskult-webapp/src/static/css/bioStyle.css +++ b/ecs/jskult-webapp/src/static/css/bioStyle.css @@ -5,99 +5,99 @@ } body { - white-space: nowrap; - background-color: LightCyan; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + white-space: nowrap; + background-color: LightCyan; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1 { - font-size: 155%; - margin-left: 2%; - margin-top: 0%; - margin-bottom: 0%; + font-size: 155%; + margin-left: 2%; + margin-top: 0%; + margin-bottom: 0%; } .title { - width: 800px; + width: 800px; } table{ - border-collapse : collapse; + border-collapse : collapse; } .search_table { - margin-bottom: 30px; - padding-bottom: 15px; - border-bottom: solid 1px gray; - width: 1132px; + margin-bottom: 30px; + padding-bottom: 15px; + border-bottom: solid 1px gray; + width: 1132px; } ._form { - width: 1132px; - margin-left: 10px; - margin-right: 20px; + width: 1132px; + margin-left: 10px; + margin-right: 20px; } .back_bt { - padding-bottom: 10px; + padding-bottom: 10px; } ._form input[type=text] { - width: 193px; - height: 25px; + width: 193px; + height: 25px; } ._form input[type=checkbox] { - width: 13px; - height: 13px; + width: 13px; + height: 13px; } ._form select { - width: 193px; - height: 25px; + width: 193px; + height: 25px; } .result_info { - text-align: right; + text-align: right; } .search_tb { - padding-right: 25px; + padding-right: 25px; } .search_bt { - /* width: 60px; */ - margin-left: 10px; + /* width: 60px; */ + margin-left: 10px; } .clear_bt{ - margin-left: 120px; - /* width: 60px */ + 
margin-left: 120px; + /* width: 60px */ } .search_dropdown { - width: 175px; + width: 175px; } .bioScroll_div { - overflow: auto; - margin-top: 1%; - height: 250px; - width: 1132px; + overflow: auto; + margin-top: 1%; + height: 250px; + width: 1132px; } .noLine{ - text-decoration: none; + text-decoration: none; } .resultAreaMsg { - margin-top: 5%; - text-align: center; - font-size: 150%; + margin-top: 5%; + text-align: center; + font-size: 150%; } .search_btTd { - text-align: right; + text-align: right; } .selection { @@ -109,70 +109,70 @@ table{ } .search_middleTd { - padding-right: 25px; - width : 450px; + padding-right: 25px; + width : 450px; } .transition{ - text-align: right; - margin-right: 60px; + text-align: right; + margin-right: 60px; } .transition_bt{ - width: 110px; - height: 40px; - margin-left: 15px; - margin-right: 15px; + width: 110px; + height: 40px; + margin-left: 15px; + margin-right: 15px; } .instutionInfo_table{ - width: 1132px; - margin-bottom: 50px; + width: 1132px; + margin-bottom: 50px; } .institution_column { - width : 160px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 160px; + background : rgb(225, 233, 250); + border : solid 1px; } .institution_data { - background : rgb(244, 244, 244); - border : solid 1px; - padding-left : 0.5em; - padding-right : 0.5em; + background : rgb(244, 244, 244); + border : solid 1px; + padding-left : 0.5em; + padding-right : 0.5em; } .data_width_long { - width : 500px; + width : 500px; } .data_width_middle { - width : 300px; + width : 300px; } .data_width_short { - width : 100px; + width : 100px; } .checkbox_margin { - margin-left : 20px; + margin-left : 20px; } .border_top_none { - border-top-style:none; + border-top-style:none; } .border_bottom_none { - border-bottom-style:none; + border-bottom-style:none; } .textbox_margin { - margin-left : 20px; + margin-left : 20px; } .textbox_margin_short { - margin-left : 5px; + margin-left : 5px; } .label_margin { @@ -181,110 +181,110 @@ table{ } .trt_course{ - width: 70px; + width: 70px; } .small_tb{ - width: 100px; + width: 100px; } .docBelongScroll_div { - overflow: auto; - height: 100px; - width: 500px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 100px; + width: 500px; + margin: 0px 30px 0px 30px; } .rightPadding_table{ - padding-right: 50px; + padding-right: 50px; } .verticalBar_td{ - width: 1px; - height: 150px; - background-color: gray; + width: 1px; + height: 150px; + background-color: gray; } .docPlaceScroll_div { - overflow: auto; - height: 150px; - width: 700px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 150px; + width: 700px; + margin: 0px 30px 0px 30px; } .result_tr{ - overflow-y: scroll; - overflow-x: scroll; + overflow-y: scroll; + overflow-x: scroll; } .result_data{ - overflow-y: scroll; - overflow-x: scroll; - width: 50px; + overflow-y: scroll; + overflow-x: scroll; + width: 50px; } /* tablesoter */ table.tablesorter { - font-family:arial; - background-color: #CDCDCD; - font-size: 12pt; - text-align: left; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { - background-color: #e6EEEE; - border: 0.1px solid silver; - font-size: 12pt; - padding: 4px; - padding-right: 20px; + background-color: #e6EEEE; + border: 0.1px solid silver; + font-size: 12pt; + padding: 4px; + padding-right: 20px; } table.tablesorter thead tr .header { - background-image: url(bg.gif); - background-repeat: no-repeat; - background-position: center right; - 
cursor: pointer; + background-image: url(bg.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; } table.tablesorter tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } table.tablesorter tbody td div{ - float: right; + float: right; } table.tablesorter tbody tr.odd td { - background-color:#F0F0F6; + background-color:#F0F0F6; } table.tablesorter thead tr .headerSortUp { - background-image: url(asc.gif); + background-image: url(asc.gif); } table.tablesorter thead tr .headerSortDown { - background-image: url(desc.gif); + background-image: url(desc.gif); } table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp { - background-color: #8dbdd8; + background-color: #8dbdd8; } #loading { - z-index: 10000; - position: fixed; - top: 0; - left: 0; - width: 100%; - height: 100%; - background-color: #FFF; - overflow-x: hidden; - overflow-y: auto; - outline: 0; - text-align: center; - display: none; - opacity: 0.7; + z-index: 10000; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: #FFF; + overflow-x: hidden; + overflow-y: auto; + outline: 0; + text-align: center; + display: none; + opacity: 0.7; } #loading_content { - position: absolute; - top: 50%; - left: 50%; + position: absolute; + top: 50%; + left: 50%; } \ No newline at end of file diff --git a/ecs/jskult-webapp/src/static/css/masterMainte.css b/ecs/jskult-webapp/src/static/css/masterMainte.css index a59c1681..3c15e03d 100644 --- a/ecs/jskult-webapp/src/static/css/masterMainte.css +++ b/ecs/jskult-webapp/src/static/css/masterMainte.css @@ -1,153 +1,153 @@ body{ - background-color: LightCyan; - font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1{ - margin-left : 1%; + margin-left : 1%; } /*ヘッダー*/ .headerTable{ - width: 100%; + width: 100%; } .headerTdLeft{ - width: 80%; + width: 80%; } .headerTdRight{ - text-align: right; - padding-right: 2%; - width: 20%; + text-align: right; + padding-right: 2%; + width: 20%; } .buttonSize{ - width: 85px; + width: 85px; } /*////////////////////////*/ /*施設担当者データCSVダウンロード*/ /*////////////////////////*/ .searchColumnTd{ - width: 14%; + width: 14%; } .searchTextboxTd{ - width: 18%; + width: 18%; } .searchTable{ - margin-left: 3%; - margin-right: 3%; - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 94%; + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; } .searchLabelTd{ - text-align: right; - width: 10%; + text-align: right; + width: 10%; } .searchInputTd{ - width: 19%; + width: 19%; } .searchTextbox{ - width: 90%; - margin-left: 2.5%; - margin-right: 2.5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 90%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .searchDateTextbox{ - width: 37%; - margin-left: 2.5%; - margin-right: 2.5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 37%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .searchButtonTd{ - text-align: right; - padding-top: 1%; + text-align: 
right; + padding-top: 1%; } .csvOutputMessage{ - margin-left: 3%; + margin-left: 3%; } .errorColor{ - color: red; + color: red; } /*//////////////////////////*/ /*施設担当者データExcelアップロード*/ /*//////////////////////////*/ .inputTable{ - margin-left: 3%; - margin-right: 3%; - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 94%; + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; } .inputLabelTd{ - width: 10%; + width: 10%; } .inputTd{ - width:20%; + width:20%; } .inputButtonTd{ - width: 50%; - text-align: right; + width: 50%; + text-align: right; } .dataCntDisp{ - text-align: right; - margin-right: 3%; + text-align: right; + margin-right: 3%; } table.inputData { - font-family:arial; - background-color: #CDCDCD; - font-size: 12pt; - text-align: left; - white-space: nowrap; - border: 0.1px solid silver; - padding: 4px; - padding-right: 20px; - border-collapse: collapse; - margin-left: 3%; - width: 94%; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; + white-space: nowrap; + border: 0.1px solid silver; + padding: 4px; + padding-right: 20px; + border-collapse: collapse; + margin-left: 3%; + width: 94%; } table.inputData tbody th { - color: #3D3D3D; - padding: 4px; - background-color: #e6EEEE; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #e6EEEE; + border: 0.1px solid silver; + vertical-align: top; } table.inputData tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } .footerMsg{ - margin-left: 3%; + margin-left: 3%; } @@ -155,10 +155,10 @@ table.inputData tbody td { /*データ上書きコピー */ /*//////////////////////////*/ .tableOverRide{ - margin-right: 3%; - margin-left: 3%; - margin-bottom: 2%; - border-bottom: solid 1px gray; - width: 94%; + margin-right: 3%; + margin-left: 3%; + margin-bottom: 2%; + border-bottom: solid 1px gray; + width: 94%; } diff --git a/ecs/jskult-webapp/src/static/css/menuStyle.css b/ecs/jskult-webapp/src/static/css/menuStyle.css index 3a07d9fc..cabd5197 100644 --- a/ecs/jskult-webapp/src/static/css/menuStyle.css +++ b/ecs/jskult-webapp/src/static/css/menuStyle.css @@ -1,49 +1,49 @@ body{ - background-color: LightCyan; - background-size: 220%,220%; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + background-size: 220%,220%; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } .background{ - margin-top: 5%; - padding: 2%; - background-color: white; - width: 40%; - border-radius: 25px; - box-shadow:5px 5px rgba(0,0,0,0.4);; + margin-top: 5%; + padding: 2%; + background-color: white; + width: 40%; + border-radius: 25px; + box-shadow:5px 5px rgba(0,0,0,0.4);; } .btn_width { - width: 80%; + width: 80%; } .form_login{ - width: 80%; - font-size: 180%; - margin: 1%; + width: 80%; + font-size: 180%; + margin: 1%; } .form_login::-webkit-input-placeholder{ - color: gray; + color: gray; } .form_login:-ms-input-placeholder{ - color: gray; + color: gray; } .form_login::-moz-placeholder{ - color: gray; + color: gray; } .logout_p{ - font-size: 160%; + font-size: 160%; } .notUseBioMsg,.notUseMainteMsg{ - font-size: 143%; - color: red; + 
font-size: 143%; + color: red; } .batchMsg{ - color: red; - font-size: 120%; - text-align: center; + color: red; + font-size: 120%; + text-align: center; } \ No newline at end of file diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index af29f38a..f5eea0e0 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -5,480 +5,480 @@ } body { - background-color: LightCyan; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1 { - font-size: 150%; - margin-left: 2%; - margin-top: 0%; - margin-bottom: 0%; + font-size: 150%; + margin-left: 2%; + margin-top: 0%; + margin-bottom: 0%; } table{ - border-collapse : collapse; + border-collapse : collapse; } .header_bt{ - width: 8%; - margin-bottom: 0.8%; - margin-left: 78.5%; + width: 8%; + margin-bottom: 0.8%; + margin-left: 78.5%; } .search_textbox{ - width: 100%; + width: 100%; } .search_dropdown{ - width: 100%; - height: 1.8em; + width: 100%; + height: 1.8em; } .search_longtextbox{ - width: 100% + width: 100% } .instSearchResult { - width: 100%; + width: 100%; } .scroll_table{ - overflow: auto; - white-space: nowrap; - margin-top: 1%; - margin-bottom: 1%; - width: 100%; - height: 250px; + overflow: auto; + white-space: nowrap; + margin-top: 1%; + margin-bottom: 1%; + width: 100%; + height: 250px; } .scroll_table::-webkit-scrollbar { - height: 5px; - width: 10px; + height: 5px; + width: 10px; } .scroll_table::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; + border-radius: 5px; + background: #eee; } .scroll_table::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; + border-radius: 5px; + background: #666; } .ult_bt { - width: 20%; - height: 80%; + width: 20%; + height: 80%; } .info_bt{ - width: 10% + width: 10% } .search_bt{ - margin-left: 3%; - margin-top: 0.8%; - margin-bottom: 0.8%; + margin-left: 3%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .notFind{ - margin-top: 5%; - text-align: center; - font-size: 150%; + margin-top: 5%; + text-align: center; + font-size: 150%; } .search_table { - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 100%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 100%; } .search_tb { - padding-right: 2%; - padding-top: 0.2%; - padding-bottom: 0.2%; + padding-right: 2%; + padding-top: 0.2%; + padding-bottom: 0.2%; } .leftSearch_tb{ - width: 35%; + width: 35%; } .batchMsg{ - color: red; - font-size: 120%; - text-align: center; + color: red; + font-size: 120%; + text-align: center; } ._form { - width: 95%; - margin-left: 3%; + width: 95%; + margin-left: 3%; } .result_info { - text-align: right; + text-align: right; } /*施設検索一覧ヘッダー*/ .instSearchHeaderTable{ - width: 100%; + width: 100%; } .instSearchHeaderTd{ - width: 24%; + width: 24%; } .instSearchHeaderTdCenter{ - text-align: center; - width: 50%; + text-align: center; + width: 50%; } .instSearchHeaderTdRight{ - text-align: right; - padding-right: 2%; + text-align: right; + padding-right: 2%; } .instSearchButchMsg{ - /* font-size: 80%; */ - color: red; + /* font-size: 80%; */ + color: red; } .instSearchHeader_bt{ - width: 40%; + width: 40%; } /*施設詳細*/ .instInfoTable{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - 
margin-bottom: 2%; - width: 93%; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 2%; + width: 93%; } .instInfoTableHalf1{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - width: 93%; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + width: 93%; } .instInfoTableHalf2{ - margin-top: -0.05%; - margin-left: 5%; - margin-right: 2%; - margin-bottom: 2%; - width: 93%; + margin-top: -0.05%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 2%; + width: 93%; } .instInfoColumn { - width : 9%; - height: 40px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 9%; + height: 40px; + background : rgb(225, 233, 250); + border : solid 1px; } .instData { - background : rgb(244, 244, 244); - border : solid 1px; - padding-left : 0.5%; - padding-right : 0.5%; - padding-top: 0.25%; - padding-bottom: 0.25%; + background : rgb(244, 244, 244); + border : solid 1px; + padding-left : 0.5%; + padding-right : 0.5%; + padding-top: 0.25%; + padding-bottom: 0.25%; } .instDataMid{ - /*NO5修正前 width: 51%;*/ - width: 20%; + /*NO5修正前 width: 51%;*/ + width: 20%; } /*NO5にて追加 START*/ .instDataLarge{ - width: 85%; + width: 85%; } .instDataLeft{ - width: 20%; + width: 20%; } .instDataCenter{ - width: 7%; + width: 7%; } .instDataRight{ - width: 25%; + width: 25%; } /*NO5にて追加 END*/ .instDataSmallTextbox{ - width: 45%; + width: 45%; } /*NO5にて追加 START*/ .instDataCenterTextbox{ - width: 80%; + width: 80%; } /*NO5にて追加 END*/ .instInfoTextbox{ - width: 98%; - padding-right: 1%; - padding-left: 1%; + width: 98%; + padding-right: 1%; + padding-left: 1%; } .instCdTextbox{ - /*NO5修正前 width: 13%;*/ - width: 35%; - margin-left: 0.5%; - margin-right: 2%; + /*NO5修正前 width: 13%;*/ + width: 35%; + margin-left: 0.5%; + margin-right: 2%; } .delReasonCdTextbox{ - /*NO5修正前 width: 2%;*/ - width: 5%; - margin-left: 0.5%; - margin-right: 1%; + /*NO5修正前 width: 2%;*/ + width: 5%; + margin-left: 0.5%; + margin-right: 1%; } .delReasonTextbox{ - /*NO5修正前 width: 43%;*/ - width: 88%; - margin-left: 0.5%; - margin-right: 2%; + /*NO5修正前 width: 43%;*/ + width: 88%; + margin-left: 0.5%; + margin-right: 2%; } .manageTextbox{ - width: 40%; + width: 40%; } .textboxMargin { - margin-left : 0.1%; + margin-left : 0.1%; } .transitionBt{ - width: 98%; - height: 30px; + width: 98%; + height: 30px; } .instHeaderTable{ - margin-left: 40%; + margin-left: 40%; } .instHeaderTd{ - width: 10%; - font-size: 140%; - text-align: center; - padding-right: 2%; + width: 10%; + font-size: 140%; + text-align: center; + padding-right: 2%; } .trtCourseTextbox{ - width: 6%; + width: 6%; } .bedTd{ - width: 46%; + width: 46%; } .bedTextbox{ - width: 70%; + width: 70%; } .xSmallTd{ - width: 9%; + width: 9%; } .xSmallTextbox{ - width: 75%; + width: 75%; } .reExamTd{ - width: 13%; + width: 13%; } .repreTd{ - width: 50%; + width: 50%; } .repreTextbox{ - width: 95%; + width: 95%; } .trtTextbox{ - width: 5%; - margin-right: 1.2%; + width: 5%; + margin-right: 1.2%; } .parentCdTextBox{ - width: 15%; + width: 15%; } .parentNameTextBox{ - width: 75%; + width: 75%; } .hpInfoColumn{ - width : 12%; - height: 40px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 12%; + height: 40px; + background : rgb(225, 233, 250); + border : solid 1px; } .hpAssrtTd{ - width: 12%; + width: 12%; } .hpAssrtTextbox{ - width: 85%; + width: 85%; } .border_bottom_none { - border-bottom-style:none; + border-bottom-style:none; } .numberBox{ - text-align: right; + text-align: right; } /*医師検索*/ /*ヘッダー*/ .docHeaderTable{ - width: 100%; + width: 
100%; } .docHeaderTd{ - width: 24%; + width: 24%; } .docHeaderTdCenter{ - text-align: center; - width: 50%; + text-align: center; + width: 50%; } .docHeaderTdRight{ - text-align: right; - padding-right: 2%; + text-align: right; + padding-right: 2%; } .docButchMsg{ - /* font-size: 80%; */ - color: red; + /* font-size: 80%; */ + color: red; } .docHeader_bt{ - width: 40%; + width: 40%; } /* アルトマーク課題管理表No.2の修正 8% → 10% */ /* アルトマーク課題管理表No.8の修正 10% → 14% */ .docSearchColumnTd{ - width: 14%; + width: 14%; } .docSearchTextboxTd{ - width: 18%; + width: 18%; } .docSearchTextbox_td{ - width: 94%; + width: 94%; } .docSearchTextbox{ - width: 90%; - margin-right: 5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 90%; + margin-right: 5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .docSearchTableDivOne{ - width: 100%; + width: 100%; } .docSearchTableDivTwo{ - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 100%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 100%; } .allOnOffButton{ - width: 6%; + width: 6%; } /*医師情報*/ .docInfoTable{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - margin-bottom: 1%; - width: 93%; - border-bottom: solid 1px gray; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 1%; + width: 93%; + border-bottom: solid 1px gray; } .docInfoTd{ - padding-bottom: 0.5%; + padding-bottom: 0.5%; } .docInfoTextBox{ - margin-left: 0.5%; - margin-right: 2%; - width: 8%; + margin-left: 0.5%; + margin-right: 2%; + width: 8%; } .docInfoTrtTextBox{ - margin-left: 0.5%; + margin-left: 0.5%; } .docBelongTable{ - margin-left: 1%; - width: 98%; - border-bottom: solid 1px gray; + margin-left: 1%; + width: 98%; + border-bottom: solid 1px gray; } .docBelongTd{ - width: 49%; - height: 150px; + width: 49%; + height: 150px; } .docSocietyTable{ - width: 100%; + width: 100%; } .scroll{ - overflow: auto; - height: 120px; - width: 90%; - margin-left: 7%; - margin-bottom: 4%; + overflow: auto; + height: 120px; + width: 90%; + margin-left: 7%; + margin-bottom: 4%; } .scroll::-webkit-scrollbar { - height: 5px; - width: 10px; + height: 5px; + width: 10px; } .scroll::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; + border-radius: 5px; + background: #eee; } .scroll::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; + border-radius: 5px; + background: #666; } .rightBoderLine{ - border-right: solid 1px gray; + border-right: solid 1px gray; } .wrkplaceH1{ - margin-top: 0.3%; + margin-top: 0.3%; } .wrkplaceTable{ - width: 100%; + width: 100%; } @@ -492,17 +492,17 @@ table{ .clear_bt{ - margin-left: 120px; - width: 60px + margin-left: 120px; + width: 60px } .back_bt{ - margin-left: 1042px; - width: 80px + margin-left: 1042px; + width: 80px } .noLine{ - text-decoration: none; + text-decoration: none; } @@ -511,13 +511,13 @@ table{ /*共通:アルトマーク施設検索,医師検索,施設詳細*/ .maxWidth_tb { - width: 100%; + width: 100%; } /*アルトマーク施設検索,医師検索共通*/ .search_btTd { - text-align: right; + text-align: right; } .selection { @@ -531,14 +531,14 @@ table{ /*医師検索*/ .search_middleTd { - padding-right: 25px; - width : 450px; + padding-right: 25px; + width : 450px; } /*共通:施設詳細、医師詳細*/ .transition{ - text-align: right; - margin-right: 60px; + text-align: right; + margin-right: 60px; } @@ -546,18 +546,18 @@ table{ .data_width_middle { - width : 300px; + width : 300px; } .border_top_none { - border-top-style:none; + border-top-style:none; } .textbox_margin_short { - margin-left : 5px; + margin-left : 5px; } 
.label_margin { @@ -568,82 +568,82 @@ table{ /*医師詳細*/ .docInfo_table{ - margin-bottom: 30px; - border-bottom: solid 1px gray; - width: 1132px; + margin-bottom: 30px; + border-bottom: solid 1px gray; + width: 1132px; } .small_tb{ - width: 100px; + width: 100px; } .docBelongScroll_div { - overflow: auto; - height: 100px; - width: 500px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 100px; + width: 500px; + margin: 0px 30px 0px 30px; } .rightPadding_table{ - padding-right: 50px; + padding-right: 50px; } .docPlaceScroll_div { - overflow: auto; - height: 150px; - width: 700px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 150px; + width: 700px; + margin: 0px 30px 0px 30px; } .result_tr{ - overflow-y: scroll; - overflow-x: scroll; + overflow-y: scroll; + overflow-x: scroll; } .result_data{ - overflow-y: scroll; - overflow-x: scroll; - width: 50px; + overflow-y: scroll; + overflow-x: scroll; + width: 50px; } /* tablesoter */ table.tablesorter { - font-family:arial; - background-color: #CDCDCD; - font-size: 12pt; - text-align: left; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { - background-color: #e6EEEE; - border: 0.1px solid silver; - font-size: 12pt; - padding: 4px; - padding-right: 20px; + background-color: #e6EEEE; + border: 0.1px solid silver; + font-size: 12pt; + padding: 4px; + padding-right: 20px; } table.tablesorter thead tr .header { - background-image: url(bg.gif); - background-repeat: no-repeat; - background-position: center right; - cursor: pointer; + background-image: url(bg.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; } table.tablesorter tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } table.tablesorter tbody tr.odd td { - background-color:#F0F0F6; + background-color:#F0F0F6; } table.tablesorter thead tr .headerSortUp { - background-image: url(asc.gif); + background-image: url(asc.gif); } table.tablesorter thead tr .headerSortDown { - background-image: url(desc.gif); + background-image: url(desc.gif); } table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp { - background-color: #8dbdd8; + background-color: #8dbdd8; } diff --git a/ecs/jskult-webapp/src/static/function/businessLogicScript.js b/ecs/jskult-webapp/src/static/function/businessLogicScript.js index f410d5ab..7ce3b9ba 100644 --- a/ecs/jskult-webapp/src/static/function/businessLogicScript.js +++ b/ecs/jskult-webapp/src/static/function/businessLogicScript.js @@ -96,7 +96,7 @@ function enableDatePicker() { // 日付入力チェック // 引数:チェックするテキストボックスNo function autoModifyDate($this){ - // 日付フォーマットチェック + // 日付フォーマットチェック if($this.value === "" || (!$this.value.match(/^\d{4}\/\d{2}\/\d{2}$/) && !$this.value.match(/^\d{4}\d{2}\d{2}$/))) @@ -110,22 +110,22 @@ function autoModifyDate($this){ // yyyyMMddの場合→yyyy/MM/dd const datePatternMatches = strFormat.match(/^(\d{4})(\d{2})(\d{2})$/); if (datePatternMatches){ - strFormat = `${datePatternMatches[1]}/${datePatternMatches[2]}/${datePatternMatches[3]}`; + strFormat = `${datePatternMatches[1]}/${datePatternMatches[2]}/${datePatternMatches[3]}`; } // yyyy/00/00~yyyy/00/00の場合→yyyy/01/01~yyyy/12/31 // yyyy/MM/00~yyyy/MM/01の場合→yyyy/MM/01~yyyy/MM/末日 - // 開始日の場合 - if ($this.name.includes('from')){ + // 開始日の場合 + 
if ($this.name.includes('from')){ strFormat = strFormat.replace("00/00", "01/01"); strFormat = strFormat.replace("00", "01"); } // 終了日の場合 - else if ($this.name.includes('to')){ + else if ($this.name.includes('to')){ strFormat = strFormat.replace("00/00", "12/31"); const date = new Date(strFormat.slice(0, 4), strFormat.slice(5, 7), 0).getDate(); strFormat = strFormat.replace("00", date.toString()); } - $this.value = strFormat; + $this.value = strFormat; } @@ -168,14 +168,14 @@ function checkPassForm($this) // 条件:チェックボックスのクラス名に"selected"というのがついていること // 条件:ボタンにクラス名 send がついていること function allOn(){ - $(".selected").prop("checked", true); - $(".send").prop('disabled',false); + $(".selected").prop("checked", true); + $(".send").prop('disabled',false); } // チェックボックス全解除関数 // 条件:チェックボックスのクラス名に"selectedページ数"というのがついていること // 条件:ボタンにクラス名 send がついていること function allOff(){ - $(".selected").prop("checked", false); - $(".send").prop('disabled',true); + $(".selected").prop("checked", false); + $(".send").prop('disabled',true); } diff --git a/ecs/jskult-webapp/src/templates/docInfo.html b/ecs/jskult-webapp/src/templates/docInfo.html index 8f9beb21..71b82b75 100644 --- a/ecs/jskult-webapp/src/templates/docInfo.html +++ b/ecs/jskult-webapp/src/templates/docInfo.html @@ -4,228 +4,228 @@ {% with subtitle = ultmarc.subtitle %} {% include '_header.html' %} {% endwith %} - 医師情報 - - - + } + + -
- - - - - -

{{ultmarc.subtitle}}

- {% if ultmarc.is_batch_processing %} -
日次バッチ処理中のため、データが正しく表示されない可能性があります
- {% endif %} -
+ + + + + + +

{{ultmarc.subtitle}}

+ {% if ultmarc.is_batch_processing %} +
日次バッチ処理中のため、データが正しく表示されない可能性があります
+ {% endif %} +
- - - - - - - - - - - - - - - -
- - - {{ultmarc.is_page_num_view()}}/{{ultmarc.post_cnt}} - - -
- -
+ + + + + + + - - - - - - - - - - - - - - - - - - - +
医師コード:氏名(漢字):氏名(カナ):
性別:生年月日:
- 出身大学: - - 出身県: - - 卒年: - - 登録年: - - 開業年: - -
+ + + + + + + + + + + + + + + + + + - - - - -
医師コード:氏名(漢字):氏名(カナ):
性別:生年月日:
+ 出身大学: + + 出身県: + + 卒年: + + 登録年: + + 開業年: + +
- 診療科目: - {% for trt_coursed_data in ultmarc.trt_coursed_data %} - - {% endfor %} - {% for i in range(5-ultmarc.is_input_trt_course_data_size())%} - - {% endfor %} -
- - - - - - - - -
-

所属学会

-
- - - - - - - - - {% for sosiety_data in ultmarc.sosiety_data %} - - - - - {% endfor %} - -
コード所属学会
{{sosiety_data.sosiety_cd or ' '}}{{sosiety_data.sosiety_name or ' '}}
-
-
-

所属学会専門医

-
- - - - - - - - - {% for specialist_license_data in ultmarc.specialist_license_data %} - - - - - {% endfor %} - -
コード専門医資格名
{{specialist_license_data.specialist_cd or ' '}}{{specialist_license_data.specialist_license_name or ' '}}
-
-
- -

勤務先履歴

-
- - - - - - - - - - - - - - - {% for doctor_wrkplace_data in ultmarc.doctor_wrkplace_data %} - {% if doctor_wrkplace_data.dcf_dsf_inst_cd %} - - - - - - - - - - {% endif %} - {% endfor %} - {% for doctor_wrkplace_his_data in ultmarc.doctor_wrkplace_his_data %} - {% if doctor_wrkplace_his_data.dcf_dsf_inst_cd %} - - - - - - - - - - {% endif %} - {% endfor %} - -
ULT施設コード勤務先略名所属部科名役職名職位開始年月日終了年月日
- {{doctor_wrkplace_data.dcf_dsf_inst_cd or ''}}{{doctor_wrkplace_data.inst_name_kanji or ''}}{{doctor_wrkplace_data.blng_sec_name or ''}}{{doctor_wrkplace_data.univ_post_name or ''}}{{doctor_wrkplace_data.post_name or ''}}{{ultmarc.is_input_aply_start_ymd_format(doctor_wrkplace_data.aply_start_ymd)}}9999/99/99
- {{doctor_wrkplace_his_data.dcf_dsf_inst_cd or ''}}{{doctor_wrkplace_his_data.inst_name_kanji or ''}}{{doctor_wrkplace_his_data.blng_sec_name or ''}}{{doctor_wrkplace_his_data.univ_post_name or ''}}{{doctor_wrkplace_his_data.post_name or ''}}{{ultmarc.is_input_his_aply_start_ymd_format(doctor_wrkplace_his_data.aply_start_ymd)}}{{ultmarc.is_input_his_aply_end_ymd_format(doctor_wrkplace_his_data.aply_end_ymd)}}
-
+ + + 診療科目: + {% for trt_coursed_data in ultmarc.trt_coursed_data %} + + {% endfor %} + {% for i in range(5-ultmarc.is_input_trt_course_data_size())%} + + {% endfor %} + + + + + + + + + + + + +
+

所属学会

+
+ + + + + + + + + {% for sosiety_data in ultmarc.sosiety_data %} + + + + + {% endfor %} + +
コード所属学会
{{sosiety_data.sosiety_cd or ' '}}{{sosiety_data.sosiety_name or ' '}}
+
+
+

所属学会専門医

+
+ + + + + + + + + {% for specialist_license_data in ultmarc.specialist_license_data %} + + + + + {% endfor %} + +
コード専門医資格名
{{specialist_license_data.specialist_cd or ' '}}{{specialist_license_data.specialist_license_name or ' '}}
+
+
+ +

勤務先履歴

+
+ + + + + + + + + + + + + + + {% for doctor_wrkplace_data in ultmarc.doctor_wrkplace_data %} + {% if doctor_wrkplace_data.dcf_dsf_inst_cd %} + + + + + + + + + + {% endif %} + {% endfor %} + {% for doctor_wrkplace_his_data in ultmarc.doctor_wrkplace_his_data %} + {% if doctor_wrkplace_his_data.dcf_dsf_inst_cd %} + + + + + + + + + + {% endif %} + {% endfor %} + +
ULT施設コード勤務先略名所属部科名役職名職位開始年月日終了年月日
+ {{doctor_wrkplace_data.dcf_dsf_inst_cd or ''}}{{doctor_wrkplace_data.inst_name_kanji or ''}}{{doctor_wrkplace_data.blng_sec_name or ''}}{{doctor_wrkplace_data.univ_post_name or ''}}{{doctor_wrkplace_data.post_name or ''}}{{ultmarc.is_input_aply_start_ymd_format(doctor_wrkplace_data.aply_start_ymd)}}9999/99/99
+ {{doctor_wrkplace_his_data.dcf_dsf_inst_cd or ''}}{{doctor_wrkplace_his_data.inst_name_kanji or ''}}{{doctor_wrkplace_his_data.blng_sec_name or ''}}{{doctor_wrkplace_his_data.univ_post_name or ''}}{{doctor_wrkplace_his_data.post_name or ''}}{{ultmarc.is_input_his_aply_start_ymd_format(doctor_wrkplace_his_data.aply_start_ymd)}}{{ultmarc.is_input_his_aply_end_ymd_format(doctor_wrkplace_his_data.aply_end_ymd)}}
+
\ No newline at end of file diff --git a/ecs/jskult-webapp/src/templates/docSearch.html b/ecs/jskult-webapp/src/templates/docSearch.html index 21da4a9c..7f6cc551 100644 --- a/ecs/jskult-webapp/src/templates/docSearch.html +++ b/ecs/jskult-webapp/src/templates/docSearch.html @@ -6,191 +6,191 @@ {% endwith %} - + // Enter押下時にsubmitさせなくする + $(function() { + $(document).on("keypress", "input:not(.allow_submit)", function(event) { + return event.which !== 13; + }); + }); + } + - - - - - - -

{{ultmarc.subtitle}}

- {% if ultmarc.is_batch_processing %} -
日次バッチ処理中のため、データが正しく表示されない可能性があります
- {% endif %} -
-
diff --git a/ecs/jskult-webapp/src/templates/instInfo.html b/ecs/jskult-webapp/src/templates/instInfo.html
index d93b619c..00f8e9ab 100644
--- a/ecs/jskult-webapp/src/templates/instInfo.html
+++ b/ecs/jskult-webapp/src/templates/instInfo.html
@@ -6,282 +6,282 @@
    [hunk body not recoverable: the HTML markup was lost in extraction. The facility-detail screen is
     re-indented without changing its fields: the {{ultmarc.subtitle}} heading, the batch-processing
     warning, the page indicator {{ultmarc.is_page_num_view()}}/{{ultmarc.post_cnt}}, and the detail
     tables covering 施設コード (with 未確認), 施設コード変換先, 休院店開始年月, 削除予定理由, 削除日,
     開業予定年月, 正式・略式施設名(カナ/漢字), 施設区分名, 経営体, 郵便番号/住所不明, 施設電話番号/電話なし,
     住所(カナ/漢字), 病院種別, 再審査区分, 関連大学親名, 診療科目 (padded to 60 entries with
     range(60-ultmarc.is_input_inst_trt_course_data_size())), 検査工程, 特務医務室, 許可病床数
     (一般/療養/精神/感染症/結核/その他/合計), 病棟閉鎖/一部病棟閉鎖, 病床数(定員)メンテ年月日,
     代表者個人コード, 施設代表者(カナ/漢字), 修正年月日.]
\ No newline at end of file
diff --git a/ecs/jskult-webapp/src/templates/instSearch.html b/ecs/jskult-webapp/src/templates/instSearch.html
index 8a866777..93018a91 100644
--- a/ecs/jskult-webapp/src/templates/instSearch.html
+++ b/ecs/jskult-webapp/src/templates/instSearch.html
@@ -6,189 +6,189 @@
    [hunk body not recoverable: the HTML markup was lost in extraction. The changes mirror
     docSearch.html: the same keypress handler that blocks Enter-key submits for inputs without the
     allow_submit class, the {{ultmarc.subtitle}} heading, the batch-processing warning, the search
     form, and the result table with columns ULT施設コード/削除/ULT施設名(漢字)/ULT施設住所(漢字)/郵便番号/
     施設電話番号/施設区分名/病院種別/都道府県. After a submitted search,
     「検索件数が500件を超えています 検索項目を見直してください」 is shown when
     ultmarc.is_data_overflow_max_length() and 「対象のデータが存在しません」 when ultmarc.is_data_empty(),
     both gated on ultmarc.is_form_submitted().]
\ No newline at end of file
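docSearch.html and instSearch.html both branch on ultmarc.is_form_submitted(), ultmarc.is_data_overflow_max_length() and ultmarc.is_data_empty(), but the helpers themselves are outside these patches. Read purely from the on-screen messages, a minimal sketch of what such a view model could look like follows; the class name, constructor and the 500-row cap are assumptions, not the project's code.

# Hypothetical view-model sketch inferred from the template messages; not the actual implementation.
MAX_SEARCH_RESULT_ROWS = 500  # the 「500件」 limit quoted in the overflow message


class UltmarcSearchView:
    def __init__(self, rows: list, form_submitted: bool):
        self._rows = rows
        self._form_submitted = form_submitted

    def is_form_submitted(self) -> bool:
        # The warning blocks only render after a search has actually been run
        return self._form_submitted

    def is_data_overflow_max_length(self) -> bool:
        # 「検索件数が500件を超えています」 case
        return len(self._rows) > MAX_SEARCH_RESULT_ROWS

    def is_data_empty(self) -> bool:
        # 「対象のデータが存在しません」 case
        return len(self._rows) == 0


view = UltmarcSearchView(rows=[], form_submitted=True)
print(view.is_form_submitted() and view.is_data_empty())  # True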
From a8de16a3bd5492a1a5538fa77079486e3b3f394 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=87=8E=E9=96=93?=
Date: Thu, 6 Jul 2023 13:03:58 +0900
Subject: [PATCH 102/103] =?UTF-8?q?=E6=8C=87=E6=91=98=E4=BA=8B=E9=A0=85?=
 =?UTF-8?q?=E4=BF=AE=E6=AD=A3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ecs/jskult-batch-daily/src/aws/s3.py                  |  4 ++--
 .../src/batch/ultmarc/output_vjsk_inst_pharm_data.py | 10 ++++------
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py
index 86ffc169..764b600c 100644
--- a/ecs/jskult-batch-daily/src/aws/s3.py
+++ b/ecs/jskult-batch-daily/src/aws/s3.py
@@ -159,14 +159,14 @@ class VjskSendBucket(S3Bucket):
     _bucket_name = environment.VJSK_DATA_BUCKET
     _send_folder = environment.VJSK_DATA_SEND_FOLDER
 
-    def upload_vjsk_csv_file(self, vjsk_create_csv: str, csv_file_path: str):
+    def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str):
         # S3バケットにファイルを移動
         csv_file_name = f'{self._send_folder}/{vjsk_create_csv}'
         s3_client = S3Client()
         s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name)
         return
 
-    def backup_vjsk_csv_file(self, dat_file_key: str, datetime_key: str):
+    def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str):
         # バックアップバケットにコピー
         vjsk_backup_bucket = VjskBackupBucket()
         dat_key = f'{self._send_folder}/{dat_file_key}'
diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py
index f72e8d92..838dae34 100644
--- a/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py
+++ b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py
@@ -45,15 +45,14 @@ def exec():
     vjsk_bucket = VjskSendBucket()
     try:
         # s3へデータ移動
-        vjsk_bucket.upload_vjsk_csv_file(vjsk_csv_file_name, csv_file_path)
+        vjsk_bucket.upload_inst_pharm_csv_file(vjsk_csv_file_name, csv_file_path)
     except Exception as e:
         logger.info('S3バケットにCSVデータを作成できませんでした。')
         raise e
 
     try:
         # 処理後ファイルをバックアップ
-        batch_context = BatchContext.get_instance()
-        vjsk_bucket.backup_vjsk_csv_file(vjsk_csv_file_name, batch_context.syor_date)
+        vjsk_bucket.backup_inst_pharm_csv_file(vjsk_csv_file_name, batch_context.syor_date)
     except Exception as e:
         logger.info('バックアップバケットへCSVデータをコピーできませんでした。')
         raise e
@@ -142,7 +141,7 @@ def select_inst_record(db):
     """
         return db.execute_select(sql)
     except Exception as e:
-        logger.debug(f'{sql_err_msg}')
+        logger.debug(sql_err_msg)
         raise e
 
 
@@ -220,7 +219,7 @@ def select_pharm_record(db):
     """
        return db.execute_select(sql)
     except Exception as e:
-        logger.debug(f'{sql_err_msg}')
+        logger.debug(sql_err_msg)
         raise e
 
 
@@ -266,7 +265,6 @@ def make_csv_data(record_inst: list, record_pharm: list, vjsk_csv_file_name: str
             writer.writerow(csv_data)
 
     except Exception as e:
-        logger.info('ワークデータの作成に失敗しました。')
         logger.info('CSVデータの作成に失敗しました。')
         raise e
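The output_vjsk_inst_pharm_data.py hunk above drops the local batch_context = BatchContext.get_instance() call inside exec() while still reading batch_context.syor_date, which implies a module-level reference is already available. The snippet below is only an assumed illustration of that get_instance() singleton pattern; the project's BatchContext is defined elsewhere and is not reproduced here.

# Assumed illustration of the singleton access implied by the hunk above; not the project's code.
class BatchContext:
    _instance = None

    def __init__(self):
        self.syor_date = None  # processing date (yyyy/mm/dd), set by the job controller

    @classmethod
    def get_instance(cls):
        # Lazily create the shared context, then always hand back the same object
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance


# A module-level handle like this makes a second get_instance() call inside exec() redundant
batch_context = BatchContext.get_instance()
assert batch_context is BatchContext.get_instance()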
From 732557cb47eb1a7862c9a68b6659ee6014d7758d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=87=8E=E9=96=93?=
Date: Mon, 10 Jul 2023 19:12:01 +0900
Subject: [PATCH 103/103] =?UTF-8?q?staging=E3=81=A7=E3=81=AE=E5=AE=9F?=
 =?UTF-8?q?=E8=A1=8C=E3=81=AE=E7=82=BA=E3=81=AB=E4=BF=AE=E6=AD=A3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ecs/jskult-batch-daily/.env.example                           | 2 +-
 ecs/jskult-batch-daily/src/aws/s3.py                          | 2 +-
 ecs/jskult-batch-daily/src/system_var/environment.py          | 2 +-
 .../tests/batch/vjsk/vjsk_file_check/conftest.py              | 4 ++--
 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py | 4 ++--
 s3/config/jskult/task_settings/batch_daily_task_settings.env  | 3 ++-
 6 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example
index 1683e935..1e15cdb0 100644
--- a/ecs/jskult-batch-daily/.env.example
+++ b/ecs/jskult-batch-daily/.env.example
@@ -15,7 +15,7 @@ JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt
 VJSK_DATA_SEND_FOLDER=send
 VJSK_DATA_BUCKET=*************
 JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt
-JSKULT_DATA_FOLDER_RECV=**********************
+VJSK_DATA_RECEIVE_FOLDER=**********************
 # 連携データ抽出期間
 SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0
 # 洗替対象テーブル名
diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py
index 764b600c..a4ebc30a 100644
--- a/ecs/jskult-batch-daily/src/aws/s3.py
+++ b/ecs/jskult-batch-daily/src/aws/s3.py
@@ -117,7 +117,7 @@ class VjskBackupBucket(JskUltBackupBucket):
 
 class VjskReceiveBucket(S3Bucket):
     _bucket_name = environment.VJSK_DATA_BUCKET
-    _recv_folder = environment.JSKULT_DATA_FOLDER_RECV
+    _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER
 
     _s3_file_list = None
 
diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py
index dd45ae01..42ed6073 100644
--- a/ecs/jskult-batch-daily/src/system_var/environment.py
+++ b/ecs/jskult-batch-daily/src/system_var/environment.py
@@ -19,7 +19,7 @@ JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALEND
 VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER']
 VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET']
 JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME']
-JSKULT_DATA_FOLDER_RECV = os.environ['JSKULT_DATA_FOLDER_RECV']
+VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER']
 
 # 初期値がある環境変数
 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py
index c315147a..dccdd0df 100644
--- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py
+++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py
@@ -15,12 +15,12 @@ def s3_client():
 
 
 @pytest.fixture
 def bucket_name():
-    return os.environ["JSKULT_DATA_BUCKET"]
+    return os.environ["VJSK_DATA_BUCKET"]
 
 
 @pytest.fixture
 def receive_folder():
-    return os.environ["JSKULT_DATA_FOLDER_RECV"]
+    return os.environ["VJSK_DATA_RECEIVE_FOLDER"]
 
 # TODO 共通fixtureにして15個固定でput/delete、各個別fixtureで15個から引き算でdeleteする
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py
index ea29eb63..dc77a65f 100644
--- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py
+++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py
@@ -15,12 +15,12 @@ def s3_client():
 
 
 @pytest.fixture
 def bucket_name():
-    return os.environ["JSKULT_DATA_BUCKET"]
+    return os.environ["VJSK_DATA_BUCKET"]
 
 
 @pytest.fixture
 def receive_folder():
-    return os.environ["JSKULT_DATA_FOLDER_RECV"]
+    return os.environ["VJSK_DATA_RECEIVE_FOLDER"]
 
 @pytest.fixture
diff --git a/s3/config/jskult/task_settings/batch_daily_task_settings.env b/s3/config/jskult/task_settings/batch_daily_task_settings.env
index 2590fe79..12c1e83c 100644
--- a/s3/config/jskult/task_settings/batch_daily_task_settings.env
+++ b/s3/config/jskult/task_settings/batch_daily_task_settings.env
@@ -7,10 +7,11 @@ ULTMARC_BACKUP_FOLDER=ultmarc
 VJSK_BACKUP_FOLDER=vjsk
 JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar
 JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt
-JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILENAME=jskult_wholesaler_stock_input_day_list.txt
+JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt
 SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0
 SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau
 DB_CONNECTION_MAX_RETRY_ATTEMPT=4
 DB_CONNECTION_RETRY_INTERVAL_INIT=5
 DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
 DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50
+SALES_LAUNDERING_TARGET_YEAR_OFFSET=5
\ No newline at end of file
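PATCH 103 renames JSKULT_DATA_BUCKET and JSKULT_DATA_FOLDER_RECV to VJSK_DATA_BUCKET and VJSK_DATA_RECEIVE_FOLDER across the env files, environment.py and the test fixtures. One quick way to confirm a staging task picks up the new names is a standalone listing against the receive prefix; this is an illustrative check that assumes boto3, AWS credentials, and both variables being exported, and it is not part of the patch itself.

import os

import boto3

# Illustrative smoke check for the renamed variables; assumes credentials and env vars are set.
bucket = os.environ['VJSK_DATA_BUCKET']
recv_prefix = os.environ['VJSK_DATA_RECEIVE_FOLDER']

s3 = boto3.client('s3')
response = s3.list_objects_v2(Bucket=bucket, Prefix=recv_prefix)

# Skip directory placeholder keys (ending in '/') and report what is waiting under the prefix
files = [obj['Key'] for obj in response.get('Contents', []) if not obj['Key'].endswith('/')]
print(f'{len(files)} file(s) under s3://{bucket}/{recv_prefix}')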