From 9ba4eda8a32afa1e1c275032d56cb22bc3f4544c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 25 Apr 2023 12:37:08 +0900 Subject: [PATCH 01/86] =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E5=AD=98=E5=9C=A8=E3=83=81=E3=82=A7=E3=83=83=E3=82=AF=E3=81=BE?= =?UTF-8?q?=E3=81=A7=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 3 + ecs/jskult-batch-daily/src/aws/s3.py | 27 ++- .../src/batch/common/batch_context.py | 9 + .../common/calendar_wholestocksaler_file.py | 32 ++++ .../src/batch/vjsk/vjsk_importer.py | 143 +++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 166 ++++++++++++++++++ ecs/jskult-batch-daily/src/jobctrl_daily.py | 2 + .../src/system_var/environment.py | 3 + 8 files changed, 384 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..d95322fb 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -11,3 +11,6 @@ ULTMARC_BACKUP_FOLDER=ultmarc JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt +JSKULT_DATA_BUCKET=********************** +JSKULT_DATA_FOLDER_RECV=********************** diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2ac3efe6..2aebff4b 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,3 +1,4 @@ +import io import os.path as path import tempfile @@ 
-16,7 +17,8 @@ class S3Client: return [] contents = response['Contents'] # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする - objects = [{'filename': content['Key'], 'size': content['Size']} for content in contents if not content['Key'].endswith('/')] + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] return objects def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: @@ -89,6 +91,16 @@ class ConfigBucket(S3Bucket): f.seek(0) return temporary_file_path + def download_wholesaler_stock_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + f.seek(0) + return temporary_file_path + class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET @@ -96,3 +108,16 @@ class JskUltBackupBucket(S3Bucket): class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBucket(S3Bucket): + # TODO:V実消化バケットから見たり取ってきたりする実装をやる + _bucket_name = environment.JSKULT_DATA_BUCKET + _recv_folder = environment.JSKULT_DATA_FOLDER_RECV + + def get_file_list(self): + return self._s3_client.list_objects(self._bucket_name, self._recv_folder) + + # def download_data_file(self, data_filename: str): + # temporary_dir = tempfile.mkdtemp() + # temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') diff --git a/ecs/jskult-batch-daily/src/batch/common/batch_context.py b/ecs/jskult-batch-daily/src/batch/common/batch_context.py index 3b3ac157..b493ecca 100644 --- a/ecs/jskult-batch-daily/src/batch/common/batch_context.py +++ 
b/ecs/jskult-batch-daily/src/batch/common/batch_context.py @@ -3,6 +3,7 @@ class BatchContext: __syor_date: str # 処理日(yyyy/mm/dd形式) __is_not_business_day: bool # 日次バッチ起動日フラグ __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ + __is_import_target_vjsk_stockslipdata: bool # 卸在庫データ取込対象フラグ def __init__(self) -> None: self.__is_not_business_day = False @@ -37,3 +38,11 @@ class BatchContext: @is_ultmarc_imported.setter def is_ultmarc_imported(self, flag: bool): self.__is_ultmarc_imported = flag + + @property + def is_import_target_vjsk_stockslipdata(self): + return self.__is_import_target_vjsk_stockslipdata + + @is_import_target_vjsk_stockslipdata.setter + def is_import_target_vjsk_stockslipdata(self, flag: bool): + self.__is_import_target_vjsk_stockslipdata = flag diff --git a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py new file mode 100644 index 00000000..ba687514 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py @@ -0,0 +1,32 @@ +from src.system_var import constants + + +class CalendarWholwSalerStockFile: + """V実消化卸在庫データ連携日ファイル""" + + __calendar_file_lines: list[str] + + def __init__(self, calendar_file_path): + with open(calendar_file_path) as f: + self.__calendar_file_lines: list[str] = f.readlines() + + def compare_date(self, date_str: str) -> bool: + """与えられた日付がV実消化卸在庫データ連携日ファイル内に含まれているかどうか + V実消化卸在庫データ連携日ファイル内の日付はyyyy/mm/ddで書かれている前提 + コメント(#)が含まれている行は無視される + + Args: + date_str (str): yyyy/mm/dd文字列 + + Returns: + bool: 含まれていればTrue + """ + for calendar_date in self.__calendar_file_lines: + # コメント行が含まれている場合はスキップ + if constants.CALENDAR_COMMENT_SYMBOL in calendar_date: + continue + + if date_str in calendar_date: + return True + + return False diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py new file mode 100644 index 00000000..4c2cb0ee --- /dev/null +++ 
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -0,0 +1,143 @@ +from src.aws.s3 import ConfigBucket, VjskBucket +from src.batch.common.batch_context import BatchContext +from src.batch.common.calendar_wholestocksaler_file import \ + CalendarWholwSalerStockFile +from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger + +# from src.batch.datachange import emp_chg_inst_lau + +logger = get_logger('V実消化データ取込') +batch_context = BatchContext.get_instance() +vjsk_recv_bucket = VjskBucket() +vjsk_mapper = VjskRecvFileMapper() + + +def _check_if_file_exists(src_list: list, key: str) -> bool: + pref = vjsk_mapper.get_file_prefix(key) + suff = vjsk_mapper.get_file_suffix(key) + for idx, elem in enumerate(src_list): + buf = elem.get("filename") + filename = buf[buf.rfind("/") + 1:] + if filename.startswith(pref) and filename.endswith(suff): + return True + return False + + +def _check_received_files(): + """V実消化連携データ存在確認処理""" + logger.debug('V実消化連携データ存在確認処理:開始') + + # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 + received_files = vjsk_recv_bucket.get_file_list() + logger.debug(f'ファイル一覧{received_files}') + + # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) + if batch_context.is_import_target_vjsk_stockslipdata: + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): + raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸販売データ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_SLIP_DATA): + raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸組織変換マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): + raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 施設統合マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): + raise 
BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_MST): + raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸ホールディングスマスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_HLD_MST): + raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 施設マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_FCL_MST): + raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 メーカー卸組織展開表 + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): + raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 取引区分マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): + raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 製品マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): + raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 製品価格マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): + raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 卸得意先情報マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): + raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_files}') + + # ファイル存在確認 MDBコード変換マスタ + if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): + raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') + + logger.debug('V実消化連携データ存在確認処理:終了') + + return True + + +def _import_file_to_db(): + logger.debug('V実消化取込処理:開始') + + # diff_upsertに変わるやつを呼び出す + # emp_chg_inst_lau.batch_process() みたいに + + logger.debug('V実消化取込処理:終了') + + +def _determine_today_is_stockslipdata_target(): + 
try: + # 設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する + today = batch_context.syor_date + + holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() + targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) + ret = targetdays.compare_date(today) + except Exception as e: + logger.error(f'{e}') + raise e + return ret + + +def exec(): + """V実消化データ取込""" + logger.info('Start Jitsusyouka Torikomi PGM.') + + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる + logger.debug('卸在庫データ取込対象日であるかを判定') + batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() + logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + if batch_context.is_import_target_vjsk_stockslipdata: + logger.info('卸在庫データ取込対象日です') + + # V実消化データファイル受領チェック + logger.debug('V実消化データファイル受領チェック:開始') + try: + _check_received_files() + except BatchOperationException as e: + logger.error('受領したV実消化データファイルに欠落があります') + raise e + logger.debug('V実消化データファイル受領チェック:終了') + + # データベース取込 + logger.debug('V実消化データ取込:開始') + try: + _import_file_to_db() + except Exception as e: + logger.error(f'データベース登録失敗 {e}') + logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py new file mode 100644 index 00000000..6a997c9f --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -0,0 +1,166 @@ +class VjskRecvFileMapper: + CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ + CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ + CONDKEY_WHS_MST = "WHS_MST" # V卸マスタ + CONDKEY_MKR_ORG_HORIZON = "MKR_ORG_HORIZON" # Vメーカー卸組織展開表 + CONDKEY_ORG_CNV_MST = "ORG_CNV_MST" # V卸組織変換マスタ + CONDKEY_TRAN_KBN_MST = "TRAN_KBN_MST" # V取引区分マスタ + CONDKEY_FCL_MST = "FCL_MST" # V施設マスタ + CONDKEY_PHM_PRD_MST = "PHM_PRD_MST" # V製品マスタ + CONDKEY_PHM_PRICE_MST = "PHM_PRICE_MST" # V製品価格マスタ + CONDKEY_VOP_HCO_MERGE = "VOP_HCO_MERGE" # V施設統合マスタ + CONDKEY_WHS_CUSTOMER_MST = 
"WHS_CUSTOMER_MST" # V卸得意先情報マスタ + CONDKEY_MDB_CONV_MST = "MDB_CONV_MST" # MDBコード変換表 + CONDKEY_STOCK_SLIP_DATA = "STOCK_SLIP_DATA" # 卸在庫データ + CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ + CONDKEY_LOT_NUM_MS = "LOT_NUM_MS" # ロットマスタデータ + + _KEY_FILE_PREFIX = "file_prefix" + _KEY_FILE_SUFFIX = "file_suffix" + _KEY_ORG_TABLE = "org_table" + _KEY_SRC_TABLE = "src_table" + _VJSK_INTERFACE_MAPPING = { + # 販売実績データ + CONDKEY_SLIP_DATA: { + _KEY_FILE_PREFIX: "slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.sales", + _KEY_SRC_TABLE: "src05.sales" + }, + + # V卸ホールディングスマスタ + CONDKEY_HLD_MST: { + _KEY_FILE_PREFIX: "hld_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.hld_mst_v", + _KEY_SRC_TABLE: "src05.hld_mst_v" + }, + + # V卸マスタ + CONDKEY_WHS_MST: { + _KEY_FILE_PREFIX: "whs_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whs_mst_v", + _KEY_SRC_TABLE: "src05.whs_mst_v" + }, + + # Vメーカー卸組織展開表 + CONDKEY_MKR_ORG_HORIZON: { + _KEY_FILE_PREFIX: "mkr_org_horizon_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", + _KEY_SRC_TABLE: "src05.mkr_org_horizon_v" + }, + + # V卸組織変換マスタ + CONDKEY_ORG_CNV_MST: { + _KEY_FILE_PREFIX: "org_cnv_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.org_cnv_mst_v", + _KEY_SRC_TABLE: "src05.org_cnv_mst_v" + }, + + # V取引区分マスタ + CONDKEY_TRAN_KBN_MST: { + _KEY_FILE_PREFIX: "tran_kbn_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", + _KEY_SRC_TABLE: "src05.tran_kbn_mst_v" + }, + + # V施設マスタ + CONDKEY_FCL_MST: { + _KEY_FILE_PREFIX: "fcl_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.fcl_mst_v", + _KEY_SRC_TABLE: "src05.fcl_mst_v" + }, + + # V製品マスタ + CONDKEY_PHM_PRD_MST: { + _KEY_FILE_PREFIX: "phm_prd_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.phm_prd_mst_v", + _KEY_SRC_TABLE: "src05.phm_prd_mst_v" + }, + + # V製品価格マスタ + CONDKEY_PHM_PRICE_MST: { + _KEY_FILE_PREFIX: "phm_price_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: 
"org05.phm_price_mst_v", + _KEY_SRC_TABLE: "src05.phm_price_mst_v" + }, + + # V施設統合マスタ + CONDKEY_VOP_HCO_MERGE: { + _KEY_FILE_PREFIX: "vop_hco_merge_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.vop_hco_merge_v", + _KEY_SRC_TABLE: "src05.vop_hco_merge_v" + }, + + # V卸得意先情報マスタ + CONDKEY_WHS_CUSTOMER_MST: { + _KEY_FILE_PREFIX: "whs_customer_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whs_customer_mst_v", + _KEY_SRC_TABLE: "src05.whs_customer_mst_v" + }, + + # MDBコード変換表 + CONDKEY_MDB_CONV_MST: { + _KEY_FILE_PREFIX: "mdb_conv_mst_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.mdb_conv_mst_v", + _KEY_SRC_TABLE: "src05.mdb_conv_mst_v" + }, + + # 卸在庫データ + CONDKEY_STOCK_SLIP_DATA: { + _KEY_FILE_PREFIX: "stock_slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.whole_stock", + _KEY_SRC_TABLE: "src05.whole_stock" + }, + + # 生物由来データ + CONDKEY_BIO_SLIP_DATA: { + _KEY_FILE_PREFIX: "bio_slip_data_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.bio_sales", + _KEY_SRC_TABLE: "src05.bio_sales" + }, + + # ロットマスタデータ + CONDKEY_LOT_NUM_MS: { + _KEY_FILE_PREFIX: "lot_num_ms_", + _KEY_FILE_SUFFIX: ".tsv", + _KEY_ORG_TABLE: "org05.lot_num_mst", + _KEY_SRC_TABLE: "src05.lot_num_mst" + }, + } + + def get_file_prefix(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_PREFIX) + return ret + + def get_file_suffix(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_SUFFIX) + return ret + + def get_org_table(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_ORG_TABLE) + return ret + + def get_src_table(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = 
self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) + return ret diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 370f2179..dc7146b7 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -9,6 +9,7 @@ from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile from src.batch.laundering import create_dcf_inst_merge, create_mst_inst from src.batch.ultmarc import ultmarc_process +from src.batch.vjsk import vjsk_importer from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants @@ -84,6 +85,7 @@ def exec(): logger.info('日次処理(V実消化)') try: logger.info('V実消化取込:起動') + vjsk_importer.exec() logger.info('V実消化取込:終了') except BatchOperationException as e: logger.exception(f'V実消化取込処理エラー(異常終了){e}') diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..6a2fca0b 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -15,6 +15,9 @@ ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] +JSKULT_DATA_BUCKET = os.environ['JSKULT_DATA_BUCKET'] +JSKULT_DATA_FOLDER_RECV = os.environ['JSKULT_DATA_FOLDER_RECV'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') From 1fd6633bc838c9a626c2daa76a565ebe468b3a5b Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 28 Apr 2023 19:51:59 +0900 Subject: 
[PATCH 02/86] =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=83=AD=E3=83=BC=E3=83=89=E3=81=8B=E3=82=89DB=E7=99=BB?= =?UTF-8?q?=E9=8C=B2=E3=81=AE=E5=AE=9F=E8=A3=85(=E5=AE=9F=E8=A1=8C?= =?UTF-8?q?=E7=A2=BA=E8=AA=8D=E3=81=AF=E6=9C=AA)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 17 +++-- .../src/batch/vjsk/vjsk_data_load_manager.py | 56 +++++++++++++++++ .../src/batch/vjsk/vjsk_importer.py | 63 +++++++++++++++++-- .../src/batch/vjsk/vjsk_recv_file_manager.py | 60 ++++++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 9 +++ 5 files changed, 196 insertions(+), 9 deletions(-) create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py create mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2aebff4b..79c80db9 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -115,9 +115,16 @@ class VjskBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV - def get_file_list(self): - return self._s3_client.list_objects(self._bucket_name, self._recv_folder) + _s3_file_list = None - # def download_data_file(self, data_filename: str): - # temporary_dir = tempfile.mkdtemp() - # temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + def get_s3_file_list(self): + self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) + return self._s3_file_list + + def download_data_file(self, data_filename: str): + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, data_filename, f) + 
f.seek(0) + return temporary_file_path diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py new file mode 100644 index 00000000..a412c3c7 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -0,0 +1,56 @@ +from src.batch.vjsk.vjsk_recv_file_manager import (VjskDatFile, + VjskRecvFileManager) +from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.db.database import Database +from src.logging.get_logger import get_logger + +logger = get_logger('V実消化データ取込(DB登録)') +mapper = VjskRecvFileMapper() + + +class JjskDataLoadManager: + def _import_to_db(dat_file: VjskDatFile, condkey: str): + db = Database.get_instance() + table_name_org = mapper.get_org_table(condkey) + table_name_src = mapper.get_org_table(condkey) + + try: + db.connect() # TODO:接続オプション local_infile = True が必要? + db.begin() + + # orgをtruncate + f"TRUNCATE TABLE {table_name_org};" + + # orgにload ※warningは1148エラーになるらしい + sql = f"LOAD DATA LOCAL INFILE {dat_file} INTO TABLE {table_name_org} FIELDS TERMINATED BY '\t' ENCLOSED BY ""'"" IGNORE 1 LINES;" + cnt = db.execute(sql) + logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') + + # org→srcにinsert select + # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい + f"INSERT INTO {table_name_src} SELECT * FROM {table_name_org};" + + db.commit() + except Exception as e: # TODO:DB例外だけキャッチしたい + db.rollback() + logger.error(e) + raise e + finally: + db.disconnect() + return + + def Load(self, target: dict): + # target : {"condkey": key, "src_file_path":local_file_path} + + # データファイルオープン + dat_file = VjskRecvFileManager.file_open(target["local_file_path"]) + + # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) + # TODO: ファイルオンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 + # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ + dat_file = dat_file + + # データベース登録 + 
self._import_to_db(dat_file, target["condkey"]) + + return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 4c2cb0ee..839058ab 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -2,6 +2,7 @@ from src.aws.s3 import ConfigBucket, VjskBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_wholestocksaler_file import \ CalendarWholwSalerStockFile +from src.batch.vjsk.vjsk_data_load_manager import JjskDataLoadManager from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger @@ -30,7 +31,7 @@ def _check_received_files(): logger.debug('V実消化連携データ存在確認処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 - received_files = vjsk_recv_bucket.get_file_list() + received_files = vjsk_recv_bucket.get_s3_file_list() logger.debug(f'ファイル一覧{received_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) @@ -94,15 +95,67 @@ def _check_received_files(): def _import_file_to_db(): logger.debug('V実消化取込処理:開始') - # diff_upsertに変わるやつを呼び出す - # emp_chg_inst_lau.batch_process() みたいに + # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 + received_s3_files = vjsk_recv_bucket.get_s3_file_list() + + # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する + target_dict = {} + for s3_file_path in received_s3_files: + local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path) + key = vjsk_mapper.get_condkey_by_s3_file_path(local_file_path) + if key is not None: + target_dict[key] = {"condkey": key, "src_file_path": local_file_path} + logger.debug(f'S3ファイルパス辞書{target_dict}') + + # TODO: diff_upsertに変わるやつを呼び出す + # TODO: emp_chg_inst_lau.batch_process() みたいに + + # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) + if batch_context.is_import_target_vjsk_stockslipdata: + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) 
+ + # # # ファイル存在確認 卸販売データ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + + # # # ファイル存在確認 卸組織変換マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + + # # # ファイル存在確認 施設統合マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + + # # # ファイル存在確認 卸マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + + # # # ファイル存在確認 卸ホールディングスマスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + + # # # ファイル存在確認 施設マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + + # # # ファイル存在確認 メーカー卸組織展開表 + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + + # # # ファイル存在確認 取引区分マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + + # # # ファイル存在確認 製品マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + + # # # ファイル存在確認 製品価格マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + + # # # ファイル存在確認 卸得意先情報マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + + # # # ファイル存在確認 MDBコード変換マスタ + # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) logger.debug('V実消化取込処理:終了') def _determine_today_is_stockslipdata_target(): + """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する""" try: - # 設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する today = batch_context.syor_date holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() @@ -128,7 +181,9 @@ def exec(): # V実消化データファイル受領チェック logger.debug('V実消化データファイル受領チェック:開始') try: + # S3バケット上でV実消化データファイルの存在チェックをする _check_received_files() + except BatchOperationException as e: logger.error('受領したV実消化データファイルに欠落があります') raise e diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py new file mode 100644 index 00000000..5a0dd54f --- /dev/null +++ 
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py @@ -0,0 +1,60 @@ +import csv +from io import TextIOWrapper + + +class VjskRecvFileManager: + layout_class: str + records: list[str] + + def __init__(self, dat_line: list[str]) -> None: + self.layout_class = dat_line[0] + self.records = dat_line + + +class VjskDatFile: + """V実消化データファイル""" + + lines: list[VjskRecvFileManager] + success_count: int = 0 + error_count: int = 0 + total_count: int = 0 + __i: int = 0 + + def __iter__(self): + return self + + def __next__(self) -> VjskRecvFileManager: + if self.__i == len(self.lines): + raise StopIteration() + line = self.lines[self.__i] + self.__i += 1 + return line + + def __init__(self, file: TextIOWrapper) -> None: + reader = csv.reader(file) + csv_rows = [VjskRecvFileManager(row) for row in reader] + + self.lines = csv_rows + self.total_count = len(csv_rows) + + def count_up_success(self): + self.success_count += 1 + + def count_up_error(self): + self.error_count += 1 + + @classmethod + def file_open(cls, local_file_path: str): + """V実消化データファイルを読み込み、新しいインスタンスを作成する + + Args: + local_file_path (str): ローカルのファイルパス + + Returns: + VjskDatFile: このクラスのインスタンス + """ + # cp932(Shift-JIS Windows拡張)でファイルを読み込む + file = open(local_file_path, encoding='cp932') + instance = cls(file) + file.close() + return instance diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 6a997c9f..d89f5063 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -164,3 +164,12 @@ class VjskRecvFileMapper: if condkey in self._VJSK_INTERFACE_MAPPING: ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) return ret + + def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: + ret = None + filename = s3_file_path[s3_file_path.rfind("/") + 1:] + for element in 
self._VJSK_INTERFACE_MAPPING: + if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): + ret = element + break + return ret From aca85704dafb90c5bbb68e6ed3d11b45222fe433 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 16:25:11 +0900 Subject: [PATCH 03/86] =?UTF-8?q?LOAD=20DATA=20=20LOCAL=20INFILE=E3=81=A73?= =?UTF-8?q?948=E3=82=A8=E3=83=A9=E3=83=BC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 2 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 28 +++--- .../src/batch/vjsk/vjsk_importer.py | 42 ++++----- .../src/batch/vjsk/vjsk_recv_file_manager.py | 90 +++++++++---------- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 5 +- 5 files changed, 89 insertions(+), 78 deletions(-) diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 79c80db9..2ee93eb8 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -123,7 +123,7 @@ class VjskBucket(S3Bucket): def download_data_file(self, data_filename: str): temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._folder}/", "")}') + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') with open(temporary_file_path, mode='wb') as f: self._s3_client.download_file(self._bucket_name, data_filename, f) f.seek(0) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index a412c3c7..9aa80070 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -1,5 +1,4 @@ -from src.batch.vjsk.vjsk_recv_file_manager import (VjskDatFile, - VjskRecvFileManager) +# from 
src.batch.vjsk.vjsk_recv_file_manager import VjskDatFile from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.db.database import Database from src.logging.get_logger import get_logger @@ -9,20 +8,26 @@ mapper = VjskRecvFileMapper() class JjskDataLoadManager: - def _import_to_db(dat_file: VjskDatFile, condkey: str): + def __init__(self): + pass + + def _import_to_db(src_file_name: str, condkey: str): db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) - table_name_src = mapper.get_org_table(condkey) + table_name_src = mapper.get_src_table(condkey) try: db.connect() # TODO:接続オプション local_infile = True が必要? db.begin() # orgをtruncate - f"TRUNCATE TABLE {table_name_org};" + db.execute(f"TRUNCATE TABLE {table_name_org};") + + # load DATA local infileステートメント実行許可設定 + db.execute("SET GLOBAL local_infile=on;") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE {dat_file} INTO TABLE {table_name_org} FIELDS TERMINATED BY '\t' ENCLOSED BY ""'"" IGNORE 1 LINES;" + sql = f"LOAD DATA LOCAL INFILE '{src_file_name}' INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY \"'\" IGNORE 1 LINES;" cnt = db.execute(sql) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') @@ -39,18 +44,21 @@ class JjskDataLoadManager: db.disconnect() return + @classmethod def Load(self, target: dict): + logger.debug(f'JjskDataLoadManager#load start target:{target}') # target : {"condkey": key, "src_file_path":local_file_path} # データファイルオープン - dat_file = VjskRecvFileManager.file_open(target["local_file_path"]) + local_file_name = target["src_file_path"] + # dat_file = VjskDatFile.retrieve_from_file(local_file_name) # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) - # TODO: ファイルオンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 + # TODO: ファイルエンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ - dat_file = dat_file # データベース登録 - 
self._import_to_db(dat_file, target["condkey"]) + self._import_to_db(local_file_name, target["condkey"]) + logger.debug('JjskDataLoadManager#load end') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 839058ab..7db2983e 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -9,7 +9,7 @@ from src.logging.get_logger import get_logger # from src.batch.datachange import emp_chg_inst_lau -logger = get_logger('V実消化データ取込') +_logger = get_logger('V実消化データ取込') batch_context = BatchContext.get_instance() vjsk_recv_bucket = VjskBucket() vjsk_mapper = VjskRecvFileMapper() @@ -28,11 +28,11 @@ def _check_if_file_exists(src_list: list, key: str) -> bool: def _check_received_files(): """V実消化連携データ存在確認処理""" - logger.debug('V実消化連携データ存在確認処理:開始') + _logger.debug('V実消化連携データ存在確認処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 received_files = vjsk_recv_bucket.get_s3_file_list() - logger.debug(f'ファイル一覧{received_files}') + _logger.debug(f'ファイル一覧{received_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: @@ -87,13 +87,13 @@ def _check_received_files(): if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') - logger.debug('V実消化連携データ存在確認処理:終了') + _logger.debug('V実消化連携データ存在確認処理:終了') return True def _import_file_to_db(): - logger.debug('V実消化取込処理:開始') + _logger.debug('V実消化取込処理:開始') # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 received_s3_files = vjsk_recv_bucket.get_s3_file_list() @@ -101,11 +101,11 @@ def _import_file_to_db(): # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する target_dict = {} for s3_file_path in received_s3_files: - local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path) - key = vjsk_mapper.get_condkey_by_s3_file_path(local_file_path) + local_file_path = 
vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) if key is not None: target_dict[key] = {"condkey": key, "src_file_path": local_file_path} - logger.debug(f'S3ファイルパス辞書{target_dict}') + _logger.debug(f'S3ファイルパス辞書{target_dict}') # TODO: diff_upsertに変わるやつを呼び出す # TODO: emp_chg_inst_lau.batch_process() みたいに @@ -150,7 +150,7 @@ def _import_file_to_db(): # # # ファイル存在確認 MDBコード変換マスタ # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) - logger.debug('V実消化取込処理:終了') + _logger.debug('V実消化取込処理:終了') def _determine_today_is_stockslipdata_target(): @@ -162,37 +162,39 @@ def _determine_today_is_stockslipdata_target(): targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) ret = targetdays.compare_date(today) except Exception as e: - logger.error(f'{e}') + _logger.error(f'{e}') raise e return ret def exec(): """V実消化データ取込""" - logger.info('Start Jitsusyouka Torikomi PGM.') + _logger.info('Start Jitsusyouka Torikomi PGM.') # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - logger.debug('卸在庫データ取込対象日であるかを判定') + _logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + _logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') if batch_context.is_import_target_vjsk_stockslipdata: - logger.info('卸在庫データ取込対象日です') + _logger.info('卸在庫データ取込対象日です') # V実消化データファイル受領チェック - logger.debug('V実消化データファイル受領チェック:開始') + _logger.debug('V実消化データファイル受領チェック:開始') try: # S3バケット上でV実消化データファイルの存在チェックをする _check_received_files() except BatchOperationException as e: - logger.error('受領したV実消化データファイルに欠落があります') + _logger.error('受領したV実消化データファイルに欠落があります') raise e - logger.debug('V実消化データファイル受領チェック:終了') + _logger.debug('V実消化データファイル受領チェック:終了') # データベース取込 - logger.debug('V実消化データ取込:開始') + _logger.debug('V実消化データ取込:開始') try: _import_file_to_db() except 
Exception as e: - logger.error(f'データベース登録失敗 {e}') - logger.debug('V実消化データ取込:終了') + _logger.error(f'データベース登録失敗 {e}') + raise e + + _logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py index 5a0dd54f..7ddbb766 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py @@ -1,60 +1,60 @@ -import csv -from io import TextIOWrapper +# import csv +# from io import TextIOWrapper -class VjskRecvFileManager: - layout_class: str - records: list[str] +# class VjskRecvFileManager: +# layout_class: str +# records: list[str] - def __init__(self, dat_line: list[str]) -> None: - self.layout_class = dat_line[0] - self.records = dat_line +# def __init__(self, dat_line: list[str]) -> None: +# self.layout_class = dat_line[0] +# self.records = dat_line -class VjskDatFile: - """V実消化データファイル""" +# class VjskDatFile: +# """V実消化データファイル""" - lines: list[VjskRecvFileManager] - success_count: int = 0 - error_count: int = 0 - total_count: int = 0 - __i: int = 0 +# lines: list[VjskRecvFileManager] +# success_count: int = 0 +# error_count: int = 0 +# total_count: int = 0 +# __i: int = 0 - def __iter__(self): - return self +# def __iter__(self): +# return self - def __next__(self) -> VjskRecvFileManager: - if self.__i == len(self.lines): - raise StopIteration() - line = self.lines[self.__i] - self.__i += 1 - return line +# def __next__(self) -> VjskRecvFileManager: +# if self.__i == len(self.lines): +# raise StopIteration() +# line = self.lines[self.__i] +# self.__i += 1 +# return line - def __init__(self, file: TextIOWrapper) -> None: - reader = csv.reader(file) - csv_rows = [VjskRecvFileManager(row) for row in reader] +# def __init__(self, file: TextIOWrapper) -> None: +# reader = csv.reader(file) +# csv_rows = [VjskRecvFileManager(row) for row in reader] - self.lines = csv_rows - 
self.total_count = len(csv_rows) +# self.lines = csv_rows +# self.total_count = len(csv_rows) - def count_up_success(self): - self.success_count += 1 +# def count_up_success(self): +# self.success_count += 1 - def count_up_error(self): - self.error_count += 1 +# def count_up_error(self): +# self.error_count += 1 - @classmethod - def file_open(cls, local_file_path: str): - """V実消化データファイルを読み込み、新しいインスタンスを作成する +# @classmethod +# def retrieve_from_file(cls, local_file_path: str): +# """V実消化データファイルを読み込み、新しいインスタンスを作成する - Args: - local_file_path (str): ローカルのファイルパス +# Args: +# local_file_path (str): ローカルのファイルパス - Returns: - VjskDatFile: このクラスのインスタンス - """ - # cp932(Shift-JIS Windows拡張)でファイルを読み込む - file = open(local_file_path, encoding='cp932') - instance = cls(file) - file.close() - return instance +# Returns: +# VjskDatFile: このクラスのインスタンス +# """ +# # cp932(Shift-JIS Windows拡張)でファイルを読み込む +# file = open(local_file_path, encoding='cp932') +# instance = cls(file) +# file.close() +# return instance diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index d89f5063..4aeea24f 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -168,8 +168,9 @@ class VjskRecvFileMapper: def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None filename = s3_file_path[s3_file_path.rfind("/") + 1:] - for element in self._VJSK_INTERFACE_MAPPING: + for condkey in self._VJSK_INTERFACE_MAPPING: + element = self._VJSK_INTERFACE_MAPPING.get(condkey) if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): - ret = element + ret = condkey break return ret From 4226ad9db1dc9d07cb3798b4f6da5e28e1958e59 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 18:13:23 +0900 Subject: [PATCH 04/86] 
=?UTF-8?q?org=E3=81=AE=E3=83=86=E3=83=BC=E3=83=96?= =?UTF-8?q?=E3=83=AB=E3=81=ABLOAD=20DATA=20LOCAL=20INFILE=E3=81=8C?= =?UTF-8?q?=E9=80=9A=E3=81=A3=E3=81=9F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 9 +++------ ecs/jskult-batch-daily/src/db/database.py | 2 +- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 9aa80070..af35a2aa 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -23,13 +23,10 @@ class JjskDataLoadManager: # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") - # load DATA local infileステートメント実行許可設定 - db.execute("SET GLOBAL local_infile=on;") - # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE '{src_file_name}' INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY \"'\" IGNORE 1 LINES;" - cnt = db.execute(sql) - logger.info(f'tsvデータをorgテーブルにLOAD : 件数({cnt})') + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + result = db.execute(sql, {"src_file_name": src_file_name}) + logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい diff --git a/ecs/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-daily/src/db/database.py index f67a21b9..b67c3a3d 100644 --- a/ecs/jskult-batch-daily/src/db/database.py +++ b/ecs/jskult-batch-daily/src/db/database.py @@ -44,7 +44,7 @@ class Database: host=self.__host, port=self.__port, database=self.__schema, - query={"charset": "utf8mb4"} + query={"charset": "utf8mb4", "local_infile": "1"}, ) self.__engine = create_engine( From 0962172f4d7328757ec1977d28a3300bbdae9bab Mon 
Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 8 May 2023 23:08:58 +0900 Subject: [PATCH 05/86] =?UTF-8?q?=E3=81=A8=E3=82=8A=E3=81=82=E3=81=88?= =?UTF-8?q?=E3=81=9A=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AA=E3=81=97=E3=81=A7?= =?UTF-8?q?=E9=80=9A=E3=81=9B=E3=82=8B=E7=A8=8B=E5=BA=A6=E3=81=AE=E4=BB=AE?= =?UTF-8?q?=E5=AE=9F=E8=A3=85=E5=AE=8C=E4=BA=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 16 +- .../src/batch/vjsk/vjsk_importer.py | 48 +- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 1349 ++++++++++++++++- 3 files changed, 1362 insertions(+), 51 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index af35a2aa..4b574dce 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -14,7 +14,7 @@ class JjskDataLoadManager: def _import_to_db(src_file_name: str, condkey: str): db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) - table_name_src = mapper.get_src_table(condkey) + upsert_sql = mapper.get_upsert_sql(condkey) try: db.connect() # TODO:接続オプション local_infile = True が必要? 
@@ -24,13 +24,14 @@ class JjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org}" \ + " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select - # TODO: INTO句とSELECT句はmapperに持たせてcondkeyで引っ張ってくるようにしたい - f"INSERT INTO {table_name_src} SELECT * FROM {table_name_org};" + result = db.execute(upsert_sql) + logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') db.commit() except Exception as e: # TODO:DB例外だけキャッチしたい @@ -46,13 +47,8 @@ class JjskDataLoadManager: logger.debug(f'JjskDataLoadManager#load start target:{target}') # target : {"condkey": key, "src_file_path":local_file_path} - # データファイルオープン + # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] - # dat_file = VjskDatFile.retrieve_from_file(local_file_name) - - # TODO: tsvファイルをload投入用のDMLに加工(システム日時つけたり、エンコードをUTF-8に変換したり) - # TODO: ファイルエンコード判定の参考 https://zenn.dev/takedato/articles/c3a491546f8c58 - # TODO: エンコード変換の参考 https://dev.classmethod.jp/articles/python-encoding/ # データベース登録 self._import_to_db(local_file_name, target["condkey"]) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 7db2983e..8b4b5197 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -114,41 +114,41 @@ def _import_file_to_db(): if batch_context.is_import_target_vjsk_stockslipdata: JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) - # # # ファイル存在確認 卸販売データ - # 
JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + # DB登録 卸販売データ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) - # # # ファイル存在確認 卸組織変換マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + # DB登録 卸組織変換マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) - # # # ファイル存在確認 施設統合マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + # DB登録 施設統合マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) - # # # ファイル存在確認 卸マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + # DB登録 卸マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) - # # # ファイル存在確認 卸ホールディングスマスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + # DB登録 卸ホールディングスマスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) - # # # ファイル存在確認 施設マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + # DB登録 施設マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) - # # # ファイル存在確認 メーカー卸組織展開表 - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + # DB登録 メーカー卸組織展開表 + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) - # # # ファイル存在確認 取引区分マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + # DB登録 取引区分マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) - # # # ファイル存在確認 製品マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + # DB登録 製品マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) - # # # ファイル存在確認 製品価格マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + # DB登録 製品価格マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) - # # # ファイル存在確認 卸得意先情報マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + # DB登録 卸得意先情報マスタ + 
JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) - # # # ファイル存在確認 MDBコード変換マスタ - # JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + # DB登録 MDBコード変換マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) _logger.debug('V実消化取込処理:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 4aeea24f..a6736e16 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1,3 +1,6 @@ +import textwrap + + class VjskRecvFileMapper: CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ @@ -19,13 +22,274 @@ class VjskRecvFileMapper: _KEY_FILE_SUFFIX = "file_suffix" _KEY_ORG_TABLE = "org_table" _KEY_SRC_TABLE = "src_table" + _KEY_UPSERT_SQL = "upsert_sql" _VJSK_INTERFACE_MAPPING = { # 販売実績データ CONDKEY_SLIP_DATA: { _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.sales", - _KEY_SRC_TABLE: "src05.sales" + _KEY_SRC_TABLE: "src05.sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.sales ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_WHS_ORG_CD + ,REC_CUST_CD + ,REC_COMM_CD + ,REC_TRAN_KBN + ,REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK + ,REC_URAG_NUM + ,REC_QTY + ,REC_NONYU_PRICE + ,REC_NONYU_AMT + ,REC_COMM_NAME + ,REC_NONYU_FCL_NAME + ,FREE_ITEM + ,REC_NONYU_FCL_ADDR + ,REC_NONYU_FCL_POST + ,REC_NONYU_FCL_TEL + ,REC_BEF_HSDN_YMD + ,REC_BEF_SLIP_NUM + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,HSDN_YMD + ,EXEC_DT + ,V_TRAN_CD + ,TRAN_KBN_NAME + ,WHS_ORG_CD + ,V_WHSORG_CD + ,WHS_ORG_NAME + ,WHS_ORG_KN + ,V_WHS_CD + ,WHS_NAME + ,NONYU_FCL_CD + ,V_INST_CD + ,V_INST_KN + ,V_INST_NAME + ,V_INST_ADDR + ,COMM_CD + ,COMM_NAME + ,NONYU_QTY + ,NONYU_PRICE + ,NONYU_AMT + ,SHIKIRI_PRICE + ,SHIKIRI_AMT + ,NHI_PRICE + ,NHI_AMT + 
,WHSPOS_ERR_KBN + ,HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN + ,FCL_EXIS_KBN + ,BEF_HSDN_YMD + ,BEF_SLIP_NUM + ,SLIP_ORG_KBN + ,ERR_FLG1 + ,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,ERR_FLG11 + ,ERR_FLG12 + ,ERR_FLG13 + ,ERR_FLG14 + ,ERR_FLG15 + ,ERR_FLG16 + ,ERR_FLG17 + ,ERR_FLG18 + ,ERR_FLG19 + ,ERR_FLG20 + ,KJYO_YM + ,TKSNBK_KBN + ,FCL_EXEC_KBN + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + ,t.REC_WHS_ORG_CD + ,t.REC_CUST_CD + ,t.REC_COMM_CD + ,t.REC_TRAN_KBN + ,t.REV_HSDNYMD_WRK + ,t.REV_HSDNYMD_SRK + ,t.REC_URAG_NUM + ,t.REC_QTY + ,t.REC_NONYU_PRICE + ,t.REC_NONYU_AMT + ,t.REC_COMM_NAME + ,t.REC_NONYU_FCL_NAME + ,t.FREE_ITEM + ,t.REC_NONYU_FCL_ADDR + ,t.REC_NONYU_FCL_POST + ,t.REC_NONYU_FCL_TEL + ,t.REC_BEF_HSDN_YMD + ,t.REC_BEF_SLIP_NUM + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.HSDN_YMD + ,t.EXEC_DT + ,t.V_TRAN_CD + ,t.TRAN_KBN_NAME + ,t.WHS_ORG_CD + ,t.V_WHSORG_CD + ,t.WHS_ORG_NAME + ,t.WHS_ORG_KN + ,t.V_WHS_CD + ,t.WHS_NAME + ,t.NONYU_FCL_CD + ,t.V_INST_CD + ,t.V_INST_KN + ,t.V_INST_NAME + ,t.V_INST_ADDR + ,t.COMM_CD + ,t.COMM_NAME + ,t.NONYU_QTY + ,t.NONYU_PRICE + ,t.NONYU_AMT + ,t.SHIKIRI_PRICE + ,t.SHIKIRI_AMT + ,t.NHI_PRICE + ,t.NHI_AMT + ,t.WHSPOS_ERR_KBN + ,t.HTDNYMD_ERR_KBN + ,t.PRD_EXIS_KBN + ,t.FCL_EXIS_KBN + ,t.BEF_HSDN_YMD + ,t.BEF_SLIP_NUM + ,t.SLIP_ORG_KBN + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.ERR_FLG11 + ,t.ERR_FLG12 + ,t.ERR_FLG13 + ,t.ERR_FLG14 + ,t.ERR_FLG15 + ,t.ERR_FLG16 + ,t.ERR_FLG17 + ,t.ERR_FLG18 + ,t.ERR_FLG19 + ,t.ERR_FLG20 + ,t.KJYO_YM + ,t.TKSNBK_KBN + ,t.FCL_EXEC_KBN + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.sales AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + 
,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD + ,REC_CUST_CD=t.REC_CUST_CD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_TRAN_KBN=t.REC_TRAN_KBN + ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK + ,REC_URAG_NUM=t.REC_URAG_NUM + ,REC_QTY=t.REC_QTY + ,REC_NONYU_PRICE=t.REC_NONYU_PRICE + ,REC_NONYU_AMT=t.REC_NONYU_AMT + ,REC_COMM_NAME=t.REC_COMM_NAME + ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME + ,FREE_ITEM=t.FREE_ITEM + ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR + ,REC_NONYU_FCL_POST=t.REC_NONYU_FCL_POST + ,REC_NONYU_FCL_TEL=t.REC_NONYU_FCL_TEL + ,REC_BEF_HSDN_YMD=t.REC_BEF_HSDN_YMD + ,REC_BEF_SLIP_NUM=t.REC_BEF_SLIP_NUM + ,REC_YMD=t.REC_YMD + ,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + ,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,HSDN_YMD=t.HSDN_YMD + ,EXEC_DT=t.EXEC_DT + ,V_TRAN_CD=t.V_TRAN_CD + ,TRAN_KBN_NAME=t.TRAN_KBN_NAME + ,WHS_ORG_CD=t.WHS_ORG_CD + ,V_WHSORG_CD=t.V_WHSORG_CD + ,WHS_ORG_NAME=t.WHS_ORG_NAME + ,WHS_ORG_KN=t.WHS_ORG_KN + ,V_WHS_CD=t.V_WHS_CD + ,WHS_NAME=t.WHS_NAME + ,NONYU_FCL_CD=t.NONYU_FCL_CD + ,V_INST_CD=t.V_INST_CD + ,V_INST_KN=t.V_INST_KN + ,V_INST_NAME=t.V_INST_NAME + ,V_INST_ADDR=t.V_INST_ADDR + ,COMM_CD=t.COMM_CD + ,COMM_NAME=t.COMM_NAME + ,NONYU_QTY=t.NONYU_QTY + ,NONYU_PRICE=t.NONYU_PRICE + ,NONYU_AMT=t.NONYU_AMT + ,SHIKIRI_PRICE=t.SHIKIRI_PRICE + ,SHIKIRI_AMT=t.SHIKIRI_AMT + ,NHI_PRICE=t.NHI_PRICE + ,NHI_AMT=t.NHI_AMT + ,WHSPOS_ERR_KBN=t.WHSPOS_ERR_KBN + ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN=t.PRD_EXIS_KBN + ,FCL_EXIS_KBN=t.FCL_EXIS_KBN + ,BEF_HSDN_YMD=t.BEF_HSDN_YMD + ,BEF_SLIP_NUM=t.BEF_SLIP_NUM + ,SLIP_ORG_KBN=t.SLIP_ORG_KBN + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,ERR_FLG11=t.ERR_FLG11 + ,ERR_FLG12=t.ERR_FLG12 + ,ERR_FLG13=t.ERR_FLG13 + ,ERR_FLG14=t.ERR_FLG14 + ,ERR_FLG15=t.ERR_FLG15 + 
,ERR_FLG16=t.ERR_FLG16 + ,ERR_FLG17=t.ERR_FLG17 + ,ERR_FLG18=t.ERR_FLG18 + ,ERR_FLG19=t.ERR_FLG19 + ,ERR_FLG20=t.ERR_FLG20 + ,KJYO_YM=t.KJYO_YM + ,TKSNBK_KBN=t.TKSNBK_KBN + ,FCL_EXEC_KBN=t.FCL_EXEC_KBN + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸ホールディングスマスタ @@ -33,7 +297,50 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.hld_mst_v", - _KEY_SRC_TABLE: "src05.hld_mst_v" + _KEY_SRC_TABLE: "src05.hld_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.hld_mst_v ( + V_HLD_CD + ,SUB_NUM + ,NAME + ,KN_NAME + ,ABB_NAME + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_HLD_CD + ,t.SUB_NUM + ,t.NAME + ,t.KN_NAME + ,t.ABB_NAME + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() FROM org05.hld_mst_v AS t + ON DUPLICATE KEY UPDATE + V_HLD_CD=t.V_HLD_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ABB_NAME=t.ABB_NAME + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸マスタ @@ -41,7 +348,66 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_mst_v", - _KEY_SRC_TABLE: "src05.whs_mst_v" + _KEY_SRC_TABLE: "src05.whs_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whs_mst_v ( + V_WHS_CD + ,SUB_NUM + ,NAME + ,KN_NAME + ,ABB_NAME + ,POSTAL_CD + ,ADDR + ,KN_ADDR + ,TEL_NUM + ,V_HLD_CD + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_WHS_CD + ,t.SUB_NUM + ,t.NAME + ,t.KN_NAME + ,t.ABB_NAME + ,t.POSTAL_CD + ,t.ADDR + ,t.KN_ADDR + ,t.TEL_NUM + ,t.V_HLD_CD + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.whs_mst_v AS t 
+ ON DUPLICATE KEY UPDATE + V_WHS_CD=t.V_WHS_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ABB_NAME=t.ABB_NAME + ,POSTAL_CD=t.POSTAL_CD + ,ADDR=t.ADDR + ,KN_ADDR=t.KN_ADDR + ,TEL_NUM=t.TEL_NUM + ,V_HLD_CD=t.V_HLD_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # Vメーカー卸組織展開表 @@ -49,7 +415,156 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", - _KEY_SRC_TABLE: "src05.mkr_org_horizon_v" + _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.mkr_org_horizon_v ( + VID_KIND_1 + ,V_CD_1 + ,NAME_1 + ,DSP_ODR_1 + ,VID_KIND_2 + ,V_CD_2 + ,NAME_2 + ,DSP_ODR_2 + ,VID_KIND_3 + ,V_CD_3 + ,NAME_3 + ,DSP_ODR_3 + ,VID_KIND_4 + ,V_CD_4 + ,NAME_4 + ,DSP_ODR_4 + ,VID_KIND_5 + ,V_CD_5 + ,NAME_5 + ,DSP_ODR_5 + ,VID_KIND_6 + ,V_CD_6 + ,NAME_6 + ,DSP_ODR_6 + ,VID_KIND_7 + ,V_CD_7 + ,NAME_7 + ,DSP_ODR_7 + ,VID_KIND_8 + ,V_CD_8 + ,NAME_8 + ,DSP_ODR_8 + ,VID_KIND_9 + ,V_CD_9 + ,NAME_9 + ,DSP_ODR_9 + ,VID_KIND_10 + ,V_CD_10 + ,NAME_10 + ,DSP_ODR_10 + ,V_WHS_CD + ,START_DATE + ,END_DATE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.VID_KIND_1 + ,t.V_CD_1 + ,t.NAME_1 + ,t.DSP_ODR_1 + ,t.VID_KIND_2 + ,t.V_CD_2 + ,t.NAME_2 + ,t.DSP_ODR_2 + ,t.VID_KIND_3 + ,t.V_CD_3 + ,t.NAME_3 + ,t.DSP_ODR_3 + ,t.VID_KIND_4 + ,t.V_CD_4 + ,t.NAME_4 + ,t.DSP_ODR_4 + ,t.VID_KIND_5 + ,t.V_CD_5 + ,t.NAME_5 + ,t.DSP_ODR_5 + ,t.VID_KIND_6 + ,t.V_CD_6 + ,t.NAME_6 + ,t.DSP_ODR_6 + ,t.VID_KIND_7 + ,t.V_CD_7 + ,t.NAME_7 + ,t.DSP_ODR_7 + ,t.VID_KIND_8 + ,t.V_CD_8 + ,t.NAME_8 + ,t.DSP_ODR_8 + ,t.VID_KIND_9 + ,t.V_CD_9 + ,t.NAME_9 + ,t.DSP_ODR_9 + ,t.VID_KIND_10 + ,t.V_CD_10 + ,t.NAME_10 + ,t.DSP_ODR_10 + ,t.V_WHS_CD + ,t.START_DATE + ,t.END_DATE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM 
org05.mkr_org_horizon_v AS t + ON DUPLICATE KEY UPDATE + VID_KIND_1=t.VID_KIND_1 + ,V_CD_1=t.V_CD_1 + ,NAME_1=t.NAME_1 + ,DSP_ODR_1=t.DSP_ODR_1 + ,VID_KIND_2=t.VID_KIND_2 + ,V_CD_2=t.V_CD_2 + ,NAME_2=t.NAME_2 + ,DSP_ODR_2=t.DSP_ODR_2 + ,VID_KIND_3=t.VID_KIND_3 + ,V_CD_3=t.V_CD_3 + ,NAME_3=t.NAME_3 + ,DSP_ODR_3=t.DSP_ODR_3 + ,VID_KIND_4=t.VID_KIND_4 + ,V_CD_4=t.V_CD_4 + ,NAME_4=t.NAME_4 + ,DSP_ODR_4=t.DSP_ODR_4 + ,VID_KIND_5=t.VID_KIND_5 + ,V_CD_5=t.V_CD_5 + ,NAME_5=t.NAME_5 + ,DSP_ODR_5=t.DSP_ODR_5 + ,VID_KIND_6=t.VID_KIND_6 + ,V_CD_6=t.V_CD_6 + ,NAME_6=t.NAME_6 + ,DSP_ODR_6=t.DSP_ODR_6 + ,VID_KIND_7=t.VID_KIND_7 + ,V_CD_7=t.V_CD_7 + ,NAME_7=t.NAME_7 + ,DSP_ODR_7=t.DSP_ODR_7 + ,VID_KIND_8=t.VID_KIND_8 + ,V_CD_8=t.V_CD_8 + ,NAME_8=t.NAME_8 + ,DSP_ODR_8=t.DSP_ODR_8 + ,VID_KIND_9=t.VID_KIND_9 + ,V_CD_9=t.V_CD_9 + ,NAME_9=t.NAME_9 + ,DSP_ODR_9=t.DSP_ODR_9 + ,VID_KIND_10=t.VID_KIND_10 + ,V_CD_10=t.V_CD_10 + ,NAME_10=t.NAME_10 + ,DSP_ODR_10=t.DSP_ODR_10 + ,V_WHS_CD=t.V_WHS_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸組織変換マスタ @@ -57,7 +572,51 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", - _KEY_SRC_TABLE: "src05.org_cnv_mst_v" + _KEY_SRC_TABLE: "src05.org_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.org_cnv_mst_v ( + WHS_CD + ,WHS_SUB_CD + ,ORG_CD + ,SUB_NUM + ,V_ORG_CD + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.WHS_CD + ,t.WHS_SUB_CD + ,t.ORG_CD + ,t.SUB_NUM + ,t.V_ORG_CD + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.org_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + WHS_CD=t.WHS_CD + ,WHS_SUB_CD=t.WHS_SUB_CD + ,ORG_CD=t.ORG_CD + ,SUB_NUM=t.SUB_NUM + ,V_ORG_CD=t.V_ORG_CD + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + 
,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V取引区分マスタ @@ -65,7 +624,45 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", - _KEY_SRC_TABLE: "src05.tran_kbn_mst_v" + _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.tran_kbn_mst_v ( + V_TRAN_CD + ,SUB_NUM + ,NAME + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_TRAN_CD + ,t.SUB_NUM + ,t.NAME + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.tran_kbn_mst_v AS t + ON DUPLICATE KEY UPDATE + V_TRAN_CD=t.V_TRAN_CD + ,SUB_NUM=t.SUB_NUM + ,NAME=t.NAME + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V施設マスタ @@ -73,7 +670,90 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.fcl_mst_v", - _KEY_SRC_TABLE: "src05.fcl_mst_v" + _KEY_SRC_TABLE: "src05.fcl_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.fcl_mst_v ( + V_INST_CD + ,SUB_NUM + ,START_DATE + ,END_DATE + ,CLOSED_DT + ,FCL_NAME + ,FCL_KN_NAME + ,FCL_ABB_NAME + ,FCL_ABB_KN_NAME + ,MKR_CD + ,JSK_PROC_KBN + ,FMT_ADDR + ,FMT_KN_ADDR + ,POSTAL_CD + ,PRFT_CD + ,PRFT_NAME + ,CITY_NAME + ,ADDR_LINE_1 + ,TEL_NUM + ,ADMIN_KBN + ,FCL_TYPE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.V_INST_CD + ,t.SUB_NUM + ,t.START_DATE + ,t.END_DATE + ,t.CLOSED_DT + ,t.FCL_NAME + ,t.FCL_KN_NAME + ,t.FCL_ABB_NAME + ,t.FCL_ABB_KN_NAME + ,t.MKR_CD + ,t.JSK_PROC_KBN + ,t.FMT_ADDR + ,t.FMT_KN_ADDR + ,t.POSTAL_CD + ,t.PRFT_CD + ,t.PRFT_NAME + ,t.CITY_NAME + ,t.ADDR_LINE_1 + ,t.TEL_NUM + ,t.ADMIN_KBN + ,t.FCL_TYPE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + 
,SYSDATE() + FROM org05.fcl_mst_v AS t + ON DUPLICATE KEY UPDATE + V_INST_CD=t.V_INST_CD + ,SUB_NUM=t.SUB_NUM + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,CLOSED_DT=t.CLOSED_DT + ,FCL_NAME=t.FCL_NAME + ,FCL_KN_NAME=t.FCL_KN_NAME + ,FCL_ABB_NAME=t.FCL_ABB_NAME + ,FCL_ABB_KN_NAME=t.FCL_ABB_KN_NAME + ,MKR_CD=t.MKR_CD + ,JSK_PROC_KBN=t.JSK_PROC_KBN + ,FMT_ADDR=t.FMT_ADDR + ,FMT_KN_ADDR=t.FMT_KN_ADDR + ,POSTAL_CD=t.POSTAL_CD + ,PRFT_CD=t.PRFT_CD + ,PRFT_NAME=t.PRFT_NAME + ,CITY_NAME=t.CITY_NAME + ,ADDR_LINE_1=t.ADDR_LINE_1 + ,TEL_NUM=t.TEL_NUM + ,ADMIN_KBN=t.ADMIN_KBN + ,FCL_TYPE=t.FCL_TYPE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V製品マスタ @@ -81,7 +761,102 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", - _KEY_SRC_TABLE: "src05.phm_prd_mst_v" + _KEY_SRC_TABLE: "src05.phm_prd_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_prd_mst_v ( + PRD_CD + ,SUB_NUM + ,PRD_NAME + ,PRD_E_NAME + ,MKR_CD + ,MKR_INF_1 + ,MKR_INF_2 + ,PHM_ITM_CD + ,ITM_NAME + ,ITM_ABB_NAME + ,FORM_CD + ,FORM_NAME + ,VOL_CD + ,VOL_NAME + ,CONT_CD + ,CONT_NAME + ,PKG_CD + ,PKG_NAME + ,CNV_NUM + ,JSK_START_DT + ,PRD_SALE_KBN + ,JSK_PROC_KBN + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.PRD_CD + ,t.SUB_NUM + ,t.PRD_NAME + ,t.PRD_E_NAME + ,t.MKR_CD + ,t.MKR_INF_1 + ,t.MKR_INF_2 + ,t.PHM_ITM_CD + ,t.ITM_NAME + ,t.ITM_ABB_NAME + ,t.FORM_CD + ,t.FORM_NAME + ,t.VOL_CD + ,t.VOL_NAME + ,t.CONT_CD + ,t.CONT_NAME + ,t.PKG_CD + ,t.PKG_NAME + ,t.CNV_NUM + ,t.JSK_START_DT + ,t.PRD_SALE_KBN + ,t.JSK_PROC_KBN + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.phm_prd_mst_v AS t + ON DUPLICATE KEY UPDATE + PRD_CD=t.PRD_CD + ,SUB_NUM=t.SUB_NUM + ,PRD_NAME=t.PRD_NAME + ,PRD_E_NAME=t.PRD_E_NAME + ,MKR_CD=t.MKR_CD + ,MKR_INF_1=t.MKR_INF_1 
+ ,MKR_INF_2=t.MKR_INF_2 + ,PHM_ITM_CD=t.PHM_ITM_CD + ,ITM_NAME=t.ITM_NAME + ,ITM_ABB_NAME=t.ITM_ABB_NAME + ,FORM_CD=t.FORM_CD + ,FORM_NAME=t.FORM_NAME + ,VOL_CD=t.VOL_CD + ,VOL_NAME=t.VOL_NAME + ,CONT_CD=t.CONT_CD + ,CONT_NAME=t.CONT_NAME + ,PKG_CD=t.PKG_CD + ,PKG_NAME=t.PKG_NAME + ,CNV_NUM=t.CNV_NUM + ,JSK_START_DT=t.JSK_START_DT + ,PRD_SALE_KBN=t.PRD_SALE_KBN + ,JSK_PROC_KBN=t.JSK_PROC_KBN + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V製品価格マスタ @@ -89,7 +864,48 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_price_mst_v", - _KEY_SRC_TABLE: "src05.phm_price_mst_v" + _KEY_SRC_TABLE: "src05.phm_price_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_price_mst_v ( + PHM_PRD_CD + ,PHM_PRICE_KIND + ,SUB_NUM + ,PRICE + ,START_DATE + ,END_DATE + ,DSP_ODR + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.PHM_PRD_CD + ,t.PHM_PRICE_KIND + ,t.SUB_NUM + ,t.PRICE + ,t.START_DATE + ,t.END_DATE + ,t.DSP_ODR + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.phm_price_mst_v AS t + ON DUPLICATE KEY UPDATE + PHM_PRD_CD=t.PHM_PRD_CD + ,PHM_PRICE_KIND=t.PHM_PRICE_KIND + ,SUB_NUM=t.SUB_NUM + ,PRICE=t.PRICE + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,DSP_ODR=t.DSP_ODR + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V施設統合マスタ @@ -97,7 +913,30 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", - _KEY_SRC_TABLE: "src05.vop_hco_merge_v" + _KEY_SRC_TABLE: "src05.vop_hco_merge_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.vop_hco_merge_v ( + V_INST_CD + ,V_INST_CD_MERG + ,APPLY_DT + ,MERGE_REASON + ,DWH_UPD_DT + ) + SELECT + t.V_INST_CD + ,t.V_INST_CD_MERG + ,t.APPLY_DT + 
,t.MERGE_REASON + ,SYSDATE() + FROM org05.vop_hco_merge_v AS t + ON DUPLICATE KEY UPDATE + V_INST_CD=t.V_INST_CD + ,V_INST_CD_MERG=t.V_INST_CD_MERG + ,APPLY_DT=t.APPLY_DT + ,MERGE_REASON=t.MERGE_REASON + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # V卸得意先情報マスタ @@ -105,15 +944,112 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", - _KEY_SRC_TABLE: "src05.whs_customer_mst_v" + _KEY_SRC_TABLE: "src05.whs_customer_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whs_customer_mst_v ( + WHS_CD + ,WHS_SUB_CD + ,CUSTOMER_CD + ,SUB_NUM + ,START_DATE + ,END_DATE + ,WHS_ORG_CD + ,SRC_ORG_CD + ,NAME + ,KN_NAME + ,ADDR + ,KN_ADDR + ,POSTAL_CD + ,TEL_NUM + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + SELECT + t.WHS_CD + ,t.WHS_SUB_CD + ,t.CUSTOMER_CD + ,t.SUB_NUM + ,t.START_DATE + ,t.END_DATE + ,t.WHS_ORG_CD + ,t.SRC_ORG_CD + ,t.NAME + ,t.KN_NAME + ,t.ADDR + ,t.KN_ADDR + ,t.POSTAL_CD + ,t.TEL_NUM + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.whs_customer_mst_v AS t + ON DUPLICATE KEY UPDATE + WHS_CD=t.WHS_CD + ,WHS_SUB_CD=t.WHS_SUB_CD + ,CUSTOMER_CD=t.CUSTOMER_CD + ,SUB_NUM=t.SUB_NUM + ,START_DATE=t.START_DATE + ,END_DATE=t.END_DATE + ,WHS_ORG_CD=t.WHS_ORG_CD + ,SRC_ORG_CD=t.SRC_ORG_CD + ,NAME=t.NAME + ,KN_NAME=t.KN_NAME + ,ADDR=t.ADDR + ,KN_ADDR=t.KN_ADDR + ,POSTAL_CD=t.POSTAL_CD + ,TEL_NUM=t.TEL_NUM + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # MDBコード変換表 CONDKEY_MDB_CONV_MST: { _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".tsv", - _KEY_ORG_TABLE: "org05.mdb_conv_mst_v", - _KEY_SRC_TABLE: "src05.mdb_conv_mst_v" + _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", + _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.mdb_cnv_mst_v ( + HCO_VID_V + ,SUB_NUM + ,MDB_CD + ,RELIABILITY + ,START_DATE + ,REC_STS_KBN + ,INS_DT + ,UPD_DT + ,DWH_UPD_DT + ) + 
SELECT + t.HCO_VID_V + ,t.SUB_NUM + ,t.MDB_CD + ,t.RELIABILITY + ,t.START_DATE + ,t.REC_STS_KBN + ,t.INS_DT + ,t.UPD_DT + ,SYSDATE() + FROM org05.mdb_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + HCO_VID_V=t.HCO_VID_V + ,SUB_NUM=t.SUB_NUM + ,MDB_CD=t.MDB_CD + ,RELIABILITY=t.RELIABILITY + ,START_DATE=t.START_DATE + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,UPD_DT=t.UPD_DT + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # 卸在庫データ @@ -121,7 +1057,105 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whole_stock", - _KEY_SRC_TABLE: "src05.whole_stock" + _KEY_SRC_TABLE: "src05.whole_stock", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whole_stock ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_STO_PLACE + ,REC_STOCK_YMD + ,REC_COMM_CD + ,REC_QTY + ,REC_STOCK_NO_SIGN + ,REC_JAN_CD + ,FREE_ITEM + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,EXEC_DT + ,ERR_FLG1 + ,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + ,t.REC_STO_PLACE + ,t.REC_STOCK_YMD + ,t.REC_COMM_CD + ,t.REC_QTY + ,t.REC_STOCK_NO_SIGN + ,t.REC_JAN_CD + ,t.FREE_ITEM + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.EXEC_DT + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.whole_stock AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + ,REC_STO_PLACE=t.REC_STO_PLACE + ,REC_STOCK_YMD=t.REC_STOCK_YMD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_QTY=t.REC_QTY + ,REC_STOCK_NO_SIGN=t.REC_STOCK_NO_SIGN + ,REC_JAN_CD=t.REC_JAN_CD + ,FREE_ITEM=t.FREE_ITEM + ,REC_YMD=t.REC_YMD + 
,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + ,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,EXEC_DT=t.EXEC_DT + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # 生物由来データ @@ -129,7 +1163,252 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.bio_sales", - _KEY_SRC_TABLE: "src05.bio_sales" + _KEY_SRC_TABLE: "src05.bio_sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.bio_sales ( + REC_DATA + ,REC_WHS_CD + ,REC_WHS_SUB_CD + ,REC_WHS_ORG_CD + ,REC_CUST_CD + ,REC_COMM_CD + ,REC_TRAN_KBN + ,REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK + ,REC_URAG_NUM + ,REC_COMM_NAME + ,REC_NONYU_FCL_NAME + ,REC_NONYU_FCL_ADDR + ,REC_LOT_NUM1 + ,REC_QTY1 + ,REC_LOT_NUM2 + ,REC_QTY2 + ,REC_LOT_NUM3 + ,REC_QTY3 + ,REC_YMD + ,SALE_DATA_CAT + ,SLIP_FILE_NAME + ,SLIP_MGT_NUM + ,ROW_NUM + ,HSDN_YMD + ,EXEC_DT + ,V_TRAN_CD + ,TRAN_KBN_NAME + ,WHS_ORG_CD + ,V_WHSORG_CD + ,WHS_ORG_NAME + ,WHS_ORG_KN + ,V_WHS_CD + ,WHS_NAME + ,NONYU_FCL_CD + ,V_INST_CD + ,V_INST_NAME + ,V_INST_KN + ,V_INST_ADDR + ,COMM_CD + ,PRODUCT_NAME + ,HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN + ,FCL_EXIS_KBN + ,QTY1 + ,QTY2 + ,QTY3 + ,SLIP_ORG_KBN + ,BEF_SLIP_MGT_NUM + ,WHS_REP_COMM_NAME + ,WHS_REP_NONYU_FCL_NAME + ,WHS_REP_NONYU_FCL_ADDR + ,ERR_FLG1 + ,ERR_FLG2 + ,ERR_FLG3 + ,ERR_FLG4 + ,ERR_FLG5 + ,ERR_FLG6 + ,ERR_FLG7 + ,ERR_FLG8 + ,ERR_FLG9 + ,ERR_FLG10 + ,ERR_FLG11 + ,ERR_FLG12 + ,ERR_FLG13 + ,ERR_FLG14 + ,ERR_FLG15 + ,ERR_FLG16 + ,ERR_FLG17 + ,ERR_FLG18 + ,ERR_FLG19 + ,ERR_FLG20 + ,KJYO_YM + ,TKSNBK_KBN + ,FCL_EXEC_KBN + ,REC_STS_KBN + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.REC_DATA + ,t.REC_WHS_CD + ,t.REC_WHS_SUB_CD + 
,t.REC_WHS_ORG_CD + ,t.REC_CUST_CD + ,t.REC_COMM_CD + ,t.REC_TRAN_KBN + ,t.REV_HSDNYMD_WRK + ,t.REV_HSDNYMD_SRK + ,t.REC_URAG_NUM + ,t.REC_COMM_NAME + ,t.REC_NONYU_FCL_NAME + ,t.REC_NONYU_FCL_ADDR + ,t.REC_LOT_NUM1 + ,t.REC_QTY1 + ,t.REC_LOT_NUM2 + ,t.REC_QTY2 + ,t.REC_LOT_NUM3 + ,t.REC_QTY3 + ,t.REC_YMD + ,t.SALE_DATA_CAT + ,t.SLIP_FILE_NAME + ,t.SLIP_MGT_NUM + ,t.ROW_NUM + ,t.HSDN_YMD + ,t.EXEC_DT + ,t.V_TRAN_CD + ,t.TRAN_KBN_NAME + ,t.WHS_ORG_CD + ,t.V_WHSORG_CD + ,t.WHS_ORG_NAME + ,t.WHS_ORG_KN + ,t.V_WHS_CD + ,t.WHS_NAME + ,t.NONYU_FCL_CD + ,t.V_INST_CD + ,t.V_INST_NAME + ,t.V_INST_KN + ,t.V_INST_ADDR + ,t.COMM_CD + ,t.PRODUCT_NAME + ,t.HTDNYMD_ERR_KBN + ,t.PRD_EXIS_KBN + ,t.FCL_EXIS_KBN + ,t.QTY1 + ,t.QTY2 + ,t.QTY3 + ,t.SLIP_ORG_KBN + ,t.BEF_SLIP_MGT_NUM + ,t.WHS_REP_COMM_NAME + ,t.WHS_REP_NONYU_FCL_NAME + ,t.WHS_REP_NONYU_FCL_ADDR + ,t.ERR_FLG1 + ,t.ERR_FLG2 + ,t.ERR_FLG3 + ,t.ERR_FLG4 + ,t.ERR_FLG5 + ,t.ERR_FLG6 + ,t.ERR_FLG7 + ,t.ERR_FLG8 + ,t.ERR_FLG9 + ,t.ERR_FLG10 + ,t.ERR_FLG11 + ,t.ERR_FLG12 + ,t.ERR_FLG13 + ,t.ERR_FLG14 + ,t.ERR_FLG15 + ,t.ERR_FLG16 + ,t.ERR_FLG17 + ,t.ERR_FLG18 + ,t.ERR_FLG19 + ,t.ERR_FLG20 + ,t.KJYO_YM + ,t.TKSNBK_KBN + ,t.FCL_EXEC_KBN + ,t.REC_STS_KBN + ,t.INS_DT + ,t.INS_USR + ,SYSDATE() + FROM org05.bio_sales AS t + ON DUPLICATE KEY UPDATE + REC_DATA=t.REC_DATA + ,REC_WHS_CD=t.REC_WHS_CD + ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD + ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD + ,REC_CUST_CD=t.REC_CUST_CD + ,REC_COMM_CD=t.REC_COMM_CD + ,REC_TRAN_KBN=t.REC_TRAN_KBN + ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK + ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK + ,REC_URAG_NUM=t.REC_URAG_NUM + ,REC_COMM_NAME=t.REC_COMM_NAME + ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME + ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR + ,REC_LOT_NUM1=t.REC_LOT_NUM1 + ,REC_QTY1=t.REC_QTY1 + ,REC_LOT_NUM2=t.REC_LOT_NUM2 + ,REC_QTY2=t.REC_QTY2 + ,REC_LOT_NUM3=t.REC_LOT_NUM3 + ,REC_QTY3=t.REC_QTY3 + ,REC_YMD=t.REC_YMD + ,SALE_DATA_CAT=t.SALE_DATA_CAT + ,SLIP_FILE_NAME=t.SLIP_FILE_NAME + 
,SLIP_MGT_NUM=t.SLIP_MGT_NUM + ,ROW_NUM=t.ROW_NUM + ,HSDN_YMD=t.HSDN_YMD + ,EXEC_DT=t.EXEC_DT + ,V_TRAN_CD=t.V_TRAN_CD + ,TRAN_KBN_NAME=t.TRAN_KBN_NAME + ,WHS_ORG_CD=t.WHS_ORG_CD + ,V_WHSORG_CD=t.V_WHSORG_CD + ,WHS_ORG_NAME=t.WHS_ORG_NAME + ,WHS_ORG_KN=t.WHS_ORG_KN + ,V_WHS_CD=t.V_WHS_CD + ,WHS_NAME=t.WHS_NAME + ,NONYU_FCL_CD=t.NONYU_FCL_CD + ,V_INST_CD=t.V_INST_CD + ,V_INST_NAME=t.V_INST_NAME + ,V_INST_KN=t.V_INST_KN + ,V_INST_ADDR=t.V_INST_ADDR + ,COMM_CD=t.COMM_CD + ,PRODUCT_NAME=t.PRODUCT_NAME + ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN + ,PRD_EXIS_KBN=t.PRD_EXIS_KBN + ,FCL_EXIS_KBN=t.FCL_EXIS_KBN + ,QTY1=t.QTY1 + ,QTY2=t.QTY2 + ,QTY3=t.QTY3 + ,SLIP_ORG_KBN=t.SLIP_ORG_KBN + ,BEF_SLIP_MGT_NUM=t.BEF_SLIP_MGT_NUM + ,WHS_REP_COMM_NAME=t.WHS_REP_COMM_NAME + ,WHS_REP_NONYU_FCL_NAME=t.WHS_REP_NONYU_FCL_NAME + ,WHS_REP_NONYU_FCL_ADDR=t.WHS_REP_NONYU_FCL_ADDR + ,ERR_FLG1=t.ERR_FLG1 + ,ERR_FLG2=t.ERR_FLG2 + ,ERR_FLG3=t.ERR_FLG3 + ,ERR_FLG4=t.ERR_FLG4 + ,ERR_FLG5=t.ERR_FLG5 + ,ERR_FLG6=t.ERR_FLG6 + ,ERR_FLG7=t.ERR_FLG7 + ,ERR_FLG8=t.ERR_FLG8 + ,ERR_FLG9=t.ERR_FLG9 + ,ERR_FLG10=t.ERR_FLG10 + ,ERR_FLG11=t.ERR_FLG11 + ,ERR_FLG12=t.ERR_FLG12 + ,ERR_FLG13=t.ERR_FLG13 + ,ERR_FLG14=t.ERR_FLG14 + ,ERR_FLG15=t.ERR_FLG15 + ,ERR_FLG16=t.ERR_FLG16 + ,ERR_FLG17=t.ERR_FLG17 + ,ERR_FLG18=t.ERR_FLG18 + ,ERR_FLG19=t.ERR_FLG19 + ,ERR_FLG20=t.ERR_FLG20 + ,KJYO_YM=t.KJYO_YM + ,TKSNBK_KBN=t.TKSNBK_KBN + ,FCL_EXEC_KBN=t.FCL_EXEC_KBN + ,REC_STS_KBN=t.REC_STS_KBN + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, # ロットマスタデータ @@ -137,7 +1416,36 @@ class VjskRecvFileMapper: _KEY_FILE_PREFIX: "lot_num_ms_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", - _KEY_SRC_TABLE: "src05.lot_num_mst" + _KEY_SRC_TABLE: "src05.lot_num_mst", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.lot_num_mst ( + SER_NUM + ,LOT_NUM + ,EXPR_DT + ,FRST_MOV_DT + ,INS_DT + ,INS_USR + ,DWH_UPD_DT + ) + SELECT + t.SER_NUM + ,t.LOT_NUM + ,t.EXPR_DT + ,t.FRST_MOV_DT + ,t.INS_DT + 
,t.INS_USR + ,SYSDATE() + FROM org05.lot_num_mst AS t + ON DUPLICATE KEY UPDATE + SER_NUM=t.SER_NUM + ,LOT_NUM=t.LOT_NUM + ,EXPR_DT=t.EXPR_DT + ,FRST_MOV_DT=t.FRST_MOV_DT + ,INS_DT=t.INS_DT + ,INS_USR=t.INS_USR + ,DWH_UPD_DT=SYSDATE() + ; + """) }, } @@ -165,12 +1473,19 @@ class VjskRecvFileMapper: ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) return ret + def get_upsert_sql(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_UPSERT_SQL) + return ret + def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None filename = s3_file_path[s3_file_path.rfind("/") + 1:] for condkey in self._VJSK_INTERFACE_MAPPING: element = self._VJSK_INTERFACE_MAPPING.get(condkey) - if filename.startswith(element.get(self._KEY_FILE_PREFIX)) and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): + if filename.startswith(element.get(self._KEY_FILE_PREFIX)) \ + and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): ret = condkey break return ret From 62ec4ddebb54de8eb07902ea33dced4ae05964ea Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 18:37:07 +0900 Subject: [PATCH 06/86] =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=81=A8=E3=81=8B?= =?UTF-8?q?=E3=82=B3=E3=83=A1=E3=83=B3=E3=83=88=E3=81=AE=E6=95=B4=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 3 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 4 + .../src/batch/vjsk/vjsk_importer.py | 151 ++++++++++-------- .../src/batch/vjsk/vjsk_recv_file_manager.py | 60 ------- 4 files changed, 89 insertions(+), 129 deletions(-) delete mode 100644 ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2ee93eb8..dde7cd46 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ 
b/ecs/jskult-batch-daily/src/aws/s3.py @@ -110,8 +110,7 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER -class VjskBucket(S3Bucket): - # TODO:V実消化バケットから見たり取ってきたりする実装をやる +class VjskRecieveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 4b574dce..70f69344 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -12,6 +12,8 @@ class JjskDataLoadManager: pass def _import_to_db(src_file_name: str, condkey: str): + logger.debug(f"_import_to_db start (src_file_name : {src_file_name}, condkey : {condkey})") + db = Database.get_instance() table_name_org = mapper.get_org_table(condkey) upsert_sql = mapper.get_upsert_sql(condkey) @@ -40,6 +42,8 @@ class JjskDataLoadManager: raise e finally: db.disconnect() + + logger.debug("_import_to_db end") return @classmethod diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 8b4b5197..55e9a41b 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,4 +1,4 @@ -from src.aws.s3 import ConfigBucket, VjskBucket +from src.aws.s3 import ConfigBucket, VjskRecieveBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_wholestocksaler_file import \ CalendarWholwSalerStockFile @@ -7,108 +7,116 @@ from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger -# from src.batch.datachange import emp_chg_inst_lau - -_logger = get_logger('V実消化データ取込') +logger = get_logger('V実消化データ取込') batch_context = 
BatchContext.get_instance() -vjsk_recv_bucket = VjskBucket() +vjsk_recv_bucket = VjskRecieveBucket() vjsk_mapper = VjskRecvFileMapper() -def _check_if_file_exists(src_list: list, key: str) -> bool: - pref = vjsk_mapper.get_file_prefix(key) - suff = vjsk_mapper.get_file_suffix(key) +def _check_if_file_exists(src_list: list, condkey: str) -> bool: + logger.debug(f"_check_if_file_exists start (src_list : {src_list} , condkey : {condkey})") + # ファイル接頭辞と拡張子が一致するかで判定する + ret = False + pref = vjsk_mapper.get_file_prefix(condkey) + suff = vjsk_mapper.get_file_suffix(condkey) + for idx, elem in enumerate(src_list): buf = elem.get("filename") filename = buf[buf.rfind("/") + 1:] if filename.startswith(pref) and filename.endswith(suff): - return True - return False + ret = True + break + + logger.debug(f"_check_if_file_exists end (return : {ret})") + return ret def _check_received_files(): - """V実消化連携データ存在確認処理""" - _logger.debug('V実消化連携データ存在確認処理:開始') + """V実消化連携データファイル受領確認処理""" + logger.debug('_check_received_files start') - # 実消化&アルトマーク V実消化データ受領バケットにあるファイル一覧を取得 - received_files = vjsk_recv_bucket.get_s3_file_list() - _logger.debug(f'ファイル一覧{received_files}') + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得 + received_s3_files = vjsk_recv_bucket.get_s3_file_list() + logger.debug(f'ファイル一覧{received_s3_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): - raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): + raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸販売データ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_SLIP_DATA): - raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_SLIP_DATA): 
+ raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸組織変換マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): - raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_ORG_CNV_MST): + raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 施設統合マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): - raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE): + raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_MST): - raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_MST): + raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸ホールディングスマスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_HLD_MST): - raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_HLD_MST): + raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 施設マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_FCL_MST): - raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_FCL_MST): + raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 メーカー卸組織展開表 - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): - raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_files}') + if not 
_check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON): + raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 取引区分マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): - raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST): + raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 製品マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): - raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRD_MST): + raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 製品価格マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): - raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST): + raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 卸得意先情報マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): - raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST): + raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_s3_files}') # ファイル存在確認 MDBコード変換マスタ - if not _check_if_file_exists(received_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): - raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_files}') + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): + raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_s3_files}') - _logger.debug('V実消化連携データ存在確認処理:終了') + logger.debug('_check_received_files end') return 
True def _import_file_to_db(): - _logger.debug('V実消化取込処理:開始') + """V実消化連携データ取込処理""" + logger.debug('_import_file_to_db start') - # 実消化&アルトマーク V実消化データ受領バケットにあるファイルパス一覧を取得 + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得 received_s3_files = vjsk_recv_bucket.get_s3_file_list() - # ファイルパス一覧にマッピング情報を参照するためのキーを持たせて辞書可する + # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する target_dict = {} for s3_file_path in received_s3_files: + # S3バケットにある受領ファイルをローカルストレージにdownloadする local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + + # データファイル名に該当する辞書アクセス用のキーを取得する key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) + + # 想定されたデータファイルであれば辞書登録する if key is not None: target_dict[key] = {"condkey": key, "src_file_path": local_file_path} - _logger.debug(f'S3ファイルパス辞書{target_dict}') - - # TODO: diff_upsertに変わるやつを呼び出す - # TODO: emp_chg_inst_lau.batch_process() みたいに + logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: @@ -150,51 +158,60 @@ def _import_file_to_db(): # DB登録 MDBコード変換マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) - _logger.debug('V実消化取込処理:終了') + logger.debug('_import_file_to_db end') def _determine_today_is_stockslipdata_target(): - """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日が該当していればTrueを返却する""" + """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日付が該当していればTrueを返却する""" + logger.debug("_determine_today_is_stockslipdata_target start") try: + # 処理日付を取得する today = batch_context.syor_date - holiday_list_file_path = ConfigBucket().download_wholesaler_stock_list() - targetdays = CalendarWholwSalerStockFile(holiday_list_file_path) - ret = targetdays.compare_date(today) + # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする + config_file_path = ConfigBucket().download_wholesaler_stock_list() + + # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する + target_days = CalendarWholwSalerStockFile(config_file_path) + + # 
処理日付が、設定ファイル「V実消化卸在庫データ連携日ファイル」の定義に該当するかを判定する + ret = target_days.compare_date(today) except Exception as e: - _logger.error(f'{e}') + logger.error(f'{e}') raise e + logger.debug("_determine_today_is_stockslipdata_target end") return ret def exec(): - """V実消化データ取込""" - _logger.info('Start Jitsusyouka Torikomi PGM.') + """V実消化データ取込処理""" + logger.info('Start Jitsusyouka Torikomi PGM.') # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - _logger.debug('卸在庫データ取込対象日であるかを判定') + logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - _logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') + logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') if batch_context.is_import_target_vjsk_stockslipdata: - _logger.info('卸在庫データ取込対象日です') + logger.info('卸在庫データ取込対象日です') # V実消化データファイル受領チェック - _logger.debug('V実消化データファイル受領チェック:開始') + logger.debug('V実消化データファイル受領チェック:開始') try: - # S3バケット上でV実消化データファイルの存在チェックをする + # S3バケットにある受領済のV実消化データファイルの存在チェックをする _check_received_files() except BatchOperationException as e: - _logger.error('受領したV実消化データファイルに欠落があります') + logger.error('受領したV実消化データファイルに未受領もものがあります') raise e - _logger.debug('V実消化データファイル受領チェック:終了') + logger.debug('V実消化データファイル受領チェック:終了') # データベース取込 - _logger.debug('V実消化データ取込:開始') + logger.debug('V実消化データ取込:開始') try: + # S3バケットにある受領済のV実消化データファイルをデータベースに登録する _import_file_to_db() except Exception as e: - _logger.error(f'データベース登録失敗 {e}') + logger.error(f'データベース登録失敗 {e}') raise e - _logger.debug('V実消化データ取込:終了') + logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py deleted file mode 100644 index 7ddbb766..00000000 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_manager.py +++ /dev/null @@ -1,60 +0,0 @@ -# import csv -# from io import TextIOWrapper - - -# class VjskRecvFileManager: -# layout_class: str -# records: 
list[str] - -# def __init__(self, dat_line: list[str]) -> None: -# self.layout_class = dat_line[0] -# self.records = dat_line - - -# class VjskDatFile: -# """V実消化データファイル""" - -# lines: list[VjskRecvFileManager] -# success_count: int = 0 -# error_count: int = 0 -# total_count: int = 0 -# __i: int = 0 - -# def __iter__(self): -# return self - -# def __next__(self) -> VjskRecvFileManager: -# if self.__i == len(self.lines): -# raise StopIteration() -# line = self.lines[self.__i] -# self.__i += 1 -# return line - -# def __init__(self, file: TextIOWrapper) -> None: -# reader = csv.reader(file) -# csv_rows = [VjskRecvFileManager(row) for row in reader] - -# self.lines = csv_rows -# self.total_count = len(csv_rows) - -# def count_up_success(self): -# self.success_count += 1 - -# def count_up_error(self): -# self.error_count += 1 - -# @classmethod -# def retrieve_from_file(cls, local_file_path: str): -# """V実消化データファイルを読み込み、新しいインスタンスを作成する - -# Args: -# local_file_path (str): ローカルのファイルパス - -# Returns: -# VjskDatFile: このクラスのインスタンス -# """ -# # cp932(Shift-JIS Windows拡張)でファイルを読み込む -# file = open(local_file_path, encoding='cp932') -# instance = cls(file) -# file.close() -# return instance From f299cb9379a564ba99fae33ce14b1def61d4656e Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 19:06:19 +0900 Subject: [PATCH 07/86] =?UTF-8?q?BIO=E3=81=AE2=E6=9C=AC=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_importer.py | 14 ++++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 6 +++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 55e9a41b..9c1b0d72 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -92,6 +92,14 @@ def _check_received_files(): if not 
_check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MDB_CONV_MST): raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_s3_files}') + # ファイル存在確認 生物由来データ + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_BIO_SLIP_DATA): + raise BatchOperationException(f'生物由来データファイルがありません ファイル一覧:{received_s3_files}') + + # ファイル存在確認 製造ロット番号マスタ + if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): + raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}') + logger.debug('_check_received_files end') return True @@ -158,6 +166,12 @@ def _import_file_to_db(): # DB登録 MDBコード変換マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + # DB登録 生物由来データ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + + # DB登録 製造ロット番号マスタ + JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + logger.debug('_import_file_to_db end') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index a6736e16..cfd73810 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -16,7 +16,7 @@ class VjskRecvFileMapper: CONDKEY_MDB_CONV_MST = "MDB_CONV_MST" # MDBコード変換表 CONDKEY_STOCK_SLIP_DATA = "STOCK_SLIP_DATA" # 卸在庫データ CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ - CONDKEY_LOT_NUM_MS = "LOT_NUM_MS" # ロットマスタデータ + CONDKEY_LOT_NUM_MST = "LOT_NUM_MST" # ロットマスタデータ _KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" @@ -1412,8 +1412,8 @@ class VjskRecvFileMapper: }, # ロットマスタデータ - CONDKEY_LOT_NUM_MS: { - _KEY_FILE_PREFIX: "lot_num_ms_", + CONDKEY_LOT_NUM_MST: { + _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", From 1bff52f8c7ea1aa7903aeae17bfeae7081b7f2d3 Mon Sep 17 00:00:00 2001 From: 
"x.azuma.m@nds-tyo.co.jp" Date: Tue, 9 May 2023 22:30:06 +0900 Subject: [PATCH 08/86] =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=81=A8=E3=81=8B?= =?UTF-8?q?=E3=82=A4=E3=83=B3=E3=83=87=E3=83=B3=E3=83=88=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 2 +- .../src/batch/vjsk/vjsk_importer.py | 16 ++++++++++------ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 5 +++-- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 70f69344..9653f858 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -57,5 +57,5 @@ class JjskDataLoadManager: # データベース登録 self._import_to_db(local_file_name, target["condkey"]) - logger.debug('JjskDataLoadManager#load end') + logger.debug('JjskDataLoadManager#load done') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 9c1b0d72..d667416e 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -27,7 +27,7 @@ def _check_if_file_exists(src_list: list, condkey: str) -> bool: ret = True break - logger.debug(f"_check_if_file_exists end (return : {ret})") + logger.debug(f"_check_if_file_exists done (return : {ret})") return ret @@ -100,7 +100,7 @@ def _check_received_files(): if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}') - logger.debug('_check_received_files end') + logger.debug('_check_received_files done') return True @@ -115,11 +115,15 @@ def _import_file_to_db(): # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する 
target_dict = {} for s3_file_path in received_s3_files: + file_name = s3_file_path.get('filename') + # S3バケットにある受領ファイルをローカルストレージにdownloadする - local_file_path = vjsk_recv_bucket.download_data_file(s3_file_path.get('filename')) + logger.debug(f"download s3 file start : {file_name}") + local_file_path = vjsk_recv_bucket.download_data_file(file_name) + logger.debug(f"download s3 file done : {file_name}") # データファイル名に該当する辞書アクセス用のキーを取得する - key = vjsk_mapper.get_condkey_by_s3_file_path(s3_file_path.get('filename')) + key = vjsk_mapper.get_condkey_by_s3_file_path(file_name) # 想定されたデータファイルであれば辞書登録する if key is not None: @@ -172,7 +176,7 @@ def _import_file_to_db(): # DB登録 製造ロット番号マスタ JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) - logger.debug('_import_file_to_db end') + logger.debug('_import_file_to_db done') def _determine_today_is_stockslipdata_target(): @@ -193,7 +197,7 @@ def _determine_today_is_stockslipdata_target(): except Exception as e: logger.error(f'{e}') raise e - logger.debug("_determine_today_is_stockslipdata_target end") + logger.debug("_determine_today_is_stockslipdata_target done") return ret diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index cfd73810..ec243433 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -325,7 +325,8 @@ class VjskRecvFileMapper: ,t.REC_STS_KBN ,t.INS_DT ,t.UPD_DT - ,SYSDATE() FROM org05.hld_mst_v AS t + ,SYSDATE() + FROM org05.hld_mst_v AS t ON DUPLICATE KEY UPDATE V_HLD_CD=t.V_HLD_CD ,SUB_NUM=t.SUB_NUM @@ -1122,7 +1123,7 @@ class VjskRecvFileMapper: ,t.INS_DT ,t.INS_USR ,SYSDATE() - FROM org05.whole_stock AS t + FROM org05.whole_stock AS t ON DUPLICATE KEY UPDATE REC_DATA=t.REC_DATA ,REC_WHS_CD=t.REC_WHS_CD From 7a1941a7bcdf2144151f3cefa8cd3afefe72836f Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 
9 May 2023 22:31:50 +0900 Subject: [PATCH 09/86] =?UTF-8?q?transabtion=E5=BC=B5=E3=81=A3=E3=81=A6LOA?= =?UTF-8?q?D=E3=81=97=E3=81=9F=E3=81=82=E3=81=A8=E3=81=AEUPSERT=E3=81=8C?= =?UTF-8?q?=E3=82=B3=E3=82=B1=E3=81=9F=E3=81=A8=E3=81=8D=E3=81=AB=E3=80=81?= =?UTF-8?q?org=E3=81=8C=E3=83=AD=E3=83=BC=E3=83=AB=E3=83=90=E3=83=83?= =?UTF-8?q?=E3=82=AF=E3=81=95=E3=82=8C=E3=81=AA=E3=81=84=E3=82=88=E3=81=86?= =?UTF-8?q?=E3=81=AB=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 9653f858..86040640 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -19,8 +19,7 @@ class JjskDataLoadManager: upsert_sql = mapper.get_upsert_sql(condkey) try: - db.connect() # TODO:接続オプション local_infile = True が必要? 
- db.begin() + db.connect() # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") @@ -32,18 +31,19 @@ class JjskDataLoadManager: logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select + db.begin() result = db.execute(upsert_sql) logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') db.commit() - except Exception as e: # TODO:DB例外だけキャッチしたい + except Exception as e: db.rollback() logger.error(e) raise e finally: db.disconnect() - logger.debug("_import_to_db end") + logger.debug("_import_to_db done") return @classmethod From 6cbee0e8baca40d23cc7d8d3830871daae8ecb95 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 10 May 2023 13:22:40 +0900 Subject: [PATCH 10/86] =?UTF-8?q?=E6=97=A5=E4=BB=98=E3=83=87=E3=83=BC?= =?UTF-8?q?=E3=82=BF=E3=81=8Ctsv=E3=81=A7=E3=83=96=E3=83=A9=E3=83=B3?= =?UTF-8?q?=E3=82=AF=E3=81=A0=E3=81=A3=E3=81=9F=E3=81=A8=E3=81=8D=E3=80=81?= =?UTF-8?q?LOAD=E6=96=87=E3=81=A7'0000-00-00'=E3=81=AB=E5=A4=89=E6=8F=9B?= =?UTF-8?q?=E3=81=95=E3=82=8C=E3=81=A6=E3=81=97=E3=81=BE=E3=81=86=E5=95=8F?= =?UTF-8?q?=E9=A1=8C=E3=81=AE=E5=AF=BE=E5=87=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 8 +++++--- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 86040640..c63ac962 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -25,15 +25,17 @@ class JjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org}" \ + sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ " FIELDS 
TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') # org→srcにinsert select db.begin() - result = db.execute(upsert_sql) - logger.info(f'orgテーブルをsrcテーブルにUPSERT : 件数({result.rowcount})') + db.execute(upsert_sql) + # TODO: insert+select 実質10件なのに、resultのrowcountは20件になってしまう ※sqlalchemyの仕様 + # https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount + logger.info('orgテーブルをsrcテーブルにUPSERT') db.commit() except Exception as e: diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index ec243433..726e927c 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -815,7 +815,7 @@ class VjskRecvFileMapper: ,t.PKG_CD ,t.PKG_NAME ,t.CNV_NUM - ,t.JSK_START_DT + ,nullif(t.JSK_START_DT, 0) ,t.PRD_SALE_KBN ,t.JSK_PROC_KBN ,t.START_DATE @@ -846,7 +846,7 @@ class VjskRecvFileMapper: ,PKG_CD=t.PKG_CD ,PKG_NAME=t.PKG_NAME ,CNV_NUM=t.CNV_NUM - ,JSK_START_DT=t.JSK_START_DT + ,JSK_START_DT=nullif(t.JSK_START_DT, 0) ,PRD_SALE_KBN=t.PRD_SALE_KBN ,JSK_PROC_KBN=t.JSK_PROC_KBN ,START_DATE=t.START_DATE From a0e858e93f141965c9351c71a8acbd1ea0664ae1 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 10 May 2023 16:50:44 +0900 Subject: [PATCH 11/86] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E3=81=AE=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../common/calendar_wholestocksaler_file.py | 32 ------------ .../src/batch/vjsk/vjsk_data_load_manager.py | 15 ++++-- .../src/batch/vjsk/vjsk_importer.py | 50 ++++++++++--------- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 22 ++++++++ 4 files changed, 59 insertions(+), 60 deletions(-) delete mode 
100644 ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py diff --git a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py b/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py deleted file mode 100644 index ba687514..00000000 --- a/ecs/jskult-batch-daily/src/batch/common/calendar_wholestocksaler_file.py +++ /dev/null @@ -1,32 +0,0 @@ -from src.system_var import constants - - -class CalendarWholwSalerStockFile: - """V実消化卸在庫データ連携日ファイル""" - - __calendar_file_lines: list[str] - - def __init__(self, calendar_file_path): - with open(calendar_file_path) as f: - self.__calendar_file_lines: list[str] = f.readlines() - - def compare_date(self, date_str: str) -> bool: - """与えられた日付がV実消化卸在庫データ連携日ファイル内に含まれているかどうか - V実消化卸在庫データ連携日ファイル内の日付はyyyy/mm/ddで書かれている前提 - コメント(#)が含まれている行は無視される - - Args: - date_str (str): yyyy/mm/dd文字列 - - Returns: - bool: 含まれていればTrue - """ - for calendar_date in self.__calendar_file_lines: - # コメント行が含まれている場合はスキップ - if constants.CALENDAR_COMMENT_SYMBOL in calendar_date: - continue - - if date_str in calendar_date: - return True - - return False diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index c63ac962..909bd041 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -7,7 +7,7 @@ logger = get_logger('V実消化データ取込(DB登録)') mapper = VjskRecvFileMapper() -class JjskDataLoadManager: +class VjskDataLoadManager: def __init__(self): pass @@ -15,7 +15,9 @@ class JjskDataLoadManager: logger.debug(f"_import_to_db start (src_file_name : {src_file_name}, condkey : {condkey})") db = Database.get_instance() + data_name = mapper.get_data_name(condkey) table_name_org = mapper.get_org_table(condkey) + table_name_src = mapper.get_src_table(condkey) upsert_sql = mapper.get_upsert_sql(condkey) try: @@ -28,14 +30,17 @@ class 
JjskDataLoadManager: sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" result = db.execute(sql, {"src_file_name": src_file_name}) - logger.info(f'tsvデータをorgテーブルにLOAD : 件数({result.rowcount})') + logger.debug(sql) + logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') # org→srcにinsert select db.begin() + logger.debug(upsert_sql) db.execute(upsert_sql) - # TODO: insert+select 実質10件なのに、resultのrowcountは20件になってしまう ※sqlalchemyの仕様 - # https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount - logger.info('orgテーブルをsrcテーブルにUPSERT') + # MEMO: insert+selectの結果件数は、LOAD結果と必ず等しいので、executeの結果件数はログ出力しない + # MEMO: insert+select 実質10件なのに、result.rowcountは20件になってしまう ※sqlalchemyの仕様 + # MEMO: https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount + logger.info(f'{table_name_org}を{table_name_src}にUPSERT') db.commit() except Exception as e: diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index d667416e..31c42874 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,8 +1,7 @@ from src.aws.s3 import ConfigBucket, VjskRecieveBucket from src.batch.common.batch_context import BatchContext -from src.batch.common.calendar_wholestocksaler_file import \ - CalendarWholwSalerStockFile -from src.batch.vjsk.vjsk_data_load_manager import JjskDataLoadManager +from src.batch.common.calendar_file import CalendarFile +from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger @@ -102,7 +101,7 @@ def _check_received_files(): logger.debug('_check_received_files done') - 
return True + return def _import_file_to_db(): @@ -132,49 +131,49 @@ def _import_file_to_db(): # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) if batch_context.is_import_target_vjsk_stockslipdata: - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) # DB登録 卸販売データ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) # DB登録 卸組織変換マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) # DB登録 施設統合マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) # DB登録 卸マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) # DB登録 卸ホールディングスマスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) # DB登録 施設マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) # DB登録 メーカー卸組織展開表 - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) # DB登録 取引区分マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) # DB登録 製品マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) # DB登録 製品価格マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) # DB登録 卸得意先情報マスタ - 
JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) # DB登録 MDBコード変換マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) # DB登録 生物由来データ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) # DB登録 製造ロット番号マスタ - JjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) logger.debug('_import_file_to_db done') @@ -187,10 +186,10 @@ def _determine_today_is_stockslipdata_target(): today = batch_context.syor_date # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする - config_file_path = ConfigBucket().download_wholesaler_stock_list() + wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_list() # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する - target_days = CalendarWholwSalerStockFile(config_file_path) + target_days = CalendarFile(wholesaler_stock_list_file_path) # 処理日付が、設定ファイル「V実消化卸在庫データ連携日ファイル」の定義に該当するかを判定する ret = target_days.compare_date(today) @@ -205,6 +204,11 @@ def exec(): """V実消化データ取込処理""" logger.info('Start Jitsusyouka Torikomi PGM.') + # 非営業日なら何もせず終了 + if batch_context.is_not_business_day: + logger.debug('非営業日なので処理をスキップ') + return + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる logger.debug('卸在庫データ取込対象日であるかを判定') batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() @@ -219,7 +223,7 @@ def exec(): _check_received_files() except BatchOperationException as e: - logger.error('受領したV実消化データファイルに未受領もものがあります') + logger.debug('受領したV実消化データファイルに未受領もものがあります') raise e logger.debug('V実消化データファイル受領チェック:終了') @@ -229,7 +233,7 @@ def exec(): # S3バケットにある受領済のV実消化データファイルをデータベースに登録する _import_file_to_db() except Exception as e: - logger.error(f'データベース登録失敗 {e}') + 
logger.debug(f'データベース登録失敗 {e}') raise e logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 726e927c..2ba87909 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -18,6 +18,7 @@ class VjskRecvFileMapper: CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ CONDKEY_LOT_NUM_MST = "LOT_NUM_MST" # ロットマスタデータ + _KEY_DATA_NAME = "data_name" _KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" _KEY_ORG_TABLE = "org_table" @@ -26,6 +27,7 @@ class VjskRecvFileMapper: _VJSK_INTERFACE_MAPPING = { # 販売実績データ CONDKEY_SLIP_DATA: { + _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.sales", @@ -294,6 +296,7 @@ class VjskRecvFileMapper: # V卸ホールディングスマスタ CONDKEY_HLD_MST: { + _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.hld_mst_v", @@ -346,6 +349,7 @@ class VjskRecvFileMapper: # V卸マスタ CONDKEY_WHS_MST: { + _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_mst_v", @@ -413,6 +417,7 @@ class VjskRecvFileMapper: # Vメーカー卸組織展開表 CONDKEY_MKR_ORG_HORIZON: { + _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", @@ -570,6 +575,7 @@ class VjskRecvFileMapper: # V卸組織変換マスタ CONDKEY_ORG_CNV_MST: { + _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", @@ -622,6 +628,7 @@ class VjskRecvFileMapper: # V取引区分マスタ CONDKEY_TRAN_KBN_MST: { + _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", @@ -668,6 +675,7 @@ class VjskRecvFileMapper: # V施設マスタ CONDKEY_FCL_MST: { + 
_KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.fcl_mst_v", @@ -759,6 +767,7 @@ class VjskRecvFileMapper: # V製品マスタ CONDKEY_PHM_PRD_MST: { + _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", @@ -862,6 +871,7 @@ class VjskRecvFileMapper: # V製品価格マスタ CONDKEY_PHM_PRICE_MST: { + _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.phm_price_mst_v", @@ -911,6 +921,7 @@ class VjskRecvFileMapper: # V施設統合マスタ CONDKEY_VOP_HCO_MERGE: { + _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", @@ -942,6 +953,7 @@ class VjskRecvFileMapper: # V卸得意先情報マスタ CONDKEY_WHS_CUSTOMER_MST: { + _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", @@ -1012,6 +1024,7 @@ class VjskRecvFileMapper: # MDBコード変換表 CONDKEY_MDB_CONV_MST: { + _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", @@ -1055,6 +1068,7 @@ class VjskRecvFileMapper: # 卸在庫データ CONDKEY_STOCK_SLIP_DATA: { + _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.whole_stock", @@ -1161,6 +1175,7 @@ class VjskRecvFileMapper: # 生物由来データ CONDKEY_BIO_SLIP_DATA: { + _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.bio_sales", @@ -1414,6 +1429,7 @@ class VjskRecvFileMapper: # ロットマスタデータ CONDKEY_LOT_NUM_MST: { + _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".tsv", _KEY_ORG_TABLE: "org05.lot_num_mst", @@ -1450,6 +1466,12 @@ class VjskRecvFileMapper: }, } + def get_data_name(self, condkey: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = 
self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_DATA_NAME) + return ret + def get_file_prefix(self, condkey: str) -> str: ret = None if condkey in self._VJSK_INTERFACE_MAPPING: From d370e8c87d92faddcfbf8da7217c2e655d54be9b Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 15:32:42 +0900 Subject: [PATCH 12/86] =?UTF-8?q?style:=20=E3=82=A8=E3=83=87=E3=82=A3?= =?UTF-8?q?=E3=82=BF=E3=81=AE=E8=AD=A6=E5=91=8A=E8=A7=A3=E6=B6=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 2ba87909..27c47ad5 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1105,7 +1105,7 @@ class VjskRecvFileMapper: ,INS_DT ,INS_USR ,DWH_UPD_DT - ) + ) SELECT t.REC_DATA ,t.REC_WHS_CD @@ -1138,7 +1138,7 @@ class VjskRecvFileMapper: ,t.INS_USR ,SYSDATE() FROM org05.whole_stock AS t - ON DUPLICATE KEY UPDATE + ON DUPLICATE KEY UPDATE REC_DATA=t.REC_DATA ,REC_WHS_CD=t.REC_WHS_CD ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD From 285c725d5b1571251f82431ab826608e8905e851 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 21:07:41 +0900 Subject: [PATCH 13/86] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 8 +- .../src/batch/common/batch_context.py | 12 +- .../src/batch/vjsk/vjsk_data_load_manager.py | 24 +- .../src/batch/vjsk/vjsk_importer.py | 122 +- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 2374 ++++++++--------- 5 files changed, 1266 insertions(+), 1274 deletions(-) diff --git 
a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index dde7cd46..62111409 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -91,13 +91,13 @@ class ConfigBucket(S3Bucket): f.seek(0) return temporary_file_path - def download_wholesaler_stock_list(self): + def download_wholesaler_stock_input_day_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) f.seek(0) return temporary_file_path @@ -110,7 +110,7 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER -class VjskRecieveBucket(S3Bucket): +class VjskReceiveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV diff --git a/ecs/jskult-batch-daily/src/batch/common/batch_context.py b/ecs/jskult-batch-daily/src/batch/common/batch_context.py index b493ecca..b3fc4967 100644 --- a/ecs/jskult-batch-daily/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-daily/src/batch/common/batch_context.py @@ -3,7 +3,7 @@ class BatchContext: __syor_date: str # 処理日(yyyy/mm/dd形式) __is_not_business_day: bool # 日次バッチ起動日フラグ __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ - __is_import_target_vjsk_stockslipdata: bool # 卸在庫データ取込対象フラグ + __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ def __init__(self) -> None: self.__is_not_business_day = False @@ -40,9 +40,9 @@ class 
BatchContext: self.__is_ultmarc_imported = flag @property - def is_import_target_vjsk_stockslipdata(self): - return self.__is_import_target_vjsk_stockslipdata + def is_vjsk_stock_import_day(self): + return self.__is_vjsk_stock_import_day - @is_import_target_vjsk_stockslipdata.setter - def is_import_target_vjsk_stockslipdata(self, flag: bool): - self.__is_import_target_vjsk_stockslipdata = flag + @is_vjsk_stock_import_day.setter + def is_vjsk_stock_import_day(self, flag: bool): + self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 909bd041..2d99951b 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -1,10 +1,11 @@ # from src.batch.vjsk.vjsk_recv_file_manager import VjskDatFile -from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper from src.db.database import Database +from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger logger = get_logger('V実消化データ取込(DB登録)') -mapper = VjskRecvFileMapper() +mapper = VjskReceiveFileMapper() class VjskDataLoadManager: @@ -27,8 +28,13 @@ class VjskDataLoadManager: db.execute(f"TRUNCATE TABLE {table_name_org};") # orgにload ※warningは1148エラーになるらしい - sql = f"LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} " \ - " FIELDS TERMINATED BY '\\t' ENCLOSED BY '\"' IGNORE 1 LINES;" + sql = f"""\ + LOAD DATA LOCAL INFILE :src_file_name + INTO TABLE {table_name_org} + FIELDS TERMINATED BY '\\t' + ENCLOSED BY '\"' + IGNORE 1 LINES; + """ result = db.execute(sql, {"src_file_name": src_file_name}) logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') @@ -45,8 +51,7 @@ class VjskDataLoadManager: db.commit() except Exception as e: 
db.rollback() - logger.error(e) - raise e + raise BatchOperationException(e) finally: db.disconnect() @@ -54,9 +59,8 @@ class VjskDataLoadManager: return @classmethod - def Load(self, target: dict): - logger.debug(f'JjskDataLoadManager#load start target:{target}') - # target : {"condkey": key, "src_file_path":local_file_path} + def load(self, target: dict): + logger.debug(f'load start target:{target}') # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] @@ -64,5 +68,5 @@ class VjskDataLoadManager: # データベース登録 self._import_to_db(local_file_name, target["condkey"]) - logger.debug('JjskDataLoadManager#load done') + logger.debug('load done') return diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 31c42874..719e3e62 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -1,15 +1,54 @@ -from src.aws.s3 import ConfigBucket, VjskRecieveBucket +from src.aws.s3 import ConfigBucket, VjskReceiveBucket from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager -from src.batch.vjsk.vjsk_recv_file_mapper import VjskRecvFileMapper +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger logger = get_logger('V実消化データ取込') batch_context = BatchContext.get_instance() -vjsk_recv_bucket = VjskRecieveBucket() -vjsk_mapper = VjskRecvFileMapper() +vjsk_recv_bucket = VjskReceiveBucket() +vjsk_mapper = VjskReceiveFileMapper() + + +def exec(): + """V実消化データ取込処理""" + logger.debug('exec start') + + # 非営業日なら何もせず終了 + if batch_context.is_not_business_day: + logger.debug('非営業日なので処理をスキップ') + return + + # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる + logger.debug('卸在庫データ取込対象日であるかを判定') + 
batch_context.is_vjsk_stock_import_day = _determine_today_is_stockslipdata_target() + logger.debug(f'判定結果 : {batch_context.is_vjsk_stock_import_day}') + if batch_context.is_vjsk_stock_import_day: + logger.info('卸在庫データ取込対象日です') + + # V実消化データファイル受領チェック + logger.debug('V実消化データファイル受領チェック:開始') + try: + # S3バケットにある受領済のV実消化データファイルの存在チェックをする + _check_received_files() + + except BatchOperationException as e: + logger.debug('受領したV実消化データファイルに未受領もものがあります') + raise e + logger.debug('V実消化データファイル受領チェック:終了') + + # データベース取込 + logger.debug('V実消化データ取込:開始') + try: + # S3バケットにある受領済のV実消化データファイルをデータベースに登録する + _import_file_to_db() + except Exception as e: + logger.debug(f'データベース登録失敗 {e}') + raise e + + logger.debug('exec done') def _check_if_file_exists(src_list: list, condkey: str) -> bool: @@ -39,7 +78,7 @@ def _check_received_files(): logger.debug(f'ファイル一覧{received_s3_files}') # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施) - if batch_context.is_import_target_vjsk_stockslipdata: + if batch_context.is_vjsk_stock_import_day: if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA): raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_s3_files}') @@ -130,50 +169,50 @@ def _import_file_to_db(): logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) - if batch_context.is_import_target_vjsk_stockslipdata: - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) + if batch_context.is_vjsk_stock_import_day: + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA]) # DB登録 卸販売データ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA]) # DB登録 卸組織変換マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST]) # DB登録 施設統合マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) + 
VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE]) # DB登録 卸マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_MST]) # DB登録 卸ホールディングスマスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_HLD_MST]) # DB登録 施設マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_FCL_MST]) # DB登録 メーカー卸組織展開表 - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON]) # DB登録 取引区分マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST]) # DB登録 製品マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST]) # DB登録 製品価格マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST]) # DB登録 卸得意先情報マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST]) # DB登録 MDBコード変換マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST]) # DB登録 生物由来データ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA]) # DB登録 製造ロット番号マスタ - VjskDataLoadManager.Load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) + VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST]) logger.debug('_import_file_to_db done') @@ -186,7 +225,7 @@ def _determine_today_is_stockslipdata_target(): today = batch_context.syor_date 
# S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする - wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_list() + wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_input_day_list() # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する target_days = CalendarFile(wholesaler_stock_list_file_path) @@ -198,42 +237,3 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret - - -def exec(): - """V実消化データ取込処理""" - logger.info('Start Jitsusyouka Torikomi PGM.') - - # 非営業日なら何もせず終了 - if batch_context.is_not_business_day: - logger.debug('非営業日なので処理をスキップ') - return - - # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる - logger.debug('卸在庫データ取込対象日であるかを判定') - batch_context.is_import_target_vjsk_stockslipdata = _determine_today_is_stockslipdata_target() - logger.debug(f'判定結果 : {batch_context.is_import_target_vjsk_stockslipdata}') - if batch_context.is_import_target_vjsk_stockslipdata: - logger.info('卸在庫データ取込対象日です') - - # V実消化データファイル受領チェック - logger.debug('V実消化データファイル受領チェック:開始') - try: - # S3バケットにある受領済のV実消化データファイルの存在チェックをする - _check_received_files() - - except BatchOperationException as e: - logger.debug('受領したV実消化データファイルに未受領もものがあります') - raise e - logger.debug('V実消化データファイル受領チェック:終了') - - # データベース取込 - logger.debug('V実消化データ取込:開始') - try: - # S3バケットにある受領済のV実消化データファイルをデータベースに登録する - _import_file_to_db() - except Exception as e: - logger.debug(f'データベース登録失敗 {e}') - raise e - - logger.debug('V実消化データ取込:終了') diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 27c47ad5..612309d2 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -1,7 +1,7 @@ import textwrap -class VjskRecvFileMapper: +class VjskReceiveFileMapper: CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ 
CONDKEY_WHS_MST = "WHS_MST" # V卸マスタ @@ -34,262 +34,262 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.sales ( - REC_DATA - ,REC_WHS_CD - ,REC_WHS_SUB_CD - ,REC_WHS_ORG_CD - ,REC_CUST_CD - ,REC_COMM_CD - ,REC_TRAN_KBN - ,REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK - ,REC_URAG_NUM - ,REC_QTY - ,REC_NONYU_PRICE - ,REC_NONYU_AMT - ,REC_COMM_NAME - ,REC_NONYU_FCL_NAME - ,FREE_ITEM - ,REC_NONYU_FCL_ADDR - ,REC_NONYU_FCL_POST - ,REC_NONYU_FCL_TEL - ,REC_BEF_HSDN_YMD - ,REC_BEF_SLIP_NUM - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,HSDN_YMD - ,EXEC_DT - ,V_TRAN_CD - ,TRAN_KBN_NAME - ,WHS_ORG_CD - ,V_WHSORG_CD - ,WHS_ORG_NAME - ,WHS_ORG_KN - ,V_WHS_CD - ,WHS_NAME - ,NONYU_FCL_CD - ,V_INST_CD - ,V_INST_KN - ,V_INST_NAME - ,V_INST_ADDR - ,COMM_CD - ,COMM_NAME - ,NONYU_QTY - ,NONYU_PRICE - ,NONYU_AMT - ,SHIKIRI_PRICE - ,SHIKIRI_AMT - ,NHI_PRICE - ,NHI_AMT - ,WHSPOS_ERR_KBN - ,HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN - ,FCL_EXIS_KBN - ,BEF_HSDN_YMD - ,BEF_SLIP_NUM - ,SLIP_ORG_KBN - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,ERR_FLG11 - ,ERR_FLG12 - ,ERR_FLG13 - ,ERR_FLG14 - ,ERR_FLG15 - ,ERR_FLG16 - ,ERR_FLG17 - ,ERR_FLG18 - ,ERR_FLG19 - ,ERR_FLG20 - ,KJYO_YM - ,TKSNBK_KBN - ,FCL_EXEC_KBN - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_qty + ,rec_nonyu_price + ,rec_nonyu_amt + ,rec_comm_name + ,rec_nonyu_fcl_name + ,free_item + ,rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd + ,rec_bef_slip_num + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + 
,v_inst_cd + ,v_inst_kn + ,v_inst_name + ,v_inst_addr + ,comm_cd + ,comm_name + ,nonyu_qty + ,nonyu_price + ,nonyu_amt + ,shikiri_price + ,shikiri_amt + ,nhi_price + ,nhi_amt + ,whspos_err_kbn + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,bef_hsdn_ymd + ,bef_slip_num + ,slip_org_kbn + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + ,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + ,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_WHS_ORG_CD - ,t.REC_CUST_CD - ,t.REC_COMM_CD - ,t.REC_TRAN_KBN - ,t.REV_HSDNYMD_WRK - ,t.REV_HSDNYMD_SRK - ,t.REC_URAG_NUM - ,t.REC_QTY - ,t.REC_NONYU_PRICE - ,t.REC_NONYU_AMT - ,t.REC_COMM_NAME - ,t.REC_NONYU_FCL_NAME - ,t.FREE_ITEM - ,t.REC_NONYU_FCL_ADDR - ,t.REC_NONYU_FCL_POST - ,t.REC_NONYU_FCL_TEL - ,t.REC_BEF_HSDN_YMD - ,t.REC_BEF_SLIP_NUM - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.HSDN_YMD - ,t.EXEC_DT - ,t.V_TRAN_CD - ,t.TRAN_KBN_NAME - ,t.WHS_ORG_CD - ,t.V_WHSORG_CD - ,t.WHS_ORG_NAME - ,t.WHS_ORG_KN - ,t.V_WHS_CD - ,t.WHS_NAME - ,t.NONYU_FCL_CD - ,t.V_INST_CD - ,t.V_INST_KN - ,t.V_INST_NAME - ,t.V_INST_ADDR - ,t.COMM_CD - ,t.COMM_NAME - ,t.NONYU_QTY - ,t.NONYU_PRICE - ,t.NONYU_AMT - ,t.SHIKIRI_PRICE - ,t.SHIKIRI_AMT - ,t.NHI_PRICE - ,t.NHI_AMT - ,t.WHSPOS_ERR_KBN - ,t.HTDNYMD_ERR_KBN - ,t.PRD_EXIS_KBN - ,t.FCL_EXIS_KBN - ,t.BEF_HSDN_YMD - ,t.BEF_SLIP_NUM - ,t.SLIP_ORG_KBN - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - ,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.ERR_FLG11 - ,t.ERR_FLG12 - ,t.ERR_FLG13 - ,t.ERR_FLG14 - ,t.ERR_FLG15 - ,t.ERR_FLG16 - ,t.ERR_FLG17 - ,t.ERR_FLG18 - ,t.ERR_FLG19 - ,t.ERR_FLG20 - ,t.KJYO_YM - ,t.TKSNBK_KBN - ,t.FCL_EXEC_KBN - ,t.REC_STS_KBN - 
,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_qty + ,t.rec_nonyu_price + ,t.rec_nonyu_amt + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.free_item + ,t.rec_nonyu_fcl_addr + ,t.rec_nonyu_fcl_post + ,t.rec_nonyu_fcl_tel + ,t.rec_bef_hsdn_ymd + ,t.rec_bef_slip_num + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_kn + ,t.v_inst_name + ,t.v_inst_addr + ,t.comm_cd + ,t.comm_name + ,t.nonyu_qty + ,t.nonyu_price + ,t.nonyu_amt + ,t.shikiri_price + ,t.shikiri_amt + ,t.nhi_price + ,t.nhi_amt + ,t.whspos_err_kbn + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.bef_hsdn_ymd + ,t.bef_slip_num + ,t.slip_org_kbn + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.sales AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD - ,REC_CUST_CD=t.REC_CUST_CD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_TRAN_KBN=t.REC_TRAN_KBN - ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK - ,REC_URAG_NUM=t.REC_URAG_NUM - ,REC_QTY=t.REC_QTY - ,REC_NONYU_PRICE=t.REC_NONYU_PRICE - ,REC_NONYU_AMT=t.REC_NONYU_AMT - ,REC_COMM_NAME=t.REC_COMM_NAME - ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME - ,FREE_ITEM=t.FREE_ITEM - 
,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR - ,REC_NONYU_FCL_POST=t.REC_NONYU_FCL_POST - ,REC_NONYU_FCL_TEL=t.REC_NONYU_FCL_TEL - ,REC_BEF_HSDN_YMD=t.REC_BEF_HSDN_YMD - ,REC_BEF_SLIP_NUM=t.REC_BEF_SLIP_NUM - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,HSDN_YMD=t.HSDN_YMD - ,EXEC_DT=t.EXEC_DT - ,V_TRAN_CD=t.V_TRAN_CD - ,TRAN_KBN_NAME=t.TRAN_KBN_NAME - ,WHS_ORG_CD=t.WHS_ORG_CD - ,V_WHSORG_CD=t.V_WHSORG_CD - ,WHS_ORG_NAME=t.WHS_ORG_NAME - ,WHS_ORG_KN=t.WHS_ORG_KN - ,V_WHS_CD=t.V_WHS_CD - ,WHS_NAME=t.WHS_NAME - ,NONYU_FCL_CD=t.NONYU_FCL_CD - ,V_INST_CD=t.V_INST_CD - ,V_INST_KN=t.V_INST_KN - ,V_INST_NAME=t.V_INST_NAME - ,V_INST_ADDR=t.V_INST_ADDR - ,COMM_CD=t.COMM_CD - ,COMM_NAME=t.COMM_NAME - ,NONYU_QTY=t.NONYU_QTY - ,NONYU_PRICE=t.NONYU_PRICE - ,NONYU_AMT=t.NONYU_AMT - ,SHIKIRI_PRICE=t.SHIKIRI_PRICE - ,SHIKIRI_AMT=t.SHIKIRI_AMT - ,NHI_PRICE=t.NHI_PRICE - ,NHI_AMT=t.NHI_AMT - ,WHSPOS_ERR_KBN=t.WHSPOS_ERR_KBN - ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN=t.PRD_EXIS_KBN - ,FCL_EXIS_KBN=t.FCL_EXIS_KBN - ,BEF_HSDN_YMD=t.BEF_HSDN_YMD - ,BEF_SLIP_NUM=t.BEF_SLIP_NUM - ,SLIP_ORG_KBN=t.SLIP_ORG_KBN - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - ,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,ERR_FLG11=t.ERR_FLG11 - ,ERR_FLG12=t.ERR_FLG12 - ,ERR_FLG13=t.ERR_FLG13 - ,ERR_FLG14=t.ERR_FLG14 - ,ERR_FLG15=t.ERR_FLG15 - ,ERR_FLG16=t.ERR_FLG16 - ,ERR_FLG17=t.ERR_FLG17 - ,ERR_FLG18=t.ERR_FLG18 - ,ERR_FLG19=t.ERR_FLG19 - ,ERR_FLG20=t.ERR_FLG20 - ,KJYO_YM=t.KJYO_YM - ,TKSNBK_KBN=t.TKSNBK_KBN - ,FCL_EXEC_KBN=t.FCL_EXEC_KBN - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + 
,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_qty=t.rec_qty + ,rec_nonyu_price=t.rec_nonyu_price + ,rec_nonyu_amt=t.rec_nonyu_amt + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,free_item=t.free_item + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post=t.rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel=t.rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd=t.rec_bef_hsdn_ymd + ,rec_bef_slip_num=t.rec_bef_slip_num + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + ,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_kn=t.v_inst_kn + ,v_inst_name=t.v_inst_name + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,comm_name=t.comm_name + ,nonyu_qty=t.nonyu_qty + ,nonyu_price=t.nonyu_price + ,nonyu_amt=t.nonyu_amt + ,shikiri_price=t.shikiri_price + ,shikiri_amt=t.shikiri_amt + ,nhi_price=t.nhi_price + ,nhi_amt=t.nhi_amt + ,whspos_err_kbn=t.whspos_err_kbn + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,bef_hsdn_ymd=t.bef_hsdn_ymd + ,bef_slip_num=t.bef_slip_num + ,slip_org_kbn=t.slip_org_kbn + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,err_flg11=t.err_flg11 + ,err_flg12=t.err_flg12 + ,err_flg13=t.err_flg13 + ,err_flg14=t.err_flg14 + ,err_flg15=t.err_flg15 + ,err_flg16=t.err_flg16 + ,err_flg17=t.err_flg17 + 
,err_flg18=t.err_flg18 + ,err_flg19=t.err_flg19 + ,err_flg20=t.err_flg20 + ,kjyo_ym=t.kjyo_ym + ,tksnbk_kbn=t.tksnbk_kbn + ,fcl_exec_kbn=t.fcl_exec_kbn + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -303,46 +303,46 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.hld_mst_v ( - V_HLD_CD - ,SUB_NUM - ,NAME - ,KN_NAME - ,ABB_NAME - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_hld_cd + ,sub_num + ,name + ,kn_name + ,abb_name + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_HLD_CD - ,t.SUB_NUM - ,t.NAME - ,t.KN_NAME - ,t.ABB_NAME - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_hld_cd + ,t.sub_num + ,t.name + ,t.kn_name + ,t.abb_name + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.hld_mst_v AS t ON DUPLICATE KEY UPDATE - V_HLD_CD=t.V_HLD_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ABB_NAME=t.ABB_NAME - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_hld_cd=t.v_hld_cd + ,sub_num=t.sub_num + ,name=t.name + ,kn_name=t.kn_name + ,abb_name=t.abb_name + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; """) }, @@ -356,61 +356,61 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whs_mst_v ( - V_WHS_CD - ,SUB_NUM - ,NAME - ,KN_NAME - ,ABB_NAME - ,POSTAL_CD - ,ADDR - ,KN_ADDR - ,TEL_NUM - ,V_HLD_CD - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_whs_cd + ,sub_num + ,name + ,kn_name + ,abb_name + ,postal_cd + 
,addr + ,kn_addr + ,tel_num + ,v_hld_cd + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_WHS_CD - ,t.SUB_NUM - ,t.NAME - ,t.KN_NAME - ,t.ABB_NAME - ,t.POSTAL_CD - ,t.ADDR - ,t.KN_ADDR - ,t.TEL_NUM - ,t.V_HLD_CD - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_whs_cd + ,t.sub_num + ,t.name + ,t.kn_name + ,t.abb_name + ,t.postal_cd + ,t.addr + ,t.kn_addr + ,t.tel_num + ,t.v_hld_cd + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.whs_mst_v AS t ON DUPLICATE KEY UPDATE - V_WHS_CD=t.V_WHS_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ABB_NAME=t.ABB_NAME - ,POSTAL_CD=t.POSTAL_CD - ,ADDR=t.ADDR - ,KN_ADDR=t.KN_ADDR - ,TEL_NUM=t.TEL_NUM - ,V_HLD_CD=t.V_HLD_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_whs_cd=t.v_whs_cd + ,sub_num=t.sub_num + ,name=t.name + ,kn_name=t.kn_name + ,abb_name=t.abb_name + ,postal_cd=t.postal_cd + ,addr=t.addr + ,kn_addr=t.kn_addr + ,tel_num=t.tel_num + ,v_hld_cd=t.v_hld_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -424,151 +424,151 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.mkr_org_horizon_v ( - VID_KIND_1 - ,V_CD_1 - ,NAME_1 - ,DSP_ODR_1 - ,VID_KIND_2 - ,V_CD_2 - ,NAME_2 - ,DSP_ODR_2 - ,VID_KIND_3 - ,V_CD_3 - ,NAME_3 - ,DSP_ODR_3 - ,VID_KIND_4 - ,V_CD_4 - ,NAME_4 - ,DSP_ODR_4 - ,VID_KIND_5 - ,V_CD_5 - ,NAME_5 - ,DSP_ODR_5 - ,VID_KIND_6 - ,V_CD_6 - ,NAME_6 - ,DSP_ODR_6 - ,VID_KIND_7 - ,V_CD_7 - ,NAME_7 - ,DSP_ODR_7 - ,VID_KIND_8 - ,V_CD_8 - ,NAME_8 - ,DSP_ODR_8 - ,VID_KIND_9 - ,V_CD_9 - ,NAME_9 - ,DSP_ODR_9 - ,VID_KIND_10 - ,V_CD_10 - ,NAME_10 - 
,DSP_ODR_10 - ,V_WHS_CD - ,START_DATE - ,END_DATE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + vid_kind_1 + ,v_cd_1 + ,name_1 + ,dsp_odr_1 + ,vid_kind_2 + ,v_cd_2 + ,name_2 + ,dsp_odr_2 + ,vid_kind_3 + ,v_cd_3 + ,name_3 + ,dsp_odr_3 + ,vid_kind_4 + ,v_cd_4 + ,name_4 + ,dsp_odr_4 + ,vid_kind_5 + ,v_cd_5 + ,name_5 + ,dsp_odr_5 + ,vid_kind_6 + ,v_cd_6 + ,name_6 + ,dsp_odr_6 + ,vid_kind_7 + ,v_cd_7 + ,name_7 + ,dsp_odr_7 + ,vid_kind_8 + ,v_cd_8 + ,name_8 + ,dsp_odr_8 + ,vid_kind_9 + ,v_cd_9 + ,name_9 + ,dsp_odr_9 + ,vid_kind_10 + ,v_cd_10 + ,name_10 + ,dsp_odr_10 + ,v_whs_cd + ,start_date + ,end_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.VID_KIND_1 - ,t.V_CD_1 - ,t.NAME_1 - ,t.DSP_ODR_1 - ,t.VID_KIND_2 - ,t.V_CD_2 - ,t.NAME_2 - ,t.DSP_ODR_2 - ,t.VID_KIND_3 - ,t.V_CD_3 - ,t.NAME_3 - ,t.DSP_ODR_3 - ,t.VID_KIND_4 - ,t.V_CD_4 - ,t.NAME_4 - ,t.DSP_ODR_4 - ,t.VID_KIND_5 - ,t.V_CD_5 - ,t.NAME_5 - ,t.DSP_ODR_5 - ,t.VID_KIND_6 - ,t.V_CD_6 - ,t.NAME_6 - ,t.DSP_ODR_6 - ,t.VID_KIND_7 - ,t.V_CD_7 - ,t.NAME_7 - ,t.DSP_ODR_7 - ,t.VID_KIND_8 - ,t.V_CD_8 - ,t.NAME_8 - ,t.DSP_ODR_8 - ,t.VID_KIND_9 - ,t.V_CD_9 - ,t.NAME_9 - ,t.DSP_ODR_9 - ,t.VID_KIND_10 - ,t.V_CD_10 - ,t.NAME_10 - ,t.DSP_ODR_10 - ,t.V_WHS_CD - ,t.START_DATE - ,t.END_DATE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.vid_kind_1 + ,t.v_cd_1 + ,t.name_1 + ,t.dsp_odr_1 + ,t.vid_kind_2 + ,t.v_cd_2 + ,t.name_2 + ,t.dsp_odr_2 + ,t.vid_kind_3 + ,t.v_cd_3 + ,t.name_3 + ,t.dsp_odr_3 + ,t.vid_kind_4 + ,t.v_cd_4 + ,t.name_4 + ,t.dsp_odr_4 + ,t.vid_kind_5 + ,t.v_cd_5 + ,t.name_5 + ,t.dsp_odr_5 + ,t.vid_kind_6 + ,t.v_cd_6 + ,t.name_6 + ,t.dsp_odr_6 + ,t.vid_kind_7 + ,t.v_cd_7 + ,t.name_7 + ,t.dsp_odr_7 + ,t.vid_kind_8 + ,t.v_cd_8 + ,t.name_8 + ,t.dsp_odr_8 + ,t.vid_kind_9 + ,t.v_cd_9 + ,t.name_9 + ,t.dsp_odr_9 + ,t.vid_kind_10 + ,t.v_cd_10 + ,t.name_10 + ,t.dsp_odr_10 + ,t.v_whs_cd + ,t.start_date + ,t.end_date + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.mkr_org_horizon_v AS t ON 
DUPLICATE KEY UPDATE - VID_KIND_1=t.VID_KIND_1 - ,V_CD_1=t.V_CD_1 - ,NAME_1=t.NAME_1 - ,DSP_ODR_1=t.DSP_ODR_1 - ,VID_KIND_2=t.VID_KIND_2 - ,V_CD_2=t.V_CD_2 - ,NAME_2=t.NAME_2 - ,DSP_ODR_2=t.DSP_ODR_2 - ,VID_KIND_3=t.VID_KIND_3 - ,V_CD_3=t.V_CD_3 - ,NAME_3=t.NAME_3 - ,DSP_ODR_3=t.DSP_ODR_3 - ,VID_KIND_4=t.VID_KIND_4 - ,V_CD_4=t.V_CD_4 - ,NAME_4=t.NAME_4 - ,DSP_ODR_4=t.DSP_ODR_4 - ,VID_KIND_5=t.VID_KIND_5 - ,V_CD_5=t.V_CD_5 - ,NAME_5=t.NAME_5 - ,DSP_ODR_5=t.DSP_ODR_5 - ,VID_KIND_6=t.VID_KIND_6 - ,V_CD_6=t.V_CD_6 - ,NAME_6=t.NAME_6 - ,DSP_ODR_6=t.DSP_ODR_6 - ,VID_KIND_7=t.VID_KIND_7 - ,V_CD_7=t.V_CD_7 - ,NAME_7=t.NAME_7 - ,DSP_ODR_7=t.DSP_ODR_7 - ,VID_KIND_8=t.VID_KIND_8 - ,V_CD_8=t.V_CD_8 - ,NAME_8=t.NAME_8 - ,DSP_ODR_8=t.DSP_ODR_8 - ,VID_KIND_9=t.VID_KIND_9 - ,V_CD_9=t.V_CD_9 - ,NAME_9=t.NAME_9 - ,DSP_ODR_9=t.DSP_ODR_9 - ,VID_KIND_10=t.VID_KIND_10 - ,V_CD_10=t.V_CD_10 - ,NAME_10=t.NAME_10 - ,DSP_ODR_10=t.DSP_ODR_10 - ,V_WHS_CD=t.V_WHS_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + vid_kind_1=t.vid_kind_1 + ,v_cd_1=t.v_cd_1 + ,name_1=t.name_1 + ,dsp_odr_1=t.dsp_odr_1 + ,vid_kind_2=t.vid_kind_2 + ,v_cd_2=t.v_cd_2 + ,name_2=t.name_2 + ,dsp_odr_2=t.dsp_odr_2 + ,vid_kind_3=t.vid_kind_3 + ,v_cd_3=t.v_cd_3 + ,name_3=t.name_3 + ,dsp_odr_3=t.dsp_odr_3 + ,vid_kind_4=t.vid_kind_4 + ,v_cd_4=t.v_cd_4 + ,name_4=t.name_4 + ,dsp_odr_4=t.dsp_odr_4 + ,vid_kind_5=t.vid_kind_5 + ,v_cd_5=t.v_cd_5 + ,name_5=t.name_5 + ,dsp_odr_5=t.dsp_odr_5 + ,vid_kind_6=t.vid_kind_6 + ,v_cd_6=t.v_cd_6 + ,name_6=t.name_6 + ,dsp_odr_6=t.dsp_odr_6 + ,vid_kind_7=t.vid_kind_7 + ,v_cd_7=t.v_cd_7 + ,name_7=t.name_7 + ,dsp_odr_7=t.dsp_odr_7 + ,vid_kind_8=t.vid_kind_8 + ,v_cd_8=t.v_cd_8 + ,name_8=t.name_8 + ,dsp_odr_8=t.dsp_odr_8 + ,vid_kind_9=t.vid_kind_9 + ,v_cd_9=t.v_cd_9 + ,name_9=t.name_9 + ,dsp_odr_9=t.dsp_odr_9 + ,vid_kind_10=t.vid_kind_10 + ,v_cd_10=t.v_cd_10 + ,name_10=t.name_10 + ,dsp_odr_10=t.dsp_odr_10 
+ ,v_whs_cd=t.v_whs_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -582,46 +582,46 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.org_cnv_mst_v ( - WHS_CD - ,WHS_SUB_CD - ,ORG_CD - ,SUB_NUM - ,V_ORG_CD - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + whs_cd + ,whs_sub_cd + ,org_cd + ,sub_num + ,v_org_cd + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.WHS_CD - ,t.WHS_SUB_CD - ,t.ORG_CD - ,t.SUB_NUM - ,t.V_ORG_CD - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.whs_cd + ,t.whs_sub_cd + ,t.org_cd + ,t.sub_num + ,t.v_org_cd + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.org_cnv_mst_v AS t ON DUPLICATE KEY UPDATE - WHS_CD=t.WHS_CD - ,WHS_SUB_CD=t.WHS_SUB_CD - ,ORG_CD=t.ORG_CD - ,SUB_NUM=t.SUB_NUM - ,V_ORG_CD=t.V_ORG_CD - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,org_cd=t.org_cd + ,sub_num=t.sub_num + ,v_org_cd=t.v_org_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -635,40 +635,40 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.tran_kbn_mst_v ( - V_TRAN_CD - ,SUB_NUM - ,NAME - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_tran_cd + ,sub_num + ,name + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_TRAN_CD - ,t.SUB_NUM - ,t.NAME - ,t.START_DATE - 
,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_tran_cd + ,t.sub_num + ,t.name + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.tran_kbn_mst_v AS t ON DUPLICATE KEY UPDATE - V_TRAN_CD=t.V_TRAN_CD - ,SUB_NUM=t.SUB_NUM - ,NAME=t.NAME - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_tran_cd=t.v_tran_cd + ,sub_num=t.sub_num + ,name=t.name + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -682,85 +682,85 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.fcl_mst_v ( - V_INST_CD - ,SUB_NUM - ,START_DATE - ,END_DATE - ,CLOSED_DT - ,FCL_NAME - ,FCL_KN_NAME - ,FCL_ABB_NAME - ,FCL_ABB_KN_NAME - ,MKR_CD - ,JSK_PROC_KBN - ,FMT_ADDR - ,FMT_KN_ADDR - ,POSTAL_CD - ,PRFT_CD - ,PRFT_NAME - ,CITY_NAME - ,ADDR_LINE_1 - ,TEL_NUM - ,ADMIN_KBN - ,FCL_TYPE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + v_inst_cd + ,sub_num + ,start_date + ,end_date + ,closed_dt + ,fcl_name + ,fcl_kn_name + ,fcl_abb_name + ,fcl_abb_kn_name + ,mkr_cd + ,jsk_proc_kbn + ,fmt_addr + ,fmt_kn_addr + ,postal_cd + ,prft_cd + ,prft_name + ,city_name + ,addr_line_1 + ,tel_num + ,admin_kbn + ,fcl_type + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.V_INST_CD - ,t.SUB_NUM - ,t.START_DATE - ,t.END_DATE - ,t.CLOSED_DT - ,t.FCL_NAME - ,t.FCL_KN_NAME - ,t.FCL_ABB_NAME - ,t.FCL_ABB_KN_NAME - ,t.MKR_CD - ,t.JSK_PROC_KBN - ,t.FMT_ADDR - ,t.FMT_KN_ADDR - ,t.POSTAL_CD - ,t.PRFT_CD - ,t.PRFT_NAME - ,t.CITY_NAME - ,t.ADDR_LINE_1 - ,t.TEL_NUM - ,t.ADMIN_KBN - ,t.FCL_TYPE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.v_inst_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.closed_dt + ,t.fcl_name + ,t.fcl_kn_name + 
,t.fcl_abb_name + ,t.fcl_abb_kn_name + ,t.mkr_cd + ,t.jsk_proc_kbn + ,t.fmt_addr + ,t.fmt_kn_addr + ,t.postal_cd + ,t.prft_cd + ,t.prft_name + ,t.city_name + ,t.addr_line_1 + ,t.tel_num + ,t.admin_kbn + ,t.fcl_type + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.fcl_mst_v AS t ON DUPLICATE KEY UPDATE - V_INST_CD=t.V_INST_CD - ,SUB_NUM=t.SUB_NUM - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,CLOSED_DT=t.CLOSED_DT - ,FCL_NAME=t.FCL_NAME - ,FCL_KN_NAME=t.FCL_KN_NAME - ,FCL_ABB_NAME=t.FCL_ABB_NAME - ,FCL_ABB_KN_NAME=t.FCL_ABB_KN_NAME - ,MKR_CD=t.MKR_CD - ,JSK_PROC_KBN=t.JSK_PROC_KBN - ,FMT_ADDR=t.FMT_ADDR - ,FMT_KN_ADDR=t.FMT_KN_ADDR - ,POSTAL_CD=t.POSTAL_CD - ,PRFT_CD=t.PRFT_CD - ,PRFT_NAME=t.PRFT_NAME - ,CITY_NAME=t.CITY_NAME - ,ADDR_LINE_1=t.ADDR_LINE_1 - ,TEL_NUM=t.TEL_NUM - ,ADMIN_KBN=t.ADMIN_KBN - ,FCL_TYPE=t.FCL_TYPE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + v_inst_cd=t.v_inst_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + ,end_date=t.end_date + ,closed_dt=t.closed_dt + ,fcl_name=t.fcl_name + ,fcl_kn_name=t.fcl_kn_name + ,fcl_abb_name=t.fcl_abb_name + ,fcl_abb_kn_name=t.fcl_abb_kn_name + ,mkr_cd=t.mkr_cd + ,jsk_proc_kbn=t.jsk_proc_kbn + ,fmt_addr=t.fmt_addr + ,fmt_kn_addr=t.fmt_kn_addr + ,postal_cd=t.postal_cd + ,prft_cd=t.prft_cd + ,prft_name=t.prft_name + ,city_name=t.city_name + ,addr_line_1=t.addr_line_1 + ,tel_num=t.tel_num + ,admin_kbn=t.admin_kbn + ,fcl_type=t.fcl_type + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -774,97 +774,97 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.phm_prd_mst_v ( - PRD_CD - ,SUB_NUM - ,PRD_NAME - ,PRD_E_NAME - ,MKR_CD - ,MKR_INF_1 - ,MKR_INF_2 - ,PHM_ITM_CD - ,ITM_NAME - ,ITM_ABB_NAME - ,FORM_CD - ,FORM_NAME - ,VOL_CD - ,VOL_NAME - ,CONT_CD - ,CONT_NAME - ,PKG_CD - ,PKG_NAME - ,CNV_NUM - ,JSK_START_DT - ,PRD_SALE_KBN 
- ,JSK_PROC_KBN - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + prd_cd + ,sub_num + ,prd_name + ,prd_e_name + ,mkr_cd + ,mkr_inf_1 + ,mkr_inf_2 + ,phm_itm_cd + ,itm_name + ,itm_abb_name + ,form_cd + ,form_name + ,vol_cd + ,vol_name + ,cont_cd + ,cont_name + ,pkg_cd + ,pkg_name + ,cnv_num + ,jsk_start_dt + ,prd_sale_kbn + ,jsk_proc_kbn + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.PRD_CD - ,t.SUB_NUM - ,t.PRD_NAME - ,t.PRD_E_NAME - ,t.MKR_CD - ,t.MKR_INF_1 - ,t.MKR_INF_2 - ,t.PHM_ITM_CD - ,t.ITM_NAME - ,t.ITM_ABB_NAME - ,t.FORM_CD - ,t.FORM_NAME - ,t.VOL_CD - ,t.VOL_NAME - ,t.CONT_CD - ,t.CONT_NAME - ,t.PKG_CD - ,t.PKG_NAME - ,t.CNV_NUM - ,nullif(t.JSK_START_DT, 0) - ,t.PRD_SALE_KBN - ,t.JSK_PROC_KBN - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.prd_cd + ,t.sub_num + ,t.prd_name + ,t.prd_e_name + ,t.mkr_cd + ,t.mkr_inf_1 + ,t.mkr_inf_2 + ,t.phm_itm_cd + ,t.itm_name + ,t.itm_abb_name + ,t.form_cd + ,t.form_name + ,t.vol_cd + ,t.vol_name + ,t.cont_cd + ,t.cont_name + ,t.pkg_cd + ,t.pkg_name + ,t.cnv_num + ,nullif(t.jsk_start_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.prd_sale_kbn + ,t.jsk_proc_kbn + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.phm_prd_mst_v AS t ON DUPLICATE KEY UPDATE - PRD_CD=t.PRD_CD - ,SUB_NUM=t.SUB_NUM - ,PRD_NAME=t.PRD_NAME - ,PRD_E_NAME=t.PRD_E_NAME - ,MKR_CD=t.MKR_CD - ,MKR_INF_1=t.MKR_INF_1 - ,MKR_INF_2=t.MKR_INF_2 - ,PHM_ITM_CD=t.PHM_ITM_CD - ,ITM_NAME=t.ITM_NAME - ,ITM_ABB_NAME=t.ITM_ABB_NAME - ,FORM_CD=t.FORM_CD - ,FORM_NAME=t.FORM_NAME - ,VOL_CD=t.VOL_CD - ,VOL_NAME=t.VOL_NAME - ,CONT_CD=t.CONT_CD - ,CONT_NAME=t.CONT_NAME - ,PKG_CD=t.PKG_CD - ,PKG_NAME=t.PKG_NAME - ,CNV_NUM=t.CNV_NUM - ,JSK_START_DT=nullif(t.JSK_START_DT, 0) - ,PRD_SALE_KBN=t.PRD_SALE_KBN - ,JSK_PROC_KBN=t.JSK_PROC_KBN - ,START_DATE=t.START_DATE - 
,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + prd_cd=t.prd_cd + ,sub_num=t.sub_num + ,prd_name=t.prd_name + ,prd_e_name=t.prd_e_name + ,mkr_cd=t.mkr_cd + ,mkr_inf_1=t.mkr_inf_1 + ,mkr_inf_2=t.mkr_inf_2 + ,phm_itm_cd=t.phm_itm_cd + ,itm_name=t.itm_name + ,itm_abb_name=t.itm_abb_name + ,form_cd=t.form_cd + ,form_name=t.form_name + ,vol_cd=t.vol_cd + ,vol_name=t.vol_name + ,cont_cd=t.cont_cd + ,cont_name=t.cont_name + ,pkg_cd=t.pkg_cd + ,pkg_name=t.pkg_name + ,cnv_num=t.cnv_num + ,jsk_start_dt=nullif(t.jsk_start_dt, 0) + ,prd_sale_kbn=t.prd_sale_kbn + ,jsk_proc_kbn=t.jsk_proc_kbn + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -878,43 +878,43 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.phm_price_mst_v ( - PHM_PRD_CD - ,PHM_PRICE_KIND - ,SUB_NUM - ,PRICE - ,START_DATE - ,END_DATE - ,DSP_ODR - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + phm_prd_cd + ,phm_price_kind + ,sub_num + ,price + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.PHM_PRD_CD - ,t.PHM_PRICE_KIND - ,t.SUB_NUM - ,t.PRICE - ,t.START_DATE - ,t.END_DATE - ,t.DSP_ODR - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.phm_prd_cd + ,t.phm_price_kind + ,t.sub_num + ,t.price + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.phm_price_mst_v AS t ON DUPLICATE KEY UPDATE - PHM_PRD_CD=t.PHM_PRD_CD - ,PHM_PRICE_KIND=t.PHM_PRICE_KIND - ,SUB_NUM=t.SUB_NUM - ,PRICE=t.PRICE - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,DSP_ODR=t.DSP_ODR - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + phm_prd_cd=t.phm_prd_cd + ,phm_price_kind=t.phm_price_kind + ,sub_num=t.sub_num + 
,price=t.price + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -928,25 +928,25 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.vop_hco_merge_v ( - V_INST_CD - ,V_INST_CD_MERG - ,APPLY_DT - ,MERGE_REASON - ,DWH_UPD_DT + v_inst_cd + ,v_inst_cd_merg + ,apply_dt + ,merge_reason + ,dwh_upd_dt ) SELECT - t.V_INST_CD - ,t.V_INST_CD_MERG - ,t.APPLY_DT - ,t.MERGE_REASON + t.v_inst_cd + ,t.v_inst_cd_merg + ,t.apply_dt + ,t.merge_reason ,SYSDATE() FROM org05.vop_hco_merge_v AS t ON DUPLICATE KEY UPDATE - V_INST_CD=t.V_INST_CD - ,V_INST_CD_MERG=t.V_INST_CD_MERG - ,APPLY_DT=t.APPLY_DT - ,MERGE_REASON=t.MERGE_REASON - ,DWH_UPD_DT=SYSDATE() + v_inst_cd=t.v_inst_cd + ,v_inst_cd_merg=t.v_inst_cd_merg + ,apply_dt=t.apply_dt + ,merge_reason=t.merge_reason + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -960,64 +960,64 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whs_customer_mst_v ( - WHS_CD - ,WHS_SUB_CD - ,CUSTOMER_CD - ,SUB_NUM - ,START_DATE - ,END_DATE - ,WHS_ORG_CD - ,SRC_ORG_CD - ,NAME - ,KN_NAME - ,ADDR - ,KN_ADDR - ,POSTAL_CD - ,TEL_NUM - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + whs_cd + ,whs_sub_cd + ,customer_cd + ,sub_num + ,start_date + ,end_date + ,whs_org_cd + ,src_org_cd + ,name + ,kn_name + ,addr + ,kn_addr + ,postal_cd + ,tel_num + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.WHS_CD - ,t.WHS_SUB_CD - ,t.CUSTOMER_CD - ,t.SUB_NUM - ,t.START_DATE - ,t.END_DATE - ,t.WHS_ORG_CD - ,t.SRC_ORG_CD - ,t.NAME - ,t.KN_NAME - ,t.ADDR - ,t.KN_ADDR - ,t.POSTAL_CD - ,t.TEL_NUM - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.whs_cd + ,t.whs_sub_cd + ,t.customer_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.whs_org_cd + ,t.src_org_cd + ,t.name + ,t.kn_name + ,t.addr + ,t.kn_addr + 
,t.postal_cd + ,t.tel_num + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.whs_customer_mst_v AS t ON DUPLICATE KEY UPDATE - WHS_CD=t.WHS_CD - ,WHS_SUB_CD=t.WHS_SUB_CD - ,CUSTOMER_CD=t.CUSTOMER_CD - ,SUB_NUM=t.SUB_NUM - ,START_DATE=t.START_DATE - ,END_DATE=t.END_DATE - ,WHS_ORG_CD=t.WHS_ORG_CD - ,SRC_ORG_CD=t.SRC_ORG_CD - ,NAME=t.NAME - ,KN_NAME=t.KN_NAME - ,ADDR=t.ADDR - ,KN_ADDR=t.KN_ADDR - ,POSTAL_CD=t.POSTAL_CD - ,TEL_NUM=t.TEL_NUM - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,customer_cd=t.customer_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + ,end_date=t.end_date + ,whs_org_cd=t.whs_org_cd + ,src_org_cd=t.src_org_cd + ,name=t.name + ,kn_name=t.kn_name + ,addr=t.addr + ,kn_addr=t.kn_addr + ,postal_cd=t.postal_cd + ,tel_num=t.tel_num + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1031,37 +1031,37 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.mdb_cnv_mst_v ( - HCO_VID_V - ,SUB_NUM - ,MDB_CD - ,RELIABILITY - ,START_DATE - ,REC_STS_KBN - ,INS_DT - ,UPD_DT - ,DWH_UPD_DT + hco_vid_v + ,sub_num + ,mdb_cd + ,reliability + ,start_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt ) SELECT - t.HCO_VID_V - ,t.SUB_NUM - ,t.MDB_CD - ,t.RELIABILITY - ,t.START_DATE - ,t.REC_STS_KBN - ,t.INS_DT - ,t.UPD_DT + t.hco_vid_v + ,t.sub_num + ,t.mdb_cd + ,t.reliability + ,t.start_date + ,t.rec_sts_kbn + ,t.ins_dt + ,t.upd_dt ,SYSDATE() FROM org05.mdb_cnv_mst_v AS t ON DUPLICATE KEY UPDATE - HCO_VID_V=t.HCO_VID_V - ,SUB_NUM=t.SUB_NUM - ,MDB_CD=t.MDB_CD - ,RELIABILITY=t.RELIABILITY - ,START_DATE=t.START_DATE - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,UPD_DT=t.UPD_DT - ,DWH_UPD_DT=SYSDATE() + hco_vid_v=t.hco_vid_v + ,sub_num=t.sub_num + ,mdb_cd=t.mdb_cd + ,reliability=t.reliability + ,start_date=t.start_date + 
,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1075,100 +1075,100 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.whole_stock ( - REC_DATA - ,REC_WHS_CD - ,REC_WHS_SUB_CD - ,REC_STO_PLACE - ,REC_STOCK_YMD - ,REC_COMM_CD - ,REC_QTY - ,REC_STOCK_NO_SIGN - ,REC_JAN_CD - ,FREE_ITEM - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,EXEC_DT - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_sto_place + ,rec_stock_ymd + ,rec_comm_cd + ,rec_qty + ,rec_stock_no_sign + ,rec_jan_cd + ,free_item + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,exec_dt + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_STO_PLACE - ,t.REC_STOCK_YMD - ,t.REC_COMM_CD - ,t.REC_QTY - ,t.REC_STOCK_NO_SIGN - ,t.REC_JAN_CD - ,t.FREE_ITEM - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.EXEC_DT - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - ,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.REC_STS_KBN - ,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_sto_place + ,t.rec_stock_ymd + ,t.rec_comm_cd + ,t.rec_qty + ,t.rec_stock_no_sign + ,t.rec_jan_cd + ,t.free_item + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.exec_dt + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM 
org05.whole_stock AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_STO_PLACE=t.REC_STO_PLACE - ,REC_STOCK_YMD=t.REC_STOCK_YMD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_QTY=t.REC_QTY - ,REC_STOCK_NO_SIGN=t.REC_STOCK_NO_SIGN - ,REC_JAN_CD=t.REC_JAN_CD - ,FREE_ITEM=t.FREE_ITEM - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,EXEC_DT=t.EXEC_DT - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - ,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_sto_place=t.rec_sto_place + ,rec_stock_ymd=t.rec_stock_ymd + ,rec_comm_cd=t.rec_comm_cd + ,rec_qty=t.rec_qty + ,rec_stock_no_sign=t.rec_stock_no_sign + ,rec_jan_cd=t.rec_jan_cd + ,free_item=t.free_item + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,exec_dt=t.exec_dt + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1182,247 +1182,247 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.bio_sales ( - REC_DATA - ,REC_WHS_CD - ,REC_WHS_SUB_CD - ,REC_WHS_ORG_CD - ,REC_CUST_CD - ,REC_COMM_CD - ,REC_TRAN_KBN - ,REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK - ,REC_URAG_NUM - ,REC_COMM_NAME - ,REC_NONYU_FCL_NAME - ,REC_NONYU_FCL_ADDR - 
,REC_LOT_NUM1 - ,REC_QTY1 - ,REC_LOT_NUM2 - ,REC_QTY2 - ,REC_LOT_NUM3 - ,REC_QTY3 - ,REC_YMD - ,SALE_DATA_CAT - ,SLIP_FILE_NAME - ,SLIP_MGT_NUM - ,ROW_NUM - ,HSDN_YMD - ,EXEC_DT - ,V_TRAN_CD - ,TRAN_KBN_NAME - ,WHS_ORG_CD - ,V_WHSORG_CD - ,WHS_ORG_NAME - ,WHS_ORG_KN - ,V_WHS_CD - ,WHS_NAME - ,NONYU_FCL_CD - ,V_INST_CD - ,V_INST_NAME - ,V_INST_KN - ,V_INST_ADDR - ,COMM_CD - ,PRODUCT_NAME - ,HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN - ,FCL_EXIS_KBN - ,QTY1 - ,QTY2 - ,QTY3 - ,SLIP_ORG_KBN - ,BEF_SLIP_MGT_NUM - ,WHS_REP_COMM_NAME - ,WHS_REP_NONYU_FCL_NAME - ,WHS_REP_NONYU_FCL_ADDR - ,ERR_FLG1 - ,ERR_FLG2 - ,ERR_FLG3 - ,ERR_FLG4 - ,ERR_FLG5 - ,ERR_FLG6 - ,ERR_FLG7 - ,ERR_FLG8 - ,ERR_FLG9 - ,ERR_FLG10 - ,ERR_FLG11 - ,ERR_FLG12 - ,ERR_FLG13 - ,ERR_FLG14 - ,ERR_FLG15 - ,ERR_FLG16 - ,ERR_FLG17 - ,ERR_FLG18 - ,ERR_FLG19 - ,ERR_FLG20 - ,KJYO_YM - ,TKSNBK_KBN - ,FCL_EXEC_KBN - ,REC_STS_KBN - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_comm_name + ,rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr + ,rec_lot_num1 + ,rec_qty1 + ,rec_lot_num2 + ,rec_qty2 + ,rec_lot_num3 + ,rec_qty3 + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + ,v_inst_cd + ,v_inst_name + ,v_inst_kn + ,v_inst_addr + ,comm_cd + ,product_name + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,qty1 + ,qty2 + ,qty3 + ,slip_org_kbn + ,bef_slip_mgt_num + ,whs_rep_comm_name + ,whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + ,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + 
,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.REC_DATA - ,t.REC_WHS_CD - ,t.REC_WHS_SUB_CD - ,t.REC_WHS_ORG_CD - ,t.REC_CUST_CD - ,t.REC_COMM_CD - ,t.REC_TRAN_KBN - ,t.REV_HSDNYMD_WRK - ,t.REV_HSDNYMD_SRK - ,t.REC_URAG_NUM - ,t.REC_COMM_NAME - ,t.REC_NONYU_FCL_NAME - ,t.REC_NONYU_FCL_ADDR - ,t.REC_LOT_NUM1 - ,t.REC_QTY1 - ,t.REC_LOT_NUM2 - ,t.REC_QTY2 - ,t.REC_LOT_NUM3 - ,t.REC_QTY3 - ,t.REC_YMD - ,t.SALE_DATA_CAT - ,t.SLIP_FILE_NAME - ,t.SLIP_MGT_NUM - ,t.ROW_NUM - ,t.HSDN_YMD - ,t.EXEC_DT - ,t.V_TRAN_CD - ,t.TRAN_KBN_NAME - ,t.WHS_ORG_CD - ,t.V_WHSORG_CD - ,t.WHS_ORG_NAME - ,t.WHS_ORG_KN - ,t.V_WHS_CD - ,t.WHS_NAME - ,t.NONYU_FCL_CD - ,t.V_INST_CD - ,t.V_INST_NAME - ,t.V_INST_KN - ,t.V_INST_ADDR - ,t.COMM_CD - ,t.PRODUCT_NAME - ,t.HTDNYMD_ERR_KBN - ,t.PRD_EXIS_KBN - ,t.FCL_EXIS_KBN - ,t.QTY1 - ,t.QTY2 - ,t.QTY3 - ,t.SLIP_ORG_KBN - ,t.BEF_SLIP_MGT_NUM - ,t.WHS_REP_COMM_NAME - ,t.WHS_REP_NONYU_FCL_NAME - ,t.WHS_REP_NONYU_FCL_ADDR - ,t.ERR_FLG1 - ,t.ERR_FLG2 - ,t.ERR_FLG3 - ,t.ERR_FLG4 - ,t.ERR_FLG5 - ,t.ERR_FLG6 - ,t.ERR_FLG7 - ,t.ERR_FLG8 - ,t.ERR_FLG9 - ,t.ERR_FLG10 - ,t.ERR_FLG11 - ,t.ERR_FLG12 - ,t.ERR_FLG13 - ,t.ERR_FLG14 - ,t.ERR_FLG15 - ,t.ERR_FLG16 - ,t.ERR_FLG17 - ,t.ERR_FLG18 - ,t.ERR_FLG19 - ,t.ERR_FLG20 - ,t.KJYO_YM - ,t.TKSNBK_KBN - ,t.FCL_EXEC_KBN - ,t.REC_STS_KBN - ,t.INS_DT - ,t.INS_USR + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.rec_nonyu_fcl_addr + ,t.rec_lot_num1 + ,t.rec_qty1 + ,t.rec_lot_num2 + ,t.rec_qty2 + ,t.rec_lot_num3 + ,t.rec_qty3 + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_name + 
,t.v_inst_kn + ,t.v_inst_addr + ,t.comm_cd + ,t.product_name + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.qty1 + ,t.qty2 + ,t.qty3 + ,t.slip_org_kbn + ,t.bef_slip_mgt_num + ,t.whs_rep_comm_name + ,t.whs_rep_nonyu_fcl_name + ,t.whs_rep_nonyu_fcl_addr + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.bio_sales AS t ON DUPLICATE KEY UPDATE - REC_DATA=t.REC_DATA - ,REC_WHS_CD=t.REC_WHS_CD - ,REC_WHS_SUB_CD=t.REC_WHS_SUB_CD - ,REC_WHS_ORG_CD=t.REC_WHS_ORG_CD - ,REC_CUST_CD=t.REC_CUST_CD - ,REC_COMM_CD=t.REC_COMM_CD - ,REC_TRAN_KBN=t.REC_TRAN_KBN - ,REV_HSDNYMD_WRK=t.REV_HSDNYMD_WRK - ,REV_HSDNYMD_SRK=t.REV_HSDNYMD_SRK - ,REC_URAG_NUM=t.REC_URAG_NUM - ,REC_COMM_NAME=t.REC_COMM_NAME - ,REC_NONYU_FCL_NAME=t.REC_NONYU_FCL_NAME - ,REC_NONYU_FCL_ADDR=t.REC_NONYU_FCL_ADDR - ,REC_LOT_NUM1=t.REC_LOT_NUM1 - ,REC_QTY1=t.REC_QTY1 - ,REC_LOT_NUM2=t.REC_LOT_NUM2 - ,REC_QTY2=t.REC_QTY2 - ,REC_LOT_NUM3=t.REC_LOT_NUM3 - ,REC_QTY3=t.REC_QTY3 - ,REC_YMD=t.REC_YMD - ,SALE_DATA_CAT=t.SALE_DATA_CAT - ,SLIP_FILE_NAME=t.SLIP_FILE_NAME - ,SLIP_MGT_NUM=t.SLIP_MGT_NUM - ,ROW_NUM=t.ROW_NUM - ,HSDN_YMD=t.HSDN_YMD - ,EXEC_DT=t.EXEC_DT - ,V_TRAN_CD=t.V_TRAN_CD - ,TRAN_KBN_NAME=t.TRAN_KBN_NAME - ,WHS_ORG_CD=t.WHS_ORG_CD - ,V_WHSORG_CD=t.V_WHSORG_CD - ,WHS_ORG_NAME=t.WHS_ORG_NAME - ,WHS_ORG_KN=t.WHS_ORG_KN - ,V_WHS_CD=t.V_WHS_CD - ,WHS_NAME=t.WHS_NAME - ,NONYU_FCL_CD=t.NONYU_FCL_CD - ,V_INST_CD=t.V_INST_CD - ,V_INST_NAME=t.V_INST_NAME - ,V_INST_KN=t.V_INST_KN - ,V_INST_ADDR=t.V_INST_ADDR - ,COMM_CD=t.COMM_CD - ,PRODUCT_NAME=t.PRODUCT_NAME - ,HTDNYMD_ERR_KBN=t.HTDNYMD_ERR_KBN - ,PRD_EXIS_KBN=t.PRD_EXIS_KBN - ,FCL_EXIS_KBN=t.FCL_EXIS_KBN - 
,QTY1=t.QTY1 - ,QTY2=t.QTY2 - ,QTY3=t.QTY3 - ,SLIP_ORG_KBN=t.SLIP_ORG_KBN - ,BEF_SLIP_MGT_NUM=t.BEF_SLIP_MGT_NUM - ,WHS_REP_COMM_NAME=t.WHS_REP_COMM_NAME - ,WHS_REP_NONYU_FCL_NAME=t.WHS_REP_NONYU_FCL_NAME - ,WHS_REP_NONYU_FCL_ADDR=t.WHS_REP_NONYU_FCL_ADDR - ,ERR_FLG1=t.ERR_FLG1 - ,ERR_FLG2=t.ERR_FLG2 - ,ERR_FLG3=t.ERR_FLG3 - ,ERR_FLG4=t.ERR_FLG4 - ,ERR_FLG5=t.ERR_FLG5 - ,ERR_FLG6=t.ERR_FLG6 - ,ERR_FLG7=t.ERR_FLG7 - ,ERR_FLG8=t.ERR_FLG8 - ,ERR_FLG9=t.ERR_FLG9 - ,ERR_FLG10=t.ERR_FLG10 - ,ERR_FLG11=t.ERR_FLG11 - ,ERR_FLG12=t.ERR_FLG12 - ,ERR_FLG13=t.ERR_FLG13 - ,ERR_FLG14=t.ERR_FLG14 - ,ERR_FLG15=t.ERR_FLG15 - ,ERR_FLG16=t.ERR_FLG16 - ,ERR_FLG17=t.ERR_FLG17 - ,ERR_FLG18=t.ERR_FLG18 - ,ERR_FLG19=t.ERR_FLG19 - ,ERR_FLG20=t.ERR_FLG20 - ,KJYO_YM=t.KJYO_YM - ,TKSNBK_KBN=t.TKSNBK_KBN - ,FCL_EXEC_KBN=t.FCL_EXEC_KBN - ,REC_STS_KBN=t.REC_STS_KBN - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + ,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_lot_num1=t.rec_lot_num1 + ,rec_qty1=t.rec_qty1 + ,rec_lot_num2=t.rec_lot_num2 + ,rec_qty2=t.rec_qty2 + ,rec_lot_num3=t.rec_lot_num3 + ,rec_qty3=t.rec_qty3 + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + ,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_name=t.v_inst_name + 
,v_inst_kn=t.v_inst_kn + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,product_name=t.product_name + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,qty1=t.qty1 + ,qty2=t.qty2 + ,qty3=t.qty3 + ,slip_org_kbn=t.slip_org_kbn + ,bef_slip_mgt_num=t.bef_slip_mgt_num + ,whs_rep_comm_name=t.whs_rep_comm_name + ,whs_rep_nonyu_fcl_name=t.whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr=t.whs_rep_nonyu_fcl_addr + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,err_flg11=t.err_flg11 + ,err_flg12=t.err_flg12 + ,err_flg13=t.err_flg13 + ,err_flg14=t.err_flg14 + ,err_flg15=t.err_flg15 + ,err_flg16=t.err_flg16 + ,err_flg17=t.err_flg17 + ,err_flg18=t.err_flg18 + ,err_flg19=t.err_flg19 + ,err_flg20=t.err_flg20 + ,kjyo_ym=t.kjyo_ym + ,tksnbk_kbn=t.tksnbk_kbn + ,fcl_exec_kbn=t.fcl_exec_kbn + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, @@ -1436,71 +1436,59 @@ class VjskRecvFileMapper: _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ INSERT INTO src05.lot_num_mst ( - SER_NUM - ,LOT_NUM - ,EXPR_DT - ,FRST_MOV_DT - ,INS_DT - ,INS_USR - ,DWH_UPD_DT + ser_num + ,lot_num + ,expr_dt + ,frst_mov_dt + ,ins_dt + ,ins_usr + ,dwh_upd_dt ) SELECT - t.SER_NUM - ,t.LOT_NUM - ,t.EXPR_DT - ,t.FRST_MOV_DT - ,t.INS_DT - ,t.INS_USR + t.ser_num + ,t.lot_num + ,t.expr_dt + ,t.frst_mov_dt + ,t.ins_dt + ,t.ins_usr ,SYSDATE() FROM org05.lot_num_mst AS t ON DUPLICATE KEY UPDATE - SER_NUM=t.SER_NUM - ,LOT_NUM=t.LOT_NUM - ,EXPR_DT=t.EXPR_DT - ,FRST_MOV_DT=t.FRST_MOV_DT - ,INS_DT=t.INS_DT - ,INS_USR=t.INS_USR - ,DWH_UPD_DT=SYSDATE() + ser_num=t.ser_num + ,lot_num=t.lot_num + ,expr_dt=t.expr_dt + ,frst_mov_dt=t.frst_mov_dt + ,ins_dt=t.ins_dt + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() ; """) }, } 
- def get_data_name(self, condkey: str) -> str: + def _get_interface_property(self, condkey: str, property_name: str) -> str: ret = None if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_DATA_NAME) + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(property_name) return ret + def get_data_name(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_DATA_NAME) + def get_file_prefix(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_PREFIX) - return ret + return self._get_interface_property(condkey, self._KEY_FILE_PREFIX) def get_file_suffix(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_FILE_SUFFIX) - return ret + return self._get_interface_property(condkey, self._KEY_FILE_SUFFIX) def get_org_table(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_ORG_TABLE) - return ret + return self._get_interface_property(condkey, self._KEY_ORG_TABLE) def get_src_table(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_SRC_TABLE) - return ret + return self._get_interface_property(condkey, self._KEY_SRC_TABLE) def get_upsert_sql(self, condkey: str) -> str: - ret = None - if condkey in self._VJSK_INTERFACE_MAPPING: - ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(self._KEY_UPSERT_SQL) - return ret + return self._get_interface_property(condkey, self._KEY_UPSERT_SQL) def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: ret = None From 5a32b27e26983cea93cac2ee5c7216ff8840791a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 12 May 2023 12:27:29 +0900 Subject: 
[PATCH 14/86] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0(=E5=8F=97=E9=A0=98?= =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E8=A7=A3=E5=87=8D=E3=81=A8?= =?UTF-8?q?=E3=83=90=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97=E9=80=80?= =?UTF-8?q?=E9=81=BF=E3=82=82=E3=82=8C)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_importer.py | 21 +++++++++++++ .../src/batch/vjsk/vjsk_recv_file_mapper.py | 30 +++++++++---------- 2 files changed, 36 insertions(+), 15 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 719e3e62..f2da3ed9 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -48,6 +48,17 @@ def exec(): logger.debug(f'データベース登録失敗 {e}') raise e + # V実消化データ受領ファイルバックアップ退避 + logger.debug('V実消化データ受領ファイルバックアップ退避:開始') + try: + # 取込が完了したS3バケットにある受領ファイルをバックアップ用S3バケットに移動する + _backup_received_files() + + except BatchOperationException as e: + logger.debug('V実消化データ受領ファイルのバックアップ退避が失敗しました') + raise e + logger.debug('V実消化データ受領ファイルバックアップ退避:終了') + logger.debug('exec done') @@ -160,6 +171,8 @@ def _import_file_to_db(): local_file_path = vjsk_recv_bucket.download_data_file(file_name) logger.debug(f"download s3 file done : {file_name}") + # TODO: 受領ファイルはtar.gzなので、ローカルストレージ上で解凍する + # データファイル名に該当する辞書アクセス用のキーを取得する key = vjsk_mapper.get_condkey_by_s3_file_path(file_name) @@ -237,3 +250,11 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret + + +def _backup_received_files(): + logger.debug("_backup_received_files start") + + # TODO 受領バケットのファイル → バックアップバケット + + logger.debug("_backup_received_files done") diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py 
b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 612309d2..4c374a2b 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -29,7 +29,7 @@ class VjskReceiveFileMapper: CONDKEY_SLIP_DATA: { _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -298,7 +298,7 @@ class VjskReceiveFileMapper: CONDKEY_HLD_MST: { _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -351,7 +351,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_MST: { _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -419,7 +419,7 @@ class VjskReceiveFileMapper: CONDKEY_MKR_ORG_HORIZON: { _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -577,7 +577,7 @@ class VjskReceiveFileMapper: CONDKEY_ORG_CNV_MST: { _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -630,7 +630,7 @@ class VjskReceiveFileMapper: CONDKEY_TRAN_KBN_MST: { _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: 
"src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -677,7 +677,7 @@ class VjskReceiveFileMapper: CONDKEY_FCL_MST: { _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -769,7 +769,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRD_MST: { _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -873,7 +873,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRICE_MST: { _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -923,7 +923,7 @@ class VjskReceiveFileMapper: CONDKEY_VOP_HCO_MERGE: { _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -955,7 +955,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_CUSTOMER_MST: { _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1026,7 +1026,7 @@ class VjskReceiveFileMapper: CONDKEY_MDB_CONV_MST: { _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1070,7 +1070,7 @@ class VjskReceiveFileMapper: 
CONDKEY_STOCK_SLIP_DATA: { _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1177,7 +1177,7 @@ class VjskReceiveFileMapper: CONDKEY_BIO_SLIP_DATA: { _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1431,7 +1431,7 @@ class VjskReceiveFileMapper: CONDKEY_LOT_NUM_MST: { _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", - _KEY_FILE_SUFFIX: ".tsv", + _KEY_FILE_SUFFIX: ".tar.gz", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ From 2f1b42705a5f374bbb3551de70a036e098f114fa Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Sat, 13 May 2023 01:06:11 +0900 Subject: [PATCH 15/86] =?UTF-8?q?=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E3=81=AFtar.gz=E5=BD=A2=E5=BC=8F=E3=81=AA?= =?UTF-8?q?=E3=81=AE=E3=81=A7=E8=A7=A3=E5=87=8D=E3=81=97=E3=81=A6=E3=81=8B?= =?UTF-8?q?=E3=82=89tsv=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E3=82=92LOAD?= =?UTF-8?q?=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E5=AE=9F=E8=A3=85=E6=BC=8F?= =?UTF-8?q?=E3=82=8C=E3=82=92=E8=A3=9C=E5=AE=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/aws/s3.py | 13 +++++++++++++ .../src/batch/vjsk/vjsk_importer.py | 7 +++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 62111409..2e98eaac 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,5 +1,7 @@ import io +import os import os.path as path +import tarfile import tempfile import boto3 @@ 
-127,3 +129,14 @@ class VjskReceiveBucket(S3Bucket): self._s3_client.download_file(self._bucket_name, data_filename, f) f.seek(0) return temporary_file_path + + def unzip_data_file(self, filename: str): + ret = [] + with tarfile.open(filename) as tar: + temp_dir = os.path.dirname(filename) + tar.extractall(path=temp_dir) + extracted_files = tar.getnames() + for extracted_file in extracted_files: + file = os.path.join(temp_dir, extracted_file) + ret.append(file) + return ret diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index f2da3ed9..2737ffca 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -171,14 +171,17 @@ def _import_file_to_db(): local_file_path = vjsk_recv_bucket.download_data_file(file_name) logger.debug(f"download s3 file done : {file_name}") - # TODO: 受領ファイルはtar.gzなので、ローカルストレージ上で解凍する + # ローカルストレージにdownloadした受領ファイル(tar.gz)を解凍する + unzip_file_path = vjsk_recv_bucket.unzip_data_file(local_file_path) + logger.debug(f"unzip done : {unzip_file_path}") # データファイル名に該当する辞書アクセス用のキーを取得する key = vjsk_mapper.get_condkey_by_s3_file_path(file_name) # 想定されたデータファイルであれば辞書登録する if key is not None: - target_dict[key] = {"condkey": key, "src_file_path": local_file_path} + # ※受領ファイル(tar.gz)の書庫構成はtsvファイルが1つだけの前提 + target_dict[key] = {"condkey": key, "src_file_path": unzip_file_path[0]} logger.debug(f'取込対象データファイル辞書{target_dict}') # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施) From 2cb6cb5d996411f99e9b8a398acb5873089fb87e Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 15 May 2023 14:50:48 +0900 Subject: [PATCH 16/86] =?UTF-8?q?=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E3=81=AELOAD=E5=AE=8C=E4=BA=86=E5=BE=8C?= =?UTF-8?q?=E3=80=81=E5=8F=97=E9=A0=98=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=82=92=E3=83=90=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97?= 
=?UTF-8?q?=E3=83=90=E3=82=B1=E3=83=83=E3=83=88=E3=81=AB=E7=A7=BB=E5=8B=95?= =?UTF-8?q?=E3=81=99=E3=82=8B=E5=AE=9F=E8=A3=85=E6=BC=8F=E3=82=8C=E3=81=BB?= =?UTF-8?q?=E8=A3=9C=E5=AE=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 1 + ecs/jskult-batch-daily/src/aws/s3.py | 14 +++++++++++ .../src/batch/vjsk/vjsk_importer.py | 24 ++++++------------- .../src/system_var/environment.py | 1 + 4 files changed, 23 insertions(+), 17 deletions(-) diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index d95322fb..2c1cb5a7 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -8,6 +8,7 @@ ULTMARC_DATA_BUCKET=**************** ULTMARC_DATA_FOLDER=recv JSKULT_BACKUP_BUCKET=**************** ULTMARC_BACKUP_FOLDER=ultmarc +VJSK_BACKUP_FOLDER=vjsk JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 2e98eaac..0df364d4 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -112,6 +112,10 @@ class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER + + class VjskReceiveBucket(S3Bucket): _bucket_name = environment.JSKULT_DATA_BUCKET _recv_folder = environment.JSKULT_DATA_FOLDER_RECV @@ -140,3 +144,13 @@ class VjskReceiveBucket(S3Bucket): file = os.path.join(temp_dir, extracted_file) ret.append(file) return ret + + def backup_dat_file(self, target_files: list, datetime_key: str): + jskult_backup_bucket = VjskBackupBucket() + for target_file in target_files: + backup_from_file_path = target_file.get("filename") + backup_to_filename = 
backup_from_file_path.replace(f"{self._recv_folder}/", "") + backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' + self._s3_client.copy(self._bucket_name, backup_from_file_path, + jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.delete_file(self._bucket_name, backup_from_file_path) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 2737ffca..23fc8e5d 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -30,9 +30,10 @@ def exec(): # V実消化データファイル受領チェック logger.debug('V実消化データファイル受領チェック:開始') + received_s3_files = [] try: # S3バケットにある受領済のV実消化データファイルの存在チェックをする - _check_received_files() + received_s3_files = _check_received_files() except BatchOperationException as e: logger.debug('受領したV実消化データファイルに未受領もものがあります') @@ -43,7 +44,7 @@ def exec(): logger.debug('V実消化データ取込:開始') try: # S3バケットにある受領済のV実消化データファイルをデータベースに登録する - _import_file_to_db() + _import_file_to_db(received_s3_files) except Exception as e: logger.debug(f'データベース登録失敗 {e}') raise e @@ -52,7 +53,7 @@ def exec(): logger.debug('V実消化データ受領ファイルバックアップ退避:開始') try: # 取込が完了したS3バケットにある受領ファイルをバックアップ用S3バケットに移動する - _backup_received_files() + vjsk_recv_bucket.backup_dat_file(received_s3_files, batch_context.syor_date) except BatchOperationException as e: logger.debug('V実消化データ受領ファイルのバックアップ退避が失敗しました') @@ -80,7 +81,7 @@ def _check_if_file_exists(src_list: list, condkey: str) -> bool: return ret -def _check_received_files(): +def _check_received_files() -> list: """V実消化連携データファイル受領確認処理""" logger.debug('_check_received_files start') @@ -151,16 +152,13 @@ def _check_received_files(): logger.debug('_check_received_files done') - return + return received_s3_files -def _import_file_to_db(): +def _import_file_to_db(received_s3_files: list): """V実消化連携データ取込処理""" logger.debug('_import_file_to_db start') - # S3バケット「実消化&アルトマーク 
V実消化データ受領バケット」にある受領ファイル一覧を取得 - received_s3_files = vjsk_recv_bucket.get_s3_file_list() - # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する target_dict = {} for s3_file_path in received_s3_files: @@ -253,11 +251,3 @@ def _determine_today_is_stockslipdata_target(): raise e logger.debug("_determine_today_is_stockslipdata_target done") return ret - - -def _backup_received_files(): - logger.debug("_backup_received_files start") - - # TODO 受領バケットのファイル → バックアップバケット - - logger.debug("_backup_received_files done") diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index 6a2fca0b..5973a181 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -12,6 +12,7 @@ ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] From 7e69b86f0a5a6824d2759c60d9b20077930d3ca7 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 15 May 2023 17:26:11 +0900 Subject: [PATCH 17/86] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=8F=8D=E6=98=A0=E3=80=80=E5=8F=97=E9=A0=98?= =?UTF-8?q?=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB=E3=81=AE=E6=8B=A1=E5=BC=B5?= =?UTF-8?q?=E5=AD=90=E3=81=AF=20.gz=20=E3=81=A0=E3=81=91=E3=81=AB=E3=81=AA?= =?UTF-8?q?=E3=81=A3=E3=81=A6=E3=81=84=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 30 +++++++++---------- 1 
file changed, 15 insertions(+), 15 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 4c374a2b..8b372dc2 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -29,7 +29,7 @@ class VjskReceiveFileMapper: CONDKEY_SLIP_DATA: { _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -298,7 +298,7 @@ class VjskReceiveFileMapper: CONDKEY_HLD_MST: { _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -351,7 +351,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_MST: { _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -419,7 +419,7 @@ class VjskReceiveFileMapper: CONDKEY_MKR_ORG_HORIZON: { _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -577,7 +577,7 @@ class VjskReceiveFileMapper: CONDKEY_ORG_CNV_MST: { _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -630,7 +630,7 @@ class VjskReceiveFileMapper: CONDKEY_TRAN_KBN_MST: { _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: 
"tran_kbn_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -677,7 +677,7 @@ class VjskReceiveFileMapper: CONDKEY_FCL_MST: { _KEY_DATA_NAME: "V施設マスタ", _KEY_FILE_PREFIX: "fcl_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -769,7 +769,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRD_MST: { _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -873,7 +873,7 @@ class VjskReceiveFileMapper: CONDKEY_PHM_PRICE_MST: { _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -923,7 +923,7 @@ class VjskReceiveFileMapper: CONDKEY_VOP_HCO_MERGE: { _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -955,7 +955,7 @@ class VjskReceiveFileMapper: CONDKEY_WHS_CUSTOMER_MST: { _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1026,7 +1026,7 @@ class VjskReceiveFileMapper: CONDKEY_MDB_CONV_MST: { _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", 
_KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1070,7 +1070,7 @@ class VjskReceiveFileMapper: CONDKEY_STOCK_SLIP_DATA: { _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1177,7 +1177,7 @@ class VjskReceiveFileMapper: CONDKEY_BIO_SLIP_DATA: { _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1431,7 +1431,7 @@ class VjskReceiveFileMapper: CONDKEY_LOT_NUM_MST: { _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", - _KEY_FILE_SUFFIX: ".tar.gz", + _KEY_FILE_SUFFIX: ".gz", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ From a9b0b000567fc6b6c824cc626bda5f58565ca5e9 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 11 May 2023 20:20:19 +0900 Subject: [PATCH 18/86] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=81=AE=E3=82=B5=E3=83=B3=E3=83=97=E3=83=AB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py | 0 .../tests/batch/vjsk/vjsk_file_check/__init__.py | 0 .../batch/vjsk/vjsk_file_check/test_vjsk_file_check.py | 6 ++++++ .../tests/batch/vjsk/vjsk_load/__init__.py | 0 .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 6 ++++++ 5 files changed, 12 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py new file mode 100644 index 00000000..e9f88d81 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -0,0 +1,6 @@ +def test1(): + pass + + +def test2(): + pass diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py new file mode 100644 index 00000000..e9f88d81 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -0,0 +1,6 @@ +def test1(): + pass + + +def test2(): + pass From 647d8836ff8012ae8c2169080e206ccc883597c7 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 16 May 2023 22:30:09 +0900 Subject: [PATCH 19/86] =?UTF-8?q?=E5=AE=9F=E6=B6=88=E5=8C=96=E3=83=95?= =?UTF-8?q?=E3=82=A1=E3=82=A4=E3=83=AB=E5=8F=97=E9=A0=98=E3=83=81=E3=82=A7?= =?UTF-8?q?=E3=83=83=E3=82=AF=E3=81=AE=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E3=82=B3=E3=83=BC=E3=83=89=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/Pipfile | 1 + 
ecs/jskult-batch-daily/Pipfile.lock | 292 ++-- .../batch/vjsk/vjsk_file_check/conftest.py | 1226 +++++++++++++++++ .../vjsk_file_check/test_vjsk_file_check.py | 269 +++- ecs/jskult-batch-daily/tests/conftest.py | 10 - 5 files changed, 1666 insertions(+), 132 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py diff --git a/ecs/jskult-batch-daily/Pipfile b/ecs/jskult-batch-daily/Pipfile index a5d5dddd..1e6adf91 100644 --- a/ecs/jskult-batch-daily/Pipfile +++ b/ecs/jskult-batch-daily/Pipfile @@ -18,6 +18,7 @@ autopep8 = "*" flake8 = "*" pytest = "*" pytest-cov = "*" +boto3 = "*" [requires] python_version = "3.9" diff --git a/ecs/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-daily/Pipfile.lock index 519c60a0..10b5f555 100644 --- a/ecs/jskult-batch-daily/Pipfile.lock +++ b/ecs/jskult-batch-daily/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9bce8f43bcad5d6ae8e5a558b8ade00a83f6e1671993e91b0a883fffa6b95df9" + "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" }, "pipfile-spec": 6, "requires": { @@ -18,19 +18,19 @@ "default": { "boto3": { "hashes": [ - "sha256:816a198a6cc4f283af6b21439d85be6dbe4b73c2232dd906c6bafb4fece28d19", - "sha256:9de90a2c0b853f84436b032b28947fc8a765dc462573a8d543b13f16c6579b40" + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" ], "index": "pypi", - "version": "==1.26.107" + "version": "==1.26.134" }, "botocore": { "hashes": [ - "sha256:ee1e43e6cd0864cc6811ba3f05123647612ee3f07a286a4c94f5885aa86d6922", - "sha256:f63942b4b7248c0b3d6ecbc2852cf0787c23ace2a91a012f7ee0b3ae3eb08f4f" + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" ], "markers": "python_version >= '3.7'", - "version": "==1.29.107" + "version": "==1.29.134" }, "greenlet": { "hashes": [ @@ 
-124,11 +124,11 @@ }, "s3transfer": { "hashes": [ - "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd", - "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947" + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" ], "markers": "python_version >= '3.7'", - "version": "==0.6.0" + "version": "==0.6.1" }, "six": { "hashes": [ @@ -140,50 +140,50 @@ }, "sqlalchemy": { "hashes": [ - "sha256:07950fc82f844a2de67ddb4e535f29b65652b4d95e8b847823ce66a6d540a41d", - "sha256:0a865b5ec4ba24f57c33b633b728e43fde77b968911a6046443f581b25d29dd9", - "sha256:0b49f1f71d7a44329a43d3edd38cc5ee4c058dfef4487498393d16172007954b", - "sha256:13f984a190d249769a050634b248aef8991acc035e849d02b634ea006c028fa8", - "sha256:1b69666e25cc03c602d9d3d460e1281810109e6546739187044fc256c67941ef", - "sha256:1d06e119cf79a3d80ab069f064a07152eb9ba541d084bdaee728d8a6f03fd03d", - "sha256:246712af9fc761d6c13f4f065470982e175d902e77aa4218c9cb9fc9ff565a0c", - "sha256:34eb96c1de91d8f31e988302243357bef3f7785e1b728c7d4b98bd0c117dafeb", - "sha256:4c3020afb144572c7bfcba9d7cce57ad42bff6e6115dffcfe2d4ae6d444a214f", - "sha256:4f759eccb66e6d495fb622eb7f4ac146ae674d829942ec18b7f5a35ddf029597", - "sha256:68ed381bc340b4a3d373dbfec1a8b971f6350139590c4ca3cb722fdb50035777", - "sha256:6b72dccc5864ea95c93e0a9c4e397708917fb450f96737b4a8395d009f90b868", - "sha256:6e84ab63d25d8564d7a8c05dc080659931a459ee27f6ed1cf4c91f292d184038", - "sha256:734805708632e3965c2c40081f9a59263c29ffa27cba9b02d4d92dfd57ba869f", - "sha256:78612edf4ba50d407d0eb3a64e9ec76e6efc2b5d9a5c63415d53e540266a230a", - "sha256:7e472e9627882f2d75b87ff91c5a2bc45b31a226efc7cc0a054a94fffef85862", - "sha256:865392a50a721445156809c1a6d6ab6437be70c1c2599f591a8849ed95d3c693", - "sha256:8d118e233f416d713aac715e2c1101e17f91e696ff315fc9efbc75b70d11e740", - "sha256:8d3ece5960b3e821e43a4927cc851b6e84a431976d3ffe02aadb96519044807e", - 
"sha256:93c78d42c14aa9a9e0866eacd5b48df40a50d0e2790ee377af7910d224afddcf", - "sha256:95719215e3ec7337b9f57c3c2eda0e6a7619be194a5166c07c1e599f6afc20fa", - "sha256:9838bd247ee42eb74193d865e48dd62eb50e45e3fdceb0fdef3351133ee53dcf", - "sha256:aa5c270ece17c0c0e0a38f2530c16b20ea05d8b794e46c79171a86b93b758891", - "sha256:ac6a0311fb21a99855953f84c43fcff4bdca27a2ffcc4f4d806b26b54b5cddc9", - "sha256:ad5363a1c65fde7b7466769d4261126d07d872fc2e816487ae6cec93da604b6b", - "sha256:b3e5864eba71a3718236a120547e52c8da2ccb57cc96cecd0480106a0c799c92", - "sha256:bbda1da8d541904ba262825a833c9f619e93cb3fd1156be0a5e43cd54d588dcd", - "sha256:c6e27189ff9aebfb2c02fd252c629ea58657e7a5ff1a321b7fc9c2bf6dc0b5f3", - "sha256:c8239ce63a90007bce479adf5460d48c1adae4b933d8e39a4eafecfc084e503c", - "sha256:d209594e68bec103ad5243ecac1b40bf5770c9ebf482df7abf175748a34f4853", - "sha256:d5327f54a9c39e7871fc532639616f3777304364a0bb9b89d6033ad34ef6c5f8", - "sha256:db4bd1c4792da753f914ff0b688086b9a8fd78bb9bc5ae8b6d2e65f176b81eb9", - "sha256:e4780be0f19e5894c17f75fc8de2fe1ae233ab37827125239ceb593c6f6bd1e2", - "sha256:e4a019f723b6c1e6b3781be00fb9e0844bc6156f9951c836ff60787cc3938d76", - "sha256:e62c4e762d6fd2901692a093f208a6a6575b930e9458ad58c2a7f080dd6132da", - "sha256:e730603cae5747bc6d6dece98b45a57d647ed553c8d5ecef602697b1c1501cf2", - "sha256:ebc4eeb1737a5a9bdb0c24f4c982319fa6edd23cdee27180978c29cbb026f2bd", - "sha256:ee2946042cc7851842d7a086a92b9b7b494cbe8c3e7e4627e27bc912d3a7655e", - "sha256:f005245e1cb9b8ca53df73ee85e029ac43155e062405015e49ec6187a2e3fb44", - "sha256:f49c5d3c070a72ecb96df703966c9678dda0d4cb2e2736f88d15f5e1203b4159", - "sha256:f61ab84956dc628c8dfe9d105b6aec38afb96adae3e5e7da6085b583ff6ea789" + "sha256:0aa2cbde85a6eab9263ab480f19e8882d022d30ebcdc14d69e6a8d7c07b0a871", + "sha256:0d6979c9707f8b82366ba34b38b5a6fe32f75766b2e901f9820e271e95384070", + "sha256:0eb14a386a5b610305bec6639b35540b47f408b0a59f75999199aed5b3d40079", + "sha256:2424a84f131901fbb20a99844d47b38b517174c6e964c8efb15ea6bb9ced8c2b", 
+ "sha256:2ad9688debf1f0ae9c6e0706a4e2d33b1a01281317cee9bd1d7eef8020c5baac", + "sha256:2f0a355264af0952570f18457102984e1f79510f856e5e0ae652e63316d1ca23", + "sha256:31f72bb300eed7bfdb373c7c046121d84fa0ae6f383089db9505ff553ac27cef", + "sha256:375b7ba88f261dbd79d044f20cbcd919d88befb63f26af9d084614f10cdf97a6", + "sha256:37de4010f53f452e94e5ed6684480432cfe6a7a8914307ef819cd028b05b98d5", + "sha256:49c138856035cb97f0053e5e57ba90ec936b28a0b8b0020d44965c7b0c0bf03a", + "sha256:4f9832815257969b3ca9bf0501351e4c02c8d60cbd3ec9f9070d5b0f8852900e", + "sha256:566a0ac347cf4632f551e7b28bbd0d215af82e6ffaa2556f565a3b6b51dc3f81", + "sha256:6777673d346071451bf7cccf8d0499024f1bd6a835fc90b4fe7af50373d92ce6", + "sha256:72746ec17a7d9c5acf2c57a6e6190ceba3dad7127cd85bb17f24e90acc0e8e3f", + "sha256:755f653d693f9b8f4286d987aec0d4279821bf8d179a9de8e8a5c685e77e57d6", + "sha256:7612a7366a0855a04430363fb4ab392dc6818aaece0b2e325ff30ee77af9b21f", + "sha256:7ad24c85f2a1caf0cd1ae8c2fdb668777a51a02246d9039420f94bd7dbfd37ed", + "sha256:881cc388dded44ae6e17a1666364b98bd76bcdc71b869014ae725f06ba298e0e", + "sha256:8d97b37b4e60073c38bcf94e289e3be09ef9be870de88d163f16e08f2b9ded1a", + "sha256:9119795d2405eb23bf7e6707e228fe38124df029494c1b3576459aa3202ea432", + "sha256:9136d596111c742d061c0f99bab95c5370016c4101a32e72c2b634ad5e0757e6", + "sha256:9ad883ac4f5225999747f0849643c4d0ec809d9ffe0ddc81a81dd3e68d0af463", + "sha256:a25b4c4fdd633501233924f873e6f6cd8970732859ecfe4ecfb60635881f70be", + "sha256:a30e4db983faa5145e00ef6eaf894a2d503b3221dbf40a595f3011930d3d0bac", + "sha256:a5e9e78332a5d841422b88b8c490dfd7f761e64b3430249b66c05d02f72ceab0", + "sha256:b4e08e3831671008888bad5d160d757ef35ce34dbb73b78c3998d16aa1334c97", + "sha256:bf1aae95e80acea02a0a622e1c12d3fefc52ffd0fe7bda70a30d070373fbb6c3", + "sha256:c61b89803a87a3b2a394089a7dadb79a6c64c89f2e8930cc187fec43b319f8d2", + "sha256:cdf80359b641185ae7e580afb9f88cf560298f309a38182972091165bfe1225d", + 
"sha256:d93ebbff3dcf05274843ad8cf650b48ee634626e752c5d73614e5ec9df45f0ce", + "sha256:db24d2738add6db19d66ca820479d2f8f96d3f5a13c223f27fa28dd2f268a4bd", + "sha256:e0d20f27edfd6f35b388da2bdcd7769e4ffa374fef8994980ced26eb287e033a", + "sha256:e2f3b5236079bc3e318a92bab2cc3f669cc32127075ab03ff61cacbae1c392b8", + "sha256:e481e54db8cec1457ee7c05f6d2329e3298a304a70d3b5e2e82e77170850b385", + "sha256:e5e5dc300a0ca8755ada1569f5caccfcdca28607dfb98b86a54996b288a8ebd3", + "sha256:ec2f525273528425ed2f51861b7b88955160cb95dddb17af0914077040aff4a5", + "sha256:f234ba3bb339ad17803009c8251f5ee65dcf283a380817fe486823b08b26383d", + "sha256:f463598f9e51ccc04f0fe08500f9a0c3251a7086765350be418598b753b5561d", + "sha256:f717944aee40e9f48776cf85b523bb376aa2d9255a268d6d643c57ab387e7264", + "sha256:fd0febae872a4042da44e972c070f0fd49a85a0a7727ab6b85425f74348be14e", + "sha256:fec56c7d1b6a22c8f01557de3975d962ee40270b81b60d1cfdadf2a105d10e84" ], "index": "pypi", - "version": "==2.0.9" + "version": "==2.0.13" }, "tenacity": { "hashes": [ @@ -211,14 +211,6 @@ } }, "develop": { - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, "autopep8": { "hashes": [ "sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1", @@ -227,65 +219,89 @@ "index": "pypi", "version": "==2.0.2" }, + "boto3": { + "hashes": [ + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" + ], + "index": "pypi", + "version": "==1.26.134" + }, + "botocore": { + "hashes": [ + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" + ], + "markers": "python_version >= '3.7'", + "version": "==1.29.134" + }, + "colorama": { + "hashes": [ + 
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.6" + }, "coverage": { "extras": [ "toml" ], "hashes": [ - "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", - "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", - "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", - "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", - "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", - "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", - "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", - "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", - "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", - "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", - "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", - "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", - "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", - "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", - "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", - "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", - "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", - "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", - "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", - "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", - "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", - "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", - 
"sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", - "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", - "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", - "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", - "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", - "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", - "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", - "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", - "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", - "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", - "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", - "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", - "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", - "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", - "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", - "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", - "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", - "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", - "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", - "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", - "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", - "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", - "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", - "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", - "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", - "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", 
- "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", - "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", - "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" + "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3", + "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a", + "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813", + "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0", + "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a", + "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd", + "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139", + "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b", + "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252", + "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790", + "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045", + "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce", + "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200", + "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718", + "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b", + "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f", + "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5", + "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade", + "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5", + "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a", + "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8", + "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33", + 
"sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e", + "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c", + "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3", + "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969", + "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068", + "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2", + "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771", + "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed", + "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212", + "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614", + "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88", + "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3", + "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c", + "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84", + "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11", + "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1", + "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1", + "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e", + "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1", + "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd", + "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47", + "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a", + "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c", + "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31", + "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5", + "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6", 
+ "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303", + "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5", + "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47" ], "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.2.5" }, "exceptiongroup": { "hashes": [ @@ -311,6 +327,14 @@ "markers": "python_version >= '3.7'", "version": "==2.0.0" }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, "mccabe": { "hashes": [ "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", @@ -321,11 +345,11 @@ }, "packaging": { "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" ], "markers": "python_version >= '3.7'", - "version": "==23.0" + "version": "==23.1" }, "pluggy": { "hashes": [ @@ -353,11 +377,11 @@ }, "pytest": { "hashes": [ - "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e", - "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4" + "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", + "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" ], "index": "pypi", - "version": "==7.2.2" + "version": "==7.3.1" }, "pytest-cov": { "hashes": [ @@ -367,6 +391,30 @@ "index": "pypi", "version": "==4.0.0" }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version 
>= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "s3transfer": { + "hashes": [ + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, "tomli": { "hashes": [ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", @@ -374,6 +422,14 @@ ], "markers": "python_version < '3.11'", "version": "==2.0.1" + }, + "urllib3": { + "hashes": [ + "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", + "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.15" } } } diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py new file mode 100644 index 00000000..c341b2e8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -0,0 +1,1226 @@ +"""vjsk_file_cheak用テストフィクスチャoverride""" + + +import os + +import boto3 +import pytest + + +@pytest.fixture +def s3_client(): + conn = boto3.client('s3') + yield conn + + +@pytest.fixture +def bucket_name(): + return os.environ["JSKULT_DATA_BUCKET"] + + +@pytest.fixture +def receive_folder(): + return os.environ["JSKULT_DATA_FOLDER_RECV"] + + +@pytest.fixture +def init_check_received_files_ok1(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ok2(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng01(s3_client, bucket_name, receive_folder): + # setup + + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng02(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng03(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng04(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng05(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # 
Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng06(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng07(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng08(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng09(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng10(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng11(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # teardown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng12(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng13(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # 
Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng14(s3_client, bucket_name, 
receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng15(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index e9f88d81..44ece095 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -1,6 +1,267 @@ -def test1(): - pass +import pytest + +from src.batch.common.batch_context import BatchContext +from src.batch.vjsk.vjsk_importer import _check_received_files +from src.error.exceptions import BatchOperationException -def test2(): - pass +def test_check_received_files_ok1(init_check_received_files_ok1): + """ + 観点 + 正常系 : 卸在庫データ取込対象日 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + received_s3_files = _check_received_files() + + assert 
received_s3_files is not None + + +def test_check_received_files_ok2(init_check_received_files_ok2): + """ + 観点 + 正常系 : 卸在庫データ取込対象日以外 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = False + + received_s3_files = _check_received_files() + # with pytest.raises(BatchOperationException): + # received_s3_files = _check_received_files() + + assert received_s3_files is not None + + +def test_check_received_files_ng01(init_check_received_files_ng01): + """ + 観点 + 異常系 : 卸在庫データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸在庫データファイルがありません") > 0 + + +def test_check_received_files_ng02(init_check_received_files_ng02): + """ + 観点 + 異常系 : 卸販売データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸販売データファイルがありません") > 0 + + +def test_check_received_files_ng03(init_check_received_files_ng03): + """ + 観点 + 異常系 : 卸組織変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸組織変換マスタファイルがありません") > 0 + + +def test_check_received_files_ng04(init_check_received_files_ng04): + """ + 観点 + 異常系 : 施設統合マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設統合マスタファイルがありません") > 0 + + +def test_check_received_files_ng05(init_check_received_files_ng05): + """ + 観点 + 異常系 : 卸マスタファイルが欠落 + 期待値 + 
例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸マスタファイルがありません") > 0 + + +def test_check_received_files_ng06(init_check_received_files_ng06): + """ + 観点 + 異常系 : 卸ホールディングスマスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸ホールディングスマスタファイルがありません") > 0 + + +def test_check_received_files_ng07(init_check_received_files_ng07): + """異常系 : 施設マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設マスタファイルがありません") > 0 + + +def test_check_received_files_ng08(init_check_received_files_ng08): + """ + 観点 + 異常系 : メーカー卸組織展開表ファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("メーカー卸組織展開表ファイルがありません") > 0 + + +def test_check_received_files_ng09(init_check_received_files_ng09): + """異常系 : 取引区分マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("取引区分マスタファイルがありません") > 0 + + +def test_check_received_files_ng10(init_check_received_files_ng10): + """ + 観点 + 異常系 : 製品マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert 
str(e.value).startswith("製品マスタファイルがありません") > 0 + + +def test_check_received_files_ng11(init_check_received_files_ng11): + """ + 観点 + 異常系 : 製品価格マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製品価格マスタファイルがありません") > 0 + + +def test_check_received_files_ng12(init_check_received_files_ng12): + """ + 観点 + 異常系 : 卸得意先情報マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸得意先情報マスタファイルがありません") > 0 + + +def test_check_received_files_ng13(init_check_received_files_ng13): + """ + 観点 + 異常系 : MDBコード変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("MDBコード変換マスタファイルがありません") > 0 + + +def test_check_received_files_ng14(init_check_received_files_ng14): + """ + 観点 + 異常系 : 生物由来データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("生物由来データファイルがありません") > 0 + + +def test_check_received_files_ng15(init_check_received_files_ng15): + """ + 観点 + 異常系 : 製造ロット番号マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製造ロット番号マスタファイルがありません") > 0 diff --git a/ecs/jskult-batch-daily/tests/conftest.py b/ecs/jskult-batch-daily/tests/conftest.py index 
a03a8638..d2afff68 100644 --- a/ecs/jskult-batch-daily/tests/conftest.py +++ b/ecs/jskult-batch-daily/tests/conftest.py @@ -1,11 +1 @@ """共通テストフィクスチャ""" - -import pytest - -from src.db.database import Database - - -@pytest.fixture -def database() -> Database: - """データベース接続モジュールを作成""" - return Database.get_instance() From 4af4a6ca60c99c983e35c39c69a67ba1084ab0ac Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 11:46:15 +0900 Subject: [PATCH 20/86] =?UTF-8?q?=E3=83=A1=E3=83=A2=E6=9B=B8=E3=81=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/batch/vjsk/vjsk_file_check/conftest.py | 1 + .../tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index c341b2e8..cfab500f 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -23,6 +23,7 @@ def receive_folder(): return os.environ["JSKULT_DATA_FOLDER_RECV"] +# TODO 共通fixtureにして15個固定でput/delete、各個別fixtureで15個から引き算でdeleteする @pytest.fixture def init_check_received_files_ok1(s3_client, bucket_name, receive_folder): # setup diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 44ece095..43062be1 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -19,6 +19,10 @@ def test_check_received_files_ok1(init_check_received_files_ok1): assert received_s3_files is not None + # target_path = path.dirname(__file__) + # target_file = "xxxxxxxxxxxx000000000000.gz" + # s3_client.upload_file() + def 
test_check_received_files_ok2(init_check_received_files_ok2): """ From 7a7d5597407b24d946be9183dce687311963ef67 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 12:03:51 +0900 Subject: [PATCH 21/86] =?UTF-8?q?=E9=96=93=E9=81=95=E3=81=A3=E3=81=A6?= =?UTF-8?q?=E6=B6=88=E3=81=97=E3=81=A6=E3=81=9F=E3=81=AE=E3=81=A7=E6=88=BB?= =?UTF-8?q?=E3=81=97?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/tests/conftest.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/conftest.py b/ecs/jskult-batch-daily/tests/conftest.py index d2afff68..a03a8638 100644 --- a/ecs/jskult-batch-daily/tests/conftest.py +++ b/ecs/jskult-batch-daily/tests/conftest.py @@ -1 +1,11 @@ """共通テストフィクスチャ""" + +import pytest + +from src.db.database import Database + + +@pytest.fixture +def database() -> Database: + """データベース接続モジュールを作成""" + return Database.get_instance() From c1c9fd68b53178b31a52bc56adf0d807925ea1e6 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 17 May 2023 19:24:45 +0900 Subject: [PATCH 22/86] =?UTF-8?q?load=E5=87=A6=E7=90=86=E3=81=AE=E3=82=AB?= =?UTF-8?q?=E3=83=90=E3=83=AC=E3=83=83=E3=82=B8=E7=A2=BA=E8=AA=8D=E5=88=86?= =?UTF-8?q?=E3=82=92=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_file_check/conftest.py | 2 +- .../tests/batch/vjsk/vjsk_load/conftest.py | 41 +++++ .../batch/vjsk/vjsk_load/test_vjsk_load.py | 146 +++++++++++++++++- .../testdata/bio_slip_data_202304270000.gz | Bin 0 -> 2868 bytes .../batch/vjsk/vjsk_load/testdata/dummy.gz | Bin 0 -> 107 bytes .../testdata/fcl_mst_202304270000.gz | Bin 0 -> 2009 bytes .../testdata/hld_mst_202304270000.gz | Bin 0 -> 500 bytes .../testdata/lot_num_mst_202304270000.gz | Bin 0 -> 303 bytes .../testdata/mdb_conv_mst_202304270000.gz | Bin 0 -> 426 bytes .../testdata/mkr_org_horizon_202304270000.gz | 
Bin 0 -> 729 bytes .../testdata/org_cnv_mst_202304270000.gz | Bin 0 -> 402 bytes .../testdata/phm_prd_mst_202304270000.gz | Bin 0 -> 1134 bytes .../testdata/phm_price_mst_202304270000.gz | Bin 0 -> 413 bytes .../testdata/slip_data_202304270000.gz | Bin 0 -> 3134 bytes .../testdata/stock_slip_data_202304270000.gz | Bin 0 -> 899 bytes .../testdata/tran_kbn_mst_202304270000.gz | Bin 0 -> 419 bytes .../testdata/vop_hco_merge_202304270000.gz | Bin 0 -> 286 bytes .../testdata/whs_customer_mst_202304270000.gz | Bin 0 -> 1307 bytes .../testdata/whs_mst_202304270000.gz | Bin 0 -> 499 bytes 19 files changed, 184 insertions(+), 5 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index cfab500f..0778241d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -1,4 +1,4 @@ -"""vjsk_file_cheak用テストフィクスチャoverride""" +"""vjsk_file_check用テストフィクスチャoverride""" import os diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py new file mode 100644 index 00000000..ea29eb63 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py @@ -0,0 +1,41 @@ +"""vjsk_load用テストフィクスチャoverride""" +import os + +import boto3 +import pytest + +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper + + +@pytest.fixture +def s3_client(): + conn = boto3.client('s3') + yield conn + + +@pytest.fixture +def bucket_name(): + return os.environ["JSKULT_DATA_BUCKET"] + + +@pytest.fixture +def receive_folder(): + return os.environ["JSKULT_DATA_FOLDER_RECV"] + + +@pytest.fixture +def mapper(): + return VjskReceiveFileMapper() + +# @pytest.fixture +# def init_Load_ok(s3_client, bucket_name, receive_folder): +# # setup + +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# 
s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + +# # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index e9f88d81..43afd62c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1,6 +1,144 @@ -def test1(): - pass +from os import path + +import pytest + +from src.batch.common.batch_context import BatchContext +# from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager +from src.batch.vjsk.vjsk_importer import (_check_received_files, + _import_file_to_db) +from src.db.database import Database -def test2(): - pass +class TestImportFileToDb: + db: Database + batch_context: BatchContext + test_file_path: str + + @pytest.fixture(autouse=True, scope='function') + def pre_test(self, database: Database): + """テスト実行前後処理""" + # setup + self.test_file_path = path.join(path.dirname(__file__), "testdata") + + self.batch_context = BatchContext.get_instance() + + self.db = database + self.db.connect() + # self.db.begin() + + # testing + yield + + # teardown + # self.db.rollback() + self.db.disconnect() + + def test_import_file_to_db_ok(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 正常系 : すべての受領データをデータベースに登録できる + 期待値 + 例外が発生しない + """ + # setup + self.batch_context.is_vjsk_stock_import_day = True + + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + 
"bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz", + "dummy.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") + # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") + + # assertion + received_s3_files = _check_received_files() + _import_file_to_db(received_s3_files) + + # self.db.connect() + + # # 検証 (卸在庫データファイル) + # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) + # result = self.db.execute(f"select * from {table_name_org}") + # assert 
result.rowcount == 10 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 10 + + # # 検証 (卸販売データ) + # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + # result = self.db.execute(f"select * from {table_name_org}") + # assert result.rowcount == 10 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 10 + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + # def test_load_stock_slip_data_ok(self, mapper): + # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + + # # setup + # self.batch_context.is_vjsk_stock_import_day = True + # self.db.execute(f"truncate table {table_name_src}") + + # # assertion (insert) + # target_dict = { + # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304280000.tsv") + # } + # VjskDataLoadManager.load(target_dict) + + # result = self.db.execute(f"select * from {table_name_org}") + # assert result.rowcount == 4 + # result = self.db.execute(f"select * from {table_name_src}") + # assert result.rowcount == 4 + + # # assertion (update) + # target_dict = { + # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304290000.tsv") + # } + # VjskDataLoadManager.load(target_dict) + + # result_org = self.db.execute(f"select * from {table_name_org}") + # assert result_org.rowcount == 4 + # result_src1 = self.db.execute(f"select * from {table_name_src}") + # assert result_src1.rowcount == 6 + + # # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..36d1af4593f1b83a34e64c586b11a47eb6cae724 GIT binary patch literal 2868 zcmV-43(NE$iwFn=tX*UT0AguxUvq3}a9?C$bYWjIFfubRG%`0ZFfcGKbYXG;?VDLs z6G0GwebryF<(m(&X7*m+#H+k5Ewy|~q2di%Nxa@9@iv~Ih=TVm9z?}RlJ&nBHZf28 z7wqZTD+z?GiGa!!gr44+*-dxPx4pB&?t@38@goP1MfY|lI-?>HB_fL&fwD9aKe3=K zg-}8?OCsrf6Z5~T~MEQ_iv6DotAilAE{kOebPj>i+7u?1vC&NT;%o%_Bx*0l%S zzjz^vClADVf#)o?YF0qbFmB zYcz3Bb3E3$KiYFNI9yM+Q|sx8A6~uZNVOum4>X;7_r@IEk)w%d&+%^RR6Dy9wEBqX z9}%ZKB01veZuVW`okzO(4B69}fI7~YeFs6=)9qHf_a~g|K6;W31`_*r&Inznn2}lE z6KEA~BiBBBm@3nrV3yvDHUCg`yTd(hJFgLMz|6u`U>i?>V_e`JU9&|`K*e{PZ}9^O z?whVy%<BKAy{=R?U&@m0WSf|bIu6@yfwVaENbsh6O?Q48*`^q|x zvy0nzWIy#q(HA9Olzmb0Mb#HIU(|ik2%LfhWEzmgfGh=MIUp+mSq;cqK-L3t;JhL` z@8LtIk3uA|kU1RRwEJ+C2fFqI=2$%8`r}|v93svUj>lt*7cE{XE25zikTbZC=ICVcYDr3twzA1v2|Ad=&}V zxJIBmgvp9s_-+?I*@bcF%>?rnbV4~}7shOJ&^FK8<~0}$+F;8u9QX>JEom3B&iM;@ z!8XsZ!-X82RL~Sj2vs_k@92O#l3OLL)DA(gOsxd7f+gXTF!$Hk6Ewa_q2gK)n_}#s zolzj3=vW=3)(i~;v|?-M-d=OVfUd=T4L3Rn^^Lie8yxb?h6AjF-yQ3=t&eo9-5gyP z-MDJ&iuFGz7suZWZ^)xe)z3ey{{zy20q~Woz z!`A0(*42Sh{|9S)05muXFKliXlTV7Nx5d=$VrrzAN)?k|N{`-H`Rfz;d*x?lZJekY zT!3Q0A1c8$DtMJvI#(V{mOkB{7$1i|*8Qi&-o9e*`C{*tVzLhgDxNu08hBnB8mU~# zm)}1w51gwEK7o$R+2&ol@E#-J8oD{pBZ>)y5pb(p^@9LDp`*|sK=_C_`Y`*tlthTG zh(h`K1MB%kFiCkR?HDGTbcFOczqrbodv^ZjkBhhL{4M;v8Jr$dg}zYw^ve2p*1Gge z5Wu?K`yuq#F5ke-P5B>*8I%1wArbr`ZT{sUQ#HIOMTi_B5?tRhSzLybJbpT!=<2S* z?i$L9q-mg$iD*bfl>^w7B#|ls?6OGJoM~*Hi?E4X_-vbL=7_==Gmd#1e1YbA2Ms#7Q3~nXgwpWn#P~=hRp3Sf zLQmu_1Fh|R9|tpj!cTwotnp`rUHItD388%N{Y3T(aN3&=f&dd@?RJ4~S+%K()Z$-3 z>K#A_5q^kDWC%FTFqB||tb#GrYEnyb2v`_oYFO}_Z!acmxrpn=0=oA$4;6Q`H1l@h z3(wAA*lJB)K<$ui4j?GZKK$v$o9uPqgBDQ)ZM86cS+m+$5g;?pG14NZgBE5Q-^ft$ zY}%01jvvBtx}oV|sNc6fbpBHgfT3L<_!{np*`u_Ml&0Y*y~C%pY%sa};<}Z2Tpk&j z$e#IqM7zP`=h3<@$q-;LqsioXbmi^cO7aEIp!r2uqod_(SDK@AZ3gYN@iOT8$$1G5 zPl{!o0+QG**fGXXnhTIYmj+({yz~M1Tplzly?v!GLoTB4;Ag*`za5+~p%875NfAxS 
zU%}{g9gH9nG2olSD7pqK89t*GQBrl|&t=o#x}^JDbk*WqW%DjUIlOmAT!!-*6hz#7qXLUeemqe?8qf*p#%lZ%e?ljY@uZk@7kz#=7z+X(`%oVtsQik!&Ejf>5`oZGCtt0o zjOWVz4}rDT^=GBb6Uc(?{D8Mq(e6Z;{soX-5BV89Ov59Cs2ZvQ?o>1?2O+Czps9i^ zB_U?M)Ho6DLqA`_J81#s{+em;H1!^U&f(1e21D~F^q2#tUhRe_s?Vf)jV^)XjTJ+{uhtz5WTJ~LA8 zABQWm-Va&V9^&FfpPj#33vWt7!CRU1KYz2_Re|D)Auk0_#OC=$|JH}oLAbzK*OTkO~&aGCSU z312NFUBC2t@1ERA`;|_$2Ml47<*%u{*AYmB;o!ajJk4->_TRMIE`@jg8^mC|OxmLURiX!L zs!a#$XLl-h`rH-no1D}&69#V}AN}c2_`s(_a6cZMVn3ezZ!)^|ZP1~~FmqUNw&~6^ zou}Ox&Hd1pjnSd4Khs=xd@zT&EF+h-c`nZZF1NTz`fq?TrJBNahRLF+QMRo^(Ihzt zWtlB{{E@^N1Hs*Bo92E+c^G?UptHLjeiTo0!{4`CzfTtrpW^|-b|AR_LNO-D5gLJ4 z&^T6>p9zNJk8U*s1^c10AFc-d{}8_4PTsmY!Dbs2+KBKff1rlDBxHh(UTb55-GZ;6 z`&@AsT#DndIRtzKfv<-FJ_{0mONn8CJdwW)dyHGyPr`%D=R8E0LNyisUl<_4PX6vi z>Ct0{^Y;std-pl{Vdq@=`6cVwOK1StZ*Zu5ezf%Et9K)Cmrp)`r|a(p-2LdDp3ewGX9H`Nnka<;OX0$$w)$i zmRc_C3X|KNp1$Y4Z*#iuLgr#Nmk~_p%n&xiIF`#^J@6X`zfPR+50Z1AV6sA3U7Baxyb;353*1E^{ge zvVY)|BKj^|%Jw(ufs1&YAIQ*m_DrrtM2i>C52`cIWzT0W4_@laoIg2$>FJBP%!yN{ z28op0quJ$4*_?Xg^5E$f)jYcIZ0@?EHCPdoMnQbx4)E!~sJ%&F)PRk_sGzQ#PlKAYr z84gD7Hl{1}l@EgTY!5qjg2@bY1GHLRAxsvB0<<zumqcx)14z&jj)QBH4J; z#C{XZQa>)a1Z^m+mQx*Za?R325(r3LK4Lr(-GYRb?x>h!387nt<`cV~!`56uOk$=*>i4)^c~! 
z+7`XKhPSzC>l)XL;|ZkFy3!?Lz@U?uG5uNC%uXA^Ff4A<;exX+u4X%?p~Jb-yw^@JdP+m5%qnEvo8LY)40G_DVr%9r7wd>ik0mM1B4}a@_W-uaGX1 zaU$~jGLCE2^Bfq_b|)L0B9rj_cu17eP2nWnIWN#}?z zrQAvD4ikoD8j1EyFjKTE9cdNGwY{JRPu<47kziw~zO?D*S5;+ijIN;jsFhcO#nr~l z@Xp)|oS?F%s>?r9*0->krsFD9)K1E~Bh4z0O{#k$y{^!4U+CyjY>k3pO(oWLa4M5R zp18j6|G?A{1-Px&!<{v*Gb^Pu1S=}WWhyE?pRO8i?;MHxkHZuM{jw8VZ_nty) z@Rs_*Bxq`CCZaSKHwnXWztw8#BNv`mv7d@|P0G1<2Tz+hnw?-T6)bCC5(TD7AeeT+ z!el5L+;JQ_N+@3t9SsMo3ESp^8T&U}eSTzTZX#;3zRFIj_`)yVj$4%WMKZ(#3YSe4 z(XBSB73p)KHwAm`Ze6pfo?t33Tih|3VS~jt zwaSC2)=eg=-0WVRkDGJ~CCg0`_zQj342?VZBv^N9dSVcud*Wca4uf(l>U&t4?AO`) zmSOA2^p4T1+JfiK>@qdb1=T&1-VtqXz5Ki}^gb@Hk!)Ke_kHR+pQt_KM<$@{&P;zZ^I9OcXDc(a}jg^dNe2t-Sd))?QER~#t z^RB9)8s81IL{J}tP}K!~py??8#}gQ(Ib^uSP(in&#p3<}GYkEI&P?Xe;aoU-H3j%* z4Yw^DotxG3e>OjV>H137ny)Hcf7cajg03|a?HOVhzg;(`yqaZc7#_5I5qh6Rt-V9EBfG!0A-x rV_=~7kBu3Qa2$CRQuUtcZHeQ+|M7qPAOFYyksRLv3aT>602}}ShH~>@ literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..efedd921426c1148d8eee36540901f014c0e1cc0 GIT binary patch literal 500 zcmVgiwFoqtX*UT0BCGvUu|=AUotQjGz?i?+QbkJ`-ZXd z>#rprSO=G}7|B#%zyv7fpmUnadGL<`1i(2DV4dANMU(`HQXEo35azlcQQdw5!5%V` z>G?E(y?oaJ=KJHTH5bKG(2H9~EjjCoWUL3tNWTpevAWAax*in2RVw0C5Bo{H9>j7Y z#_?2-rv09fsT9L*qCZ_Ea@{bStU8ZdNdMVL7m91ScvJa}%0E^9sPZe7-KqRVWzQ;m zUtTW&wTmW|J*fOiWv_p=s{B@E_qy9`SuQP}=QbeyIzjjh;0(5_D=F3wE(!FN@i-Tw1kYs|D9uu+@UMEX2h^yewwRa%t5~i))t0k zbluRpHblo@1cSj`;N`iz1`!M>{k+=TY;a7+R#Iw; zgo<#8YK>}*dd0ZZ={XGFZsXl!PQw|Ep{i7)CJkZMSZYc&I-{vsm1?StreRyEG2>hf z`qHEVF5VlP(WJXqs;M&?29;`>Ni!cfBUKb?2pJ8d{#hdy78Vx%8=o133Tprg001U# Bhw1c3*9CbYC(sGBYqVGB+?VFfcB3VR8WNl3Pl{ zP!NXms;ek@ugvXSH*>I!V6Ycy(t>OA!6mr`=k%_&L5dWS{BX$3obb(0@=vQ#8qa5| zG;P{MKny|-RCjH&DtRGcH zFQ2Pi+?Lh>QE(mrF_^$;^cM91u%HG_a;=AQ2rS^Pr6KST!T>oM8!?=kZTSq~5s|DB 
zZz%)Te8r;>9Qf*C&Ce`!Lxdr^0lEk_8v4YSsAHCMeBALR1oxc}Cs=%Hw&!D}i5K+N z?ESMP(zO@*=P2PEAGdtA;^7DWGpm!&4A!v|@`m^Rl`XpE68|O=L^kL6xZ^w4m%98P zSlxWj&Kgp_Kgv(g|IQcXN5knOyK{Wp@{w(jPX7m1Ctn~~1S5jhANxlExy(OtBr0!< UUcGwt>h*v10l?2|!vG8b0P#W2IRF3v literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..835d236007b3aa749ac63f3d9bf9ef9681cf26f4 GIT binary patch literal 729 zcmV;~0w(<*iwFoetX*UT0Bvh>UvF|}UubV~X?kyNUotQZe$^pE)mmp?sM}$KnVXEROs?X=IDIxoL(NmN2(5<-?TI z!WCb^b9yWsb-v(unaO!>`q2KMrRRT6yPV8e>CAm=DV<3?Os6aa5C$qT0IfS&JEx4H z00h7=3?6Zm(GBc!u>uh(d@sL;^%2#395SVgX_isv%T4R0XJtPz#~Pp(a30 zgn9^d4s`+QA~Zs1aA*k75MeZgQ4XU5jEc|yxa*dp9_g+-`mru#7JyP82Up|w3rW^rYM$khO zsDMpSlNLl6epAq#2kJb8a}dTbh-oI%IM|2gpb^th3`f7q2*fPNq_EQ~zwve(e(9vs zu69q?#jyNz#n5CEuXkAPIiE?T=pVKGdev_}_RmYwPQ^|t4x=s^NN4FeI3j!dVrV5C zE1cA5(|0g7+D_GVp1ApaH-GFp$G@HVC^#`)D7@w;H-GNt+pgod&T~>|`7iU5&y-Ip zCL=EjELHqT=GSVSi-H(w$!E$Zl@X}2p2m^B#PERl&4#y8`{f&iKH!63N%c?asv!+q z6N76E>8MW!XyQQvz0yZ=u_MXVSpi`OdoO!-a82xLD=wl_)ffHXGf>#BIIlFyWnH6w~FS+Zow`p@bs LCB`z%03HAUu3ciK literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..89eb4a65c860810433383f7aec3b85d0c884d501 GIT binary patch literal 402 zcmV;D0d4*tiwFoatX*UT0B>?dERpN z)P{Uvvrac}r@UmG0`ehrT#lHq&;DD+|)V}rP=_b5cfu>xDx9z#)G578HhAG>6CTS3_-?8Dub{HI%;&W w>2V-6a75>DBy1Ukzd@%u681Qd890*Xa6}BMMvWRZYScya39mPJQ~(SB0D^V7aR2}S literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz new file mode 
100644 index 0000000000000000000000000000000000000000..169f6761a19b359dec7013fb730d8675dbeffc6d GIT binary patch literal 1134 zcmV-!1d;n6iwFoWtX*UT0B~q+UvP3{Uu|=AUotQh=`FRcs%&hsNhW@l)oQPXAgYlLGNlrcs@iT`*I#^IGXJRJ}2$Kh4-`Sgts zCvFpB($+r0>9+iOFoNGkgBjGyqf69(69o^W;C$4-69rGv14B$;t-lq|=fGz+_x zNs&3!kp@g8&zYTOEKRe7Svk|l35Gf+O`SAz2xB<`I&*}{u>V_3HWgEt%mpxU$d}2O z64UEAycUPY;=ytpUWy0L02%po`O*8Ar#?Qt8;57&@H+A%;8xmBSglae;F#l3i)Q80 zpGCpRD0mwM52F5EV2}g=`7)y9a?!(RavM&l3?@QPAY}+q#+FPOa?m-mF`@}ZbBd@86~%N!HFWI!)FTBeGolG+Af5R@cu<7JR=Z3(@{CSqH)VLd~+7WV17B zmgVTM3oy=19TzwPx};y;i3e{1F@bzPEFaZfQxT;*O8G6CsjzL`IC0q{5HA2O`hruk zrh0W_ert)8NxodmNIj98U$;+VW(;(c43)t!n+bZdtVUBQ;KM}Oe0|hfsE=~^ENj2y zLvtw(gLnY7ctRHGxp9cLi3rCCGMiIN=8}9i9wc4wtgO%*HQPkPLVa7QQmvPXyoERS z9T&8({TQ2k_qLfp!-z?0Y#wCUg@_5pvO{7gvE~NLdRq@(%FTUIa=ZF{Zq|s~IaoFB z>HcyQ+?NENf~(|IE7XhCV!cqPB;3TeT+q7s=b3iiwFoRtX*UT0B~q+UvP41V`X1$b97%aFfubRG%`0ZFfcGKbYXG;?NUo_ zgD?!L^M6*`8bK@_B* zlY&Xn#E4dqm1;_bx-Np$S;eqs*f0zfgug1|j?zLtRU50&(966PnzUl9)Yn*{p_O?j z^gky>=yOShn-wCHx1}Pr1(>$-SA{y2T&qmqJC&0P4ZX?oGx?QjMS`%xKSipuwcQ01)Yh`hg%2vrqg;^Ye0hc6`RdwR662!sl!G)6P8dwQAq`t|oszZoYkcV3CLwIo|&x~3bNsXH2MuO<_H zO*PvzrfH7jG-kMxG#@4xb zdutzefpaf$GxqJ*iCAk}D3Q2&u{fhQeo1At0)nAbZ+Ev!CF3nUv960fnCy#vooMU% zs=F=rZM>@_CjmKBt)L5 zh_|=LI+N|;ld%ps+tSt+FU`Bsn@E=CCEG8Htc&d(c)_vnyW2upd!jw8y zT&}A}mJ13|0e-WT1$P`6RK7zZDM5?%K|+kPuw*NIywJem`k0RpCWFD^*+mIaqYXx# z@U+2Q>Ly= zLz$*BEoIuubd>2TGaBkb3)w7W>ml0+*=ESLLbe^UosjK@>`=ct@3-rl?|T6b?oC~Z z4=;8V0oUHjlfsu1NeN?TPXe4#7*`YVb1$EJ6Bt|L{}{6wJ8wp4)0k^Gh7k#Epeu&P z48zu~C}T|H>4+J%4NK!T-*osHaEdlthjislH0fuTn*H2}pL^)%Qhsj0&#wE~te@-m zb3ggnWk0*==O+B@-+p!t8olriR+5&kUHITj_JyCj;b${YA6vN=s=NYw+4^0~dG6;P z@Fs*}xt42{RL3sIpcu46wG0D_;K>I%mV5}AkwZ5C8Bm2i?j*YEqyU%_0g7NV^eO@y z8wpGSJ+BNKwkz=lQ$mi3S^x{p90yydcDm3)2dCGvxOA`tqzI=tFNZ_dIri_t?~C_7 zeLwbI?7~~0yzxFgdeJFa3>%<4JsKL#z+^D{Oco5g_vHD``VHwYaHM%*%9>{DDENx#~F7-9rfsW0nVOoxDM@`-3 z*ToUc&iUC@9LF@c3t>vWLO_;*p24#@s3Jzcg1cnrOWh^A5V#9;3>V8U`M2@H>~Mbh 
zr=3iO?$R5ZDoi}uyZ`HUDn&Uv>+{~sLSb?cGHIZi;m%paOHbxk*7o}Eh>Gh2`RTdB zM8@B|1AqSx-S#(cho>0L4nMo7=12A!&5uZa`Kh;f-5Z|4$;;Q(gs)d7d^b$^y6L(N!ml~u^X3TbV#1e@HwgbEEuRkI8%W&^!hc~2-whGIm}H9a zb&FY=8KQjM)lEk~)0_|9r7^f8*q?WWdt1x|L@RKm48LSem(^LbPUOvi8;I;!qk zj$Q}OzYaBVf6veTfUV@9LM29$d~drjvzb#=HTw?~DbCr1KWYGS;MxL8Vaz!Kf6%PO z2`8@$=8-dX!*`xs`q9?{C zn^Yl!cs?VX_bXbF-1C#Oz{R^OGu~*wH$TZQFqNXa2{Hphw_PN3Cqe}Z(`hn*(7nYe zFSm@GehdF5PfX}IljJlKQ%Eto;W)Y#euy_YdT0A7zFJ0yE-Ev+=7ty@3Pm;3Z7_N* z8J$C4@)R#!Q9<5d^pmxII*iWqnA2eNv&ZOeh|p;Uz2J}N6qYd=kotKh{1!YQYPqbO z&^3V0arTMBJMhz^wk?A4j;+@tEOQ+v?ifzopRw8o_|ZXx4iCDnWgFm;kWI@-TwHFV zH>}W-9@JN889B;U9Fmd9s*?iTYCh2IrhlkoWKyy9o)m;trH6ccNRHAc_zrSyJ22`Y6nD`Nvlx9`gXCeHMj=&~udjKX z+#u|++%&m+xJB^@nH!=qNIv6olB~egV$IAxx@bt0)ky(#Ric4IAVmXjAkf%-PE@%xABya%;;`JP zpL+x@V_LFO1kBIniiZ}oICB3W4WyuIz+m#R@)|nqX7FeO#V8Zi0z+AjF5hgeK>x%s z#W|ZLFF0{d42CJkXx5vZgLwwPlkp;nxH>67r$m74f$<_S9FnxSA(1CjC29pWGn<8hOX+-hq#@ZDF;)Lq^i~++?@j9fRGRO8+dc-J%3E3SwTMZ(s)u~Yhw5Aobtr5 za6~Q3wRI3DOE;t_r)AJ+jVIOeZmz^Z0yUsA_*C*ts>FJv+S2>FSD6}aaVZ>F17r8k zEg*BaJ8vlkx^loNZ{`m!D(Ojpi^jK^4{$_)Ih`g0G=^R}!)j#jr!`QTU>$<{$>KSz zrXT1{FaaZlsfC^O5fGbT?%|Ys_wxw*_^l6xV2By4j?k_aK01~jmB%wJHS`+7WB|%} zDJ~W~#zZx@!eRuXCkET+#WMOYtQ4bxQIEw4)c+Zaj{Dgys78ZY@oSe?V0fj6u44Fy zKVq7CxDm-oH{gQg=N0`jwEd7850>Pt*=RNnjfsR_NJyO3Nda{=FC=IXik2f1aBfPw zrpuAzq70Q!A7tp{bX%uesIj2n_eIqOwa|Tnm57Noyv#Lkd9^S%x3iwA`UQn>KBXz} zdcxw~!ysa&Rm8REGosuu+A7i|H9^I-jFb&P{oI(bp;R zOH_k})+x+%3|*_RPBB2U9kV==CDjAp@xW17r!OAal^`H{D6P@G@y#;$D_?Z*ajY;- z4_O!pS;Y(4P#-pug-zZzgO42DBYosp9Lth8tCIriYGPSp5W=eyi8q=uQA6>l8fB`vdgWqcAoC5xcMufI5_C(m*xyky2l(2JnD7 z?2gRtjt;|{aH$IuxW9LDb&zkl<*`&7N~GazFDs6r3|3`jFhhC(4T5gg;p?VD9qs?B z43;D(vsf7d@N4;w%@508y6m6%hO8;Umpg z;Jul|j*Ch{9FiXLq}XpJW555r-)s)Tc6HYa8udMI%MX2z5ypvNE&;m~2K%`Q7@>r? 
zuA3v)(s%gNz;Ig$u?#xE^@b zt${+&-ApQAz<7HNzgp{+?Et#j?EhbFH=9XozZn{DAKUGEt+p4`cUrxw8-6Qkx!tMN zyn0w`+H&uWf#0avfm!wOO=h5O*P*i2>=&CmA-otLYae*6F4or$u}dL(osPHN*rAEb z5|<~gNL-n?PU5yaIOE??2G?OC#r|C#(bx*5TTHVsod*h#Ibz7@vI{KiapISZD>N)u25|iZ>c!X|1 zYzPNef*CMD;cnxC6Nd)hMn}gM@HRmiW?|tJ^Slbbjn&H9LS_BR(wevGm6xx=z25>C zp2HB^#Wltxsx+5=JtM+&>mPoMFA%;!MV^sGK^G-rtz4_DFPAMGCJ_#E(r~C<5J^u7 z)zNp;>-#!-r`55JK5F$?t4BKeoTNe>Y4ujCH$obm6jBA%;xt zK8OcFxoJ?bfd0~#ZAOhCCOatXjl*XG%I;B=jst3hZDBYlvqxDCnLv#oCOas6))6xS z#l`qDYJ_cJI4CYAGowZjlO2>qP;wTaI4zn3m+XU5ug!Y@)hzrEz&9zmV~@K!`erhB zTq1DlMRT=!0*%Il1i`_#@zandD`zA0kkz<4yq- zEwMpR61E1G{(e#qO-hd7YDcfh)?M)N8hwH0cI=wZM_2w84ung?iG!Z;%`_PS*@2)| Zr}+PXt?|0HD!Db zFo%8>zg61--L9NL*6j|e=&C0M_cJ)j;3R_w8JuMB9s~UhBpFCDFvviXfjx%sGX%*H zBtrxlf@FvuL;4w#WJr=BgA7SBq+{?0x9fBMUMzl94$^5@IAYgTa7ts{UkjyF-SnPNFem#*F!! Nd;xuB%U}Qu004LWxEBBb literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..58fbc63dc9c6a6172a51e020e1c64a029b77860f GIT binary patch literal 286 zcmV+(0pb21iwFoAtX*UT0CsP1Uua`*Uu|V_XJub9FfubRG%`0ZFfcG*E_7jX0PWI2 zYQr!P1yF69Qv@HNjHIz;okp0(w1u`dwrl7ay6r`}Z5O>vAJ(k%t}D#u_b=Q1|MYKN0k?F%1%8`e9 zKSw!vDgS>y*I(b#owWgNqS}DAplwuJ&2Ca$jw8bYC(sGBYqVGB+?VFfcB3VR8WN znB8*HKorNh)u&**(hHpZ$OrMx(FbtG2Vm58)KSp^)ETb|ZMA5pMJd!OSU=kFV``_g z+R`>j-$h75ul5x@*(C|I3IS#uFSUnbmGRtbmsoO@l@hoI+GxX zP$Ur*K|7yGKZ=ar5JpH<)d&);`x{QmvJ%0PL==fiR8e6*l@uisMUj8dVDHyb5%f>) z^&d92e{hCB17l}S@lN{Tbzyiq(+uSTOV~d$j!HmMF`!TC9`>?cQ{g`tqv+(1lQE1l7&4a2{ z&pB_NIumQo!XDFKF@34;je9O=8t&>Y$k=Db4l{P))=_Sl%$(yVbiT|@7~u=giuFc` z2EUKXDuO6<9jeKSq=5@H&$~2+q!^Z>7+;X-1uXq(>=+?g3PFhQ1ICxAgk>4aU;+}vgZ!w`#U8ROLKvl^UN@~S>WT<_T*q% zxd%eADhNUt=ggKkydu-feb+V=!tNEj0F8B6Ya3jgkTy7qsY2;!;Nqdd5e|+JOpky& z%odybSrLI&F>CEqLrP{pa8uCf91)=y1v~JgSqY<~_VOFM@Xyj>6DC?bhJB5ceT{?)b+k~^aA;7(1w|wx 
zG_qE+=06Pq=ZIfCS*eTM9)^njdevTB^~(=0XD#%--OyAZzzs3aM?y4kYa0lJP8$e3 z$O=ZI0Z|75BKg{9SMrBT1<*TZ#dHghPwTC32UdQiRbI1dkRi61zQOdB5H2af_RcZ0 z2omUT7hJ-fvB`Pmz*l^{e)Y=bUh%X1(c0P&r1+;Oa#UY5!;&%=cxsyI5IFES^#g}` zfrH|ThDYIq5e?1BRX7R_u5zrna4O3%0@(UGA$*P!xv1)wiM^_bm9Ng~J1F=(Z7X(E zfrY;77=q9wj0P)}y9z-Hl^_zZFjghMpiosZlFDH;SV0k}Tp@2)*5K7^xJ2aphHp{l zF*PM#y~3s-U+g%Vf{33XUb;d9`jgm2DT-0;xZ1h@aTzU&aG2MvKS5XW=X zr;y&2oy|w7FSDpARJ3X7rWL%o0YysnAXV^CZ0*Hr4=REm#Xij1YEQm`n{0@sjaY*Q z5Bal6W@l%1f9y=Qa+Kt$O)$bL(r_9HE!ni9{Gt#bluoA=BsKX}F(3sr4I7%OX|5kY z2SZsvN(?Gfd!?kHSkC%~dVlJtD#Y!AH8VArqUBX$6@+2s#cRVN-k0WVX>6|-W!#Es zmSli$+1X7mQEZmmG-Z%`voaM?2$WVrgP^W+puM3`a7v;?i4rA>RUa~aF$@3>004!C=xqQ1 literal 0 HcmV?d00001 From f7957af13bfd71de522bc69fb7bbb0d059c4cc93 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 18 May 2023 16:22:02 +0900 Subject: [PATCH 23/86] =?UTF-8?q?=E6=83=B3=E5=AE=9A=E5=A4=96=E3=83=95?= =?UTF-8?q?=E3=82=A1=E3=82=A4=E3=83=AB=E3=81=AE=E5=8F=97=E9=A0=98=E7=A2=BA?= =?UTF-8?q?=E8=AA=8D=E3=83=81=E3=82=A7=E3=83=83=E3=82=AF=E3=82=92=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 想定ファイル数、卸在庫データ取込対象日の場合は15、そうでない場合は14 --- .../src/batch/vjsk/vjsk_importer.py | 5 +++++ .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 13 ++++++++++--- .../tests/batch/vjsk/vjsk_load/testdata/dummy.gz | Bin 107 -> 0 bytes 3 files changed, 15 insertions(+), 3 deletions(-) delete mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py index 23fc8e5d..694f93ac 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py @@ -150,6 +150,11 @@ def _check_received_files() -> list: if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST): raise BatchOperationException(f'製造ロット番号マスタファイルがありません 
ファイル一覧:{received_s3_files}') + # 想定外ファイルの受領確認 (想定ファイル数、卸在庫データ取込対象日の場合は15、そうでない場合は14) + naturally_count = 15 if batch_context.is_vjsk_stock_import_day else 14 + if len(received_s3_files) > naturally_count: + raise BatchOperationException(f'想定数を超える受領ファイルがあります ファイル一覧:{received_s3_files}') + logger.debug('_check_received_files done') return received_s3_files diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 43afd62c..8dac0688 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -2,6 +2,7 @@ from os import path import pytest +from src.aws.s3 import VjskReceiveBucket from src.batch.common.batch_context import BatchContext # from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, @@ -40,9 +41,16 @@ class TestImportFileToDb: 期待値 例外が発生しない """ - # setup + # setup - 卸在庫データ取込対象日 self.batch_context.is_vjsk_stock_import_day = True + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload test_files = [ "stock_slip_data_202304270000.gz", "slip_data_202304270000.gz", @@ -58,8 +66,7 @@ class TestImportFileToDb: "whs_customer_mst_202304270000.gz", "mdb_conv_mst_202304270000.gz", "bio_slip_data_202304270000.gz", - "lot_num_mst_202304270000.gz", - "dummy.gz" + "lot_num_mst_202304270000.gz" ] for test_file in test_files: file_name = path.join(self.test_file_path, test_file) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/dummy.gz deleted file mode 100644 index 
fb34f9372aaf78634f82e936651d484182b6cc88..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 107 zcmV-x0F?h9iwFqlUu0wi0AzJ-ZFw$qVR8WN(LoA;Fc1U4JjEACmbUJD1h2j57WDU` zg1*2~=*}gVKn__-&c|rG;Xf2oRl&!XuLOxP2$N1KxY`mh5EYco<`t~6)u8)-IOm+} N%?T+)6;uES0074AF75yT From bb1c545a44511640a3b908b054f81859298a204c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Thu, 18 May 2023 16:35:05 +0900 Subject: [PATCH 24/86] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=82=B1?= =?UTF-8?q?=E3=83=BC=E3=82=B9=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_file_check/conftest.py | 150 ++++++++++++++++++ .../vjsk_file_check/test_vjsk_file_check.py | 32 ++++ 2 files changed, 182 insertions(+) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py index 0778241d..c315147a 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py @@ -1225,3 +1225,153 @@ def init_check_received_files_ng15(s3_client, bucket_name, receive_folder): Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') # s3_client.delete_object(Bucket=bucket_name, # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng16(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + 
s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng17(s3_client, bucket_name, receive_folder): + # setup + + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz') diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 43062be1..99b31d68 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -269,3 +269,35 @@ def test_check_received_files_ng15(init_check_received_files_ng15): _check_received_files() assert str(e.value).startswith("製造ロット番号マスタファイルがありません") > 0 + + +def test_check_received_files_ng16(init_check_received_files_ng16): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert 
str(e.value).startswith("想定数を超える受領ファイルがあります") > 0 + + +def test_check_received_files_ng17(init_check_received_files_ng17): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = False + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("想定数を超える受領ファイルがあります") > 0 From 94016f513d9eb9004f9883dbf3db52fac76a8a81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 18 May 2023 18:04:16 +0900 Subject: [PATCH 25/86] =?UTF-8?q?feat:=20DCF=E6=96=BD=E8=A8=AD=E7=B5=B1?= =?UTF-8?q?=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF=E6=97=A5=E6=AC=A1=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E3=83=90=E3=83=83=E3=83=81(=E9=80=94=E4=B8=AD?= =?UTF-8?q?=E3=81=BE=E3=81=A7)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Integrate_dcf_inst_merge.py | 547 ++++++++++++++++++ 1 file changed, 547 insertions(+) create mode 100644 ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py new file mode 100644 index 00000000..fa6ce77a --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py @@ -0,0 +1,547 @@ +from datetime import datetime, timedelta +from sqlalchemy import CursorResult +from src.batch.batch_functions import logging_sql +from src.batch.common.batch_context import BatchContext +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.time.elapsed_time import ElapsedTime + +logger = get_logger('DCF施設統合マスタ日次更新バッチ') +batch_context = BatchContext.get_instance() + + +def exec(): + db = Database.get_instance() + try: + db.connect() + db.begin() + # + 
enabled_dst_inst_merge_records = _laundering_enabled_dct_inst_merge(db) + # + _laundering_disabled_dct_inst_merge(db) + # + if len(enabled_dst_inst_merge_records) > 0: + logger.info('') + _add_emp_chg_inst(db, enabled_dst_inst_merge_records) + logger.info('') + _add_ult_ident_presc(db, enabled_dst_inst_merge_records) + db.commit() + # + logger.info('') + except Exception as e: + db.rollback() + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _laundering_enabled_dct_inst_merge(db: Database) -> list[dict]: + # データ取得(無効フラグが『0(有効)』) + valid_dst_inst_merge_records = _select_dct_inst_merge(db, 0, True) + # 移行先DCF施設コードの更新(無効フラグが『0(有効)』) + _update_dcf_inst_merge(db, 0) + # DCF施設統合マスタの過去分の洗い替え + if len(valid_dst_inst_merge_records) == 0: + return + for row in valid_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + + return valid_dst_inst_merge_records + + +def _laundering_disabled_dct_inst_merge(db: Database): + # データ取得(無効フラグが『1(無効)』) + disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1, False) + # 移行先DCF施設コードの更新(無効フラグが『1(無効)』) + _update_dcf_inst_merge(db, 1) + # DCF施設統合マスタの過去分の洗い替え + if len(disabled_dst_inst_merge_records) == 0: + return + for row in disabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + + +def _add_ult_ident_presc(db: Database, valid_dst_inst_merge_records: list[dict]): + # + for data_inst_cnt, row in enumerate(valid_dst_inst_merge_records, start=1): + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + + ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for data_cnt, ult_row in enumerate(ult_ident_presc_records, start=1): + logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') + # 処方元コード=重複時相手先コードが発生した場合 + if ult_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(ult_row['start_date']) + set_start_date = start_date \ + if start_date > 
tekiyo_month_first_day else tekiyo_month_first_day + set_start_date = _date_time_to_str(set_start_date) + is_delete_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_row): + _delete_ult_ident_presc(db, set_start_date, ult_row) + is_delete_duplicate_key = True + else: + logger.info('納入先処方元マスタの重複予定データなし') + _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_row) + + if _str_to_date_time(ult_row['end_date']) < start_date: + _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + continue + if not is_delete_duplicate_key: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_row) + if start_date > last_end_date: + _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + + +def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + DELETE FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :set_start_date + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 'ratio': ult_row['ratio'], + 'start_date': start_date + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + + +def _add_emp_chg_inst(db: Database, valid_dst_inst_merge_records: list[dict]): + # + for row in valid_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for emp_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(emp_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + + _insert_emp_chg_inst(db, row['dup_opp_cd'], 
_date_time_to_str(set_start_date), emp_row) + + if start_date < tekiyo_month_first_day: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_row) + if start_date <= last_end_date: + continue + _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], emp_row['start_date']) + + # if start_date >= tekiyo_month_first_day: + # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) + # continue + # last_end_date = tekiyo_month_first_day - timedelta(days=1) + # _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], last_end_date, emp_row) + # if start_date > last_end_date: + # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) + + +def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET + enabled_flg = 'N', + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND start_date = :start_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date} + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, emp_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date= SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND emp_cd = :emp_cd + AND bu_cd = :bu_cd + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'dcf_inst_cd': dcf_inst_cd, + 'ta_cd': emp_row['ta_cd'], + 'emp_cd': emp_row['emp_cd'], + 
'bu_cd': emp_row['bu_cd'], + 'start_date': emp_row['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, emp_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.emp_chg_inst( + inst_cd, + ta_cd, + emp_cd, + bu_cd, + start_date, + end_date, + main_chg_flg, + enabled_flg, + creater, + create_date, + updater, + update_date + ) + VALUES( + :dup_opp_cd, + :ta_cd, + :emp_cd, + :bu_cd, + :start_date, + :end_date, + :main_chg_flg, + 'Y', + CURRENT_USER(), + SYSDATE(), + CURRENT_USER(), + SYSDATE() + ) + """ + params = { + 'dup_opp_cd': dup_opp_cd, + 'ta_cd': emp_row['ta_cd'], + 'emp_cd': emp_row['emp_cd'], + 'bu_cd': emp_row['bu_cd'], + 'start_date': set_start_date, + 'end_date': emp_row['end_date'], + 'main_chg_flg': emp_row['main_chg_flg'] if emp_row['main_chg_flg'] is None else None + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: bool): + # + try: + sql = """ + SELECT + dim.dcf_inst_cd, + dim.dup_opp_cd, + dim.tekiyo_month + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg =: muko_flg + AND dim.enabled_flg = 'Y' + AND dim.dcf_inst_cd_new <= >: is_null_dcf_inst_cd_new + """ + params = { + 'muko_flg': muko_flg, + 'is_null_dcf_inst_cd_new': None + } + dst_inst_merge_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + + return dst_inst_merge_records + + +def _update_dcf_inst_merge(db: 
Database, muko_flg: int): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.dcf_inst_merge AS updim + INNER JOIN( + SELECT + dim.dcf_inst_cd AS base_dcf_inst_cd, + dim.dup_opp_cd AS base_dup_opp_cd, + dim.tekiyo_month AS base_tekiyo_month, + dim.muko_flg AS base_muko_flg, + dim.enabled_flg AS base_enabled_flg + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month=DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg= :muko_flg + AND dim.enabled_flg='Y' + AND dim.dcf_inst_cd_new IS {$dcfInstCdNew}NULL + ) AS bf_dim + SET + updim.dcf_inst_cd_new = {column}, + updim.updater = CURRENT_USER(), + updim.update_date = SYSDATE() + WHERE + updim.dcf_inst_cd = base_dcf_inst_cd + AND updim.dup_opp_cd = base_dup_opp_cd + AND updim.tekiyo_month = base_tekiyo_month + AND updim.muko_flg =base_muko_flg + AND updim.enabled_flg =base_enabled_flg + """ + params = { + 'muko_flg': muko_flg + } + res = db.execute(sql.format( + column='base_dup_opp_cd' if muko_flg == 1 else 'NULL' + ), params) + logging_sql(logger, sql) + logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.dcf_inst_merge + SET + dcf_inst_cd_new = :dcf_inst_cd_new_after, + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + dcf_inst_cd_new = :dcf_inst_cd_new_before + AND enabled_flg = 'Y' + AND muko_flg = 0 + """ + params = {'dcf_inst_cd_new_after': dcf_inst_cd_new_after, 'dcf_inst_cd_new_before': dcf_inst_cd_new_before} + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_record: 
CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.ult_ident_presc + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date= SYSDATE() + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'ta_cd': ult_ident_presc_record['ta_cd'], + 'ult_ident_cd': ult_ident_presc_record['ult_ident_cd'], + 'ratio': ult_ident_presc_record['ratio'], + 'start_date': ult_ident_presc_record['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'終了日 > 開始月のため適用終了日を更新, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('') + raise e + + +def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ult_row: CursorResult): + # + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.ult_ident_presc( + ta_cd, + ult_ident_cd, + ratio, + start_date, + presc_cd, + end_date, + creater, + create_date, + update_date, + updater + ) + VALUES( + :ta_cd, + :ult_ident_cd, + :ratio, + :start_date, + :presc_cd, + :end_date, + CURRENT_USER(), + SYSDATE(), + SYSDATE(), + CURRENT_USER() + ) + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 'ratio': ult_row['ratio'], + 'start_date': set_Start_Date, + 'presc_cd': dup_opp_cd, + 'end_date': ult_row['end_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'納入先処方元マスタに追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('納入先処方元マスタに追加に失敗') + raise e + + +def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: + # + try: + sql = """ + SELECT + eci.inst_cd, + eci.ta_cd, + eci.emp_cd, + eci.bu_cd, + eci.start_date, + eci.end_date, + eci.main_chg_flg, + eci.enabled_flg, + (SELECT COUNT(eciopp.inst_cd) FROM src05.emp_chg_inst AS eciopp WHERE eciopp.inst_cd = :dup_opp_cd) AS 
opp_count + FROM + src05.emp_chg_inst AS eci + WHERE + eci.inst_cd = :dcf_inst_cd + AND eci.enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < eci.end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + emp_chg_inst_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return emp_chg_inst_records + + +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: + # + try: + sql = """ + SELECT + uip.ta_cd, + uip.ult_ident_cd, + uip.ratio, + uip.start_date, + uip.end_date, + (SELECT COUNT(uipopp.ta_cd) FROM ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd) AS opp_count + FROM + src05.ult_ident_presc AS uip + WHERE + uip.presc_cd = '{$dcfInstCd}' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + ult_ident_presc_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return ult_ident_presc_records + + +def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: CursorResult): + # + try: + sql = """ + SELECT + COUNT(ta_cd) AS cnt + FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :set_Start_Date + """ + params = { + 'ta_cd': ult_row['ta_cd'], + 'ult_ident_cd': ult_row['ult_ident_cd'], + 'ratio': ult_row['ratio'], + 'start_date': set_Start_Date + } + result = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('') + except Exception as e: + logger.debug('') + raise e + return result[0]['cnt'] + + +def _get_first_day_of_month(month_day: str): + return datetime.datetime.strptime(month_day, '%Y%m01') + + +def _str_to_date_time(str_date_time: str): + return datetime.datetime.strptime(str_date_time, 
'%Y%m%d') + + +def _date_time_to_str(date_time: datetime): + return date_time.strptime('%Y%m%d') From 75db3d2251968850ce4d1834d83ed39bef0d4461 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 19 May 2023 16:37:03 +0900 Subject: [PATCH 26/86] =?UTF-8?q?feat:=20DCF=E6=96=BD=E8=A8=AD=E7=B5=B1?= =?UTF-8?q?=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF=E6=97=A5=E6=AC=A1=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E3=83=90=E3=83=83=E3=83=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Integrate_dcf_inst_merge.py | 358 ++++++++++-------- 1 file changed, 193 insertions(+), 165 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py index fa6ce77a..c37a3a30 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py @@ -7,8 +7,8 @@ from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.time.elapsed_time import ElapsedTime -logger = get_logger('DCF施設統合マスタ日次更新バッチ') batch_context = BatchContext.get_instance() +logger = get_logger('DCF施設統合マスタ日次更新バッチ') def exec(): @@ -16,19 +16,17 @@ def exec(): try: db.connect() db.begin() - # - enabled_dst_inst_merge_records = _laundering_enabled_dct_inst_merge(db) - # - _laundering_disabled_dct_inst_merge(db) - # + logger.debug('DCF施設統合マスタ日次更新バッチ処理開始') + # DCF施設統合マスタ移行先コードのセット(無効フラグが『0(有効)』) + enabled_dst_inst_merge_records = _set_enabled_dct_inst_merge(db) + # DCF施設統合マスタ移行先コードのセット(無効フラグが『1(無効)』) + _set_disabled_dct_inst_merge(db) + # DCF施設統合マスタに無効フラグが『0(有効)』データが存在する場合 if len(enabled_dst_inst_merge_records) > 0: - logger.info('') _add_emp_chg_inst(db, enabled_dst_inst_merge_records) - logger.info('') _add_ult_ident_presc(db, enabled_dst_inst_merge_records) db.commit() - # - logger.info('') + 
logger.info('DCF施設統合マスタ日次更新バッチ処理終了') except Exception as e: db.rollback() raise BatchOperationException(e) @@ -36,68 +34,101 @@ def exec(): db.disconnect() -def _laundering_enabled_dct_inst_merge(db: Database) -> list[dict]: +def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: # データ取得(無効フラグが『0(有効)』) - valid_dst_inst_merge_records = _select_dct_inst_merge(db, 0, True) + enabled_dst_inst_merge_records = _select_dct_inst_merge(db, 0) # 移行先DCF施設コードの更新(無効フラグが『0(有効)』) - _update_dcf_inst_merge(db, 0) - # DCF施設統合マスタの過去分の洗い替え - if len(valid_dst_inst_merge_records) == 0: - return - for row in valid_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + if _update_dcf_inst_merge(db, 0) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in enabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) - return valid_dst_inst_merge_records + return enabled_dst_inst_merge_records -def _laundering_disabled_dct_inst_merge(db: Database): +def _set_disabled_dct_inst_merge(db: Database): # データ取得(無効フラグが『1(無効)』) - disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1, False) + disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1) # 移行先DCF施設コードの更新(無効フラグが『1(無効)』) - _update_dcf_inst_merge(db, 1) - # DCF施設統合マスタの過去分の洗い替え - if len(disabled_dst_inst_merge_records) == 0: - return - for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + if _update_dcf_inst_merge(db, 1) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in disabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) -def _add_ult_ident_presc(db: Database, valid_dst_inst_merge_records: list[dict]): - # - for data_inst_cnt, row in enumerate(valid_dst_inst_merge_records, start=1): +def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 納入先処方元マスタの追加 + logger.info('納入先処方元マスタの登録 開始') + for data_inst_cnt, row in 
enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) - for data_cnt, ult_row in enumerate(ult_ident_presc_records, start=1): + for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 - if ult_row['opp_count'] > 0: + if ult_ident_presc_row['opp_count'] > 0: break - start_date = _str_to_date_time(ult_row['start_date']) + start_date = _str_to_date_time(ult_ident_presc_row['start_date']) set_start_date = start_date \ if start_date > tekiyo_month_first_day else tekiyo_month_first_day set_start_date = _date_time_to_str(set_start_date) - is_delete_duplicate_key = False - if _count_duplicate_ult_ident_presc(db, set_start_date, ult_row): - _delete_ult_ident_presc(db, set_start_date, ult_row) - is_delete_duplicate_key = True + is_exists_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: + _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, + '納入先処方元マスタの重複予定データの削除') + is_exists_duplicate_key = True else: logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_row) + _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_ident_presc_row) - if _str_to_date_time(ult_row['end_date']) < start_date: - _delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '開始月>適用開始日のため物理削除') continue - if not is_delete_duplicate_key: + if not is_exists_duplicate_key: last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_row) - if start_date > last_end_date: - 
_delete_ult_ident_presc(db, ult_row['start_date'], ult_row) + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + if start_date > last_end_date: + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '適用終了日更新後 開始日>終了日のため物理削除') + + logger.info('納入先処方元マスタの登録 終了') -def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult): - # +def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 従業員担当施設マスタの登録 + logger.info('従業員担当施設マスタの登録 開始') + for row in enabled_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for emp_chg_inst_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_chg_inst_row['opp_count'] > 0: + break + + start_date = _str_to_date_time(emp_chg_inst_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + + _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), + emp_chg_inst_row) + + if start_date < tekiyo_month_first_day: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), + emp_chg_inst_row) + if start_date <= last_end_date: + continue + _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) + + logger.info('従業員担当施設マスタの登録 終了') + + +def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: CursorResult, + log_message: str): + # ult_ident_prescのDelete try: elapsed_time = ElapsedTime() sql = """ @@ -107,56 +138,24 @@ def _delete_ult_ident_presc(db: Database, start_date: str, ult_row: CursorResult ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd AND ratio = :ratio - AND start_date = :set_start_date + AND start_date = :start_date 
""" params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], 'start_date': start_date } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info(f'{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug(f'{log_message} 失敗') raise e -def _add_emp_chg_inst(db: Database, valid_dst_inst_merge_records: list[dict]): - # - for row in valid_dst_inst_merge_records: - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) - for emp_row in emp_chg_inst_records: - # 重複時相手先コードが存在したかのチェック - if emp_row['opp_count'] > 0: - break - - start_date = _str_to_date_time(emp_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day - - _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), emp_row) - - if start_date < tekiyo_month_first_day: - last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_row) - if start_date <= last_end_date: - continue - _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], emp_row['start_date']) - - # if start_date >= tekiyo_month_first_day: - # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) - # continue - # last_end_date = tekiyo_month_first_day - timedelta(days=1) - # _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], last_end_date, emp_row) - # if start_date > last_end_date: - # _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_row['ta_cd'], start_date) - - def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, 
ta_cd: str, start_date: str): - # + # emp_chg_instを更新 try: elapsed_time = ElapsedTime() sql = """ @@ -174,14 +173,15 @@ def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, params = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date} res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'従業員担当施設マスタのYorNフラグ更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタのYorNフラグ更新に失敗') raise e -def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, emp_row: CursorResult): - # +def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, + emp_chg_inst_row: CursorResult): + # emp_chg_instを更新 try: elapsed_time = ElapsedTime() sql = """ @@ -200,21 +200,22 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: params = { 'end_date': last_end_date, 'dcf_inst_cd': dcf_inst_cd, - 'ta_cd': emp_row['ta_cd'], - 'emp_cd': emp_row['emp_cd'], - 'bu_cd': emp_row['bu_cd'], - 'start_date': emp_row['start_date'] + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 'bu_cd': emp_chg_inst_row['bu_cd'], + 'start_date': emp_chg_inst_row['start_date'] } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'従業員担当施設マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'従業員担当施設マスタの適用終了日更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの適用終了日更新 失敗') raise e -def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, emp_row: CursorResult): - # +def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, + emp_chg_inst_row: CursorResult): + # emp_chg_instにInsert try: elapsed_time = ElapsedTime() sql = """ @@ -250,23 +251,24 @@ def _insert_emp_chg_inst(db: Database, 
dup_opp_cd: str, set_start_date: str, emp """ params = { 'dup_opp_cd': dup_opp_cd, - 'ta_cd': emp_row['ta_cd'], - 'emp_cd': emp_row['emp_cd'], - 'bu_cd': emp_row['bu_cd'], + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 'bu_cd': emp_chg_inst_row['bu_cd'], 'start_date': set_start_date, - 'end_date': emp_row['end_date'], - 'main_chg_flg': emp_row['main_chg_flg'] if emp_row['main_chg_flg'] is None else None + 'end_date': emp_chg_inst_row['end_date'], + 'main_chg_flg': None + if emp_chg_inst_row['main_chg_flg'] is None else emp_chg_inst_row['main_chg_flg'] } res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの追加に失敗') raise e -def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: bool): - # +def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: + # dcf_inst_mergeからSelect try: sql = """ SELECT @@ -279,25 +281,26 @@ def _select_dct_inst_merge(db: Database, muko_flg: int, is_null_dcf_inst_cd_new: src05.hdke_tbl AS ht ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE - dim.muko_flg =: muko_flg + dim.muko_flg = :muko_flg AND dim.enabled_flg = 'Y' - AND dim.dcf_inst_cd_new <= >: is_null_dcf_inst_cd_new - """ + AND dim.dcf_inst_cd_new IS {not_null}NULL + """.format( + not_null='' if muko_flg == 0 else 'NOT ' + ) params = { - 'muko_flg': muko_flg, - 'is_null_dcf_inst_cd_new': None + 'muko_flg': muko_flg } dst_inst_merge_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('DCF施設統合マスタの取得に成功') except Exception as e: - logger.debug('') + logger.debug('DCF施設統合マスタの取得に失敗') raise e return dst_inst_merge_records -def _update_dcf_inst_merge(db: Database, muko_flg: int): +def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # try: elapsed_time = ElapsedTime() @@ -319,7 +322,7 @@ def 
_update_dcf_inst_merge(db: Database, muko_flg: int): WHERE dim.muko_flg= :muko_flg AND dim.enabled_flg='Y' - AND dim.dcf_inst_cd_new IS {$dcfInstCdNew}NULL + AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim SET updim.dcf_inst_cd_new = {column}, @@ -329,24 +332,27 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int): updim.dcf_inst_cd = base_dcf_inst_cd AND updim.dup_opp_cd = base_dup_opp_cd AND updim.tekiyo_month = base_tekiyo_month - AND updim.muko_flg =base_muko_flg - AND updim.enabled_flg =base_enabled_flg - """ + AND updim.muko_flg = base_muko_flg + AND updim.enabled_flg = base_enabled_flg + """.format( + not_null='' if muko_flg == 0 else 'NOT ', + column='base_dup_opp_cd' if muko_flg == 0 else 'NULL' + ) params = { 'muko_flg': muko_flg } - res = db.execute(sql.format( - column='base_dup_opp_cd' if muko_flg == 1 else 'NULL' - ), params) + res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('DCF施設統合マスタの更新に失敗') raise e + return res.rowcount + def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): - # + # dcf_inst_mergeをUpdate try: elapsed_time = ElapsedTime() sql = """ @@ -361,17 +367,20 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c AND enabled_flg = 'Y' AND muko_flg = 0 """ - params = {'dcf_inst_cd_new_after': dcf_inst_cd_new_after, 'dcf_inst_cd_new_before': dcf_inst_cd_new_before} + params = { + 'dcf_inst_cd_new_after': dcf_inst_cd_new_after, + 'dcf_inst_cd_new_before': dcf_inst_cd_new_before + } res = db.execute(sql, params) logging_sql(logger, sql) logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('移行先DCF施設コードの更新に失敗') raise e -def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_record: CursorResult): - # +def 
_update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: CursorResult): + # ult_ident_presc_endをUpdate try: elapsed_time = ElapsedTime() sql = """ @@ -388,21 +397,22 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident """ params = { 'end_date': last_end_date, - 'ta_cd': ult_ident_presc_record['ta_cd'], - 'ult_ident_cd': ult_ident_presc_record['ult_ident_cd'], - 'ratio': ult_ident_presc_record['ratio'], - 'start_date': ult_ident_presc_record['start_date'] + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': ult_ident_presc_row['start_date'] } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'終了日 > 開始月のため適用終了日を更新, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'終了日 > 開始月のため適用終了日を更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('') + logger.debug('終了日 > 開始月のため適用終了日を更新 失敗') raise e -def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ult_row: CursorResult): - # +def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, + ult_ident_presc_row: CursorResult): + # ult_ident_prescにInsert try: elapsed_time = ElapsedTime() sql = """ @@ -419,7 +429,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, update_date, updater ) - VALUES( + VALUES( :ta_cd, :ult_ident_cd, :ratio, @@ -433,23 +443,23 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, ) """ params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], 'start_date': set_Start_Date, 'presc_cd': dup_opp_cd, - 'end_date': ult_row['end_date'] + 'end_date': ult_ident_presc_row['end_date'] } res = 
db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'納入先処方元マスタに追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'納入先処方元マスタに追加 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('納入先処方元マスタに追加に失敗') + logger.debug('納入先処方元マスタに追加 失敗') raise e def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # + # emp_chg_instから取得 try: sql = """ SELECT @@ -461,7 +471,14 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis eci.end_date, eci.main_chg_flg, eci.enabled_flg, - (SELECT COUNT(eciopp.inst_cd) FROM src05.emp_chg_inst AS eciopp WHERE eciopp.inst_cd = :dup_opp_cd) AS opp_count + ( + SELECT + COUNT(eciopp.inst_cd) + FROM + src05.emp_chg_inst AS eciopp + WHERE + eciopp.inst_cd = :dup_opp_cd + ) AS opp_count FROM src05.emp_chg_inst AS eci WHERE @@ -472,15 +489,15 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} emp_chg_inst_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('従業員担当施設マスタの取得 成功') except Exception as e: - logger.debug('') + logger.debug('従業員担当施設マスタの取得 失敗') raise e return emp_chg_inst_records def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # + # ult_ident_prescから取得 try: sql = """ SELECT @@ -489,25 +506,33 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> uip.ratio, uip.start_date, uip.end_date, - (SELECT COUNT(uipopp.ta_cd) FROM ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd) AS opp_count + ( + SELECT + COUNT(uipopp.ta_cd) + FROM + ult_ident_presc AS uipopp + WHERE + uipopp.presc_cd = :dup_opp_cd + ) AS opp_count FROM src05.ult_ident_presc AS uip WHERE - uip.presc_cd = '{$dcfInstCd}' + uip.presc_cd = :dcf_inst_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ params = {'dcf_inst_cd': 
dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('納入先処方元マスタの取得 成功') except Exception as e: - logger.debug('') + logger.debug('納入先処方元マスタの取得 失敗') raise e return ult_ident_presc_records -def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: CursorResult): - # +def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str, + ult_ident_presc_row: CursorResult) -> int: + # ult_ident_prescの重複時相手先コードの件数取得 try: sql = """ SELECT @@ -518,30 +543,33 @@ def _count_duplicate_ult_ident_presc(db: Database, set_Start_Date: str, ult_row: ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd AND ratio = :ratio - AND start_date = :set_Start_Date + AND start_date = :start_date """ params = { - 'ta_cd': ult_row['ta_cd'], - 'ult_ident_cd': ult_row['ult_ident_cd'], - 'ratio': ult_row['ratio'], - 'start_date': set_Start_Date + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': set_start_date } result = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('') + logger.info('納入先処方元マスタの重複予定データの存在チェック 成功') except Exception as e: - logger.debug('') + logger.debug('納入先処方元マスタの重複予定データの存在チェック 失敗') raise e return result[0]['cnt'] -def _get_first_day_of_month(month_day: str): - return datetime.datetime.strptime(month_day, '%Y%m01') +def _get_first_day_of_month(year_month: str) -> datetime: + # year_monthの初日の日付を日付型に変換し返却する + return datetime.strptime(year_month + '01', '%Y%m%d') -def _str_to_date_time(str_date_time: str): - return datetime.datetime.strptime(str_date_time, '%Y%m%d') +def _str_to_date_time(str_date_time: str) -> datetime: + # str_date_timeを日付型に変換して返却する + return datetime.strptime(str_date_time, '%Y%m%d') -def _date_time_to_str(date_time: datetime): - return date_time.strptime('%Y%m%d') +def _date_time_to_str(date_time: datetime) -> str: + # 
date_timeをYmd型に変換して返却する + return date_time.strftime('%Y%m%d') From 3b369b8fc40b258792455aa647e4007f99708398 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 19 May 2023 16:48:11 +0900 Subject: [PATCH 27/86] =?UTF-8?q?feat:=20=E8=B5=B7=E5=8B=95=E3=81=97?= =?UTF-8?q?=E3=81=AA=E3=81=84=E4=B8=8D=E5=85=B7=E5=90=88=E3=81=AE=E5=AF=BE?= =?UTF-8?q?=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/src/jobctrl_daily.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 1a82f226..71fbea90 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -7,6 +7,7 @@ from src.batch.batch_functions import ( update_batch_processing_flag_in_processing) from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile +from src.batch.dcf_inst_merge import Integrate_dcf_inst_merge from src.batch.laundering import create_dcf_inst_merge, mst_inst_laundering from src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException @@ -114,6 +115,14 @@ def exec(): logger.exception(f'DCF施設統合マスタ作成エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS + try: + logger.info('DCF施設統合マスタ日次更新バッチ:起動') + Integrate_dcf_inst_merge.exec() + logger.info('DCF施設統合マスタ日次更新バッチ:終了') + except BatchOperationException as e: + logger.exception(f'DCF施設統合マスタ日次更新バッチエラー(異常終了){e}') + return constants.BATCH_EXIT_CODE_SUCCESS + # バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 logger.info('業務日付更新・バッチステータスリフレッシュ:起動') try: From 7da4be471c97a1eba6b3fe86e6342c8272ff9021 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 15:30:51 +0900 Subject: [PATCH 28/86] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= 
=?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...t_merge.py => integrate_dcf_inst_merge.py} | 87 ++++++++++--------- .../src/batch/laundering/sales_laundering.py | 3 + ecs/jskult-batch-daily/src/jobctrl_daily.py | 9 -- 3 files changed, 51 insertions(+), 48 deletions(-) rename ecs/jskult-batch-daily/src/batch/dcf_inst_merge/{Integrate_dcf_inst_merge.py => integrate_dcf_inst_merge.py} (83%) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py similarity index 83% rename from ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py rename to ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index c37a3a30..ffb2ba06 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/Integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -1,5 +1,4 @@ from datetime import datetime, timedelta -from sqlalchemy import CursorResult from src.batch.batch_functions import logging_sql from src.batch.common.batch_context import BatchContext from src.db.database import Database @@ -8,7 +7,7 @@ from src.logging.get_logger import get_logger from src.time.elapsed_time import ElapsedTime batch_context = BatchContext.get_instance() -logger = get_logger('DCF施設統合マスタ日次更新バッチ') +logger = get_logger('DCF施設統合マスタ日次更新') def exec(): @@ -16,7 +15,7 @@ def exec(): try: db.connect() db.begin() - logger.debug('DCF施設統合マスタ日次更新バッチ処理開始') + logger.debug('DCF施設統合マスタ日次更新処理開始') # DCF施設統合マスタ移行先コードのセット(無効フラグが『0(有効)』) enabled_dst_inst_merge_records = _set_enabled_dct_inst_merge(db) # DCF施設統合マスタ移行先コードのセット(無効フラグが『1(無効)』) @@ -26,7 +25,7 @@ def exec(): _add_emp_chg_inst(db, enabled_dst_inst_merge_records) _add_ult_ident_presc(db, enabled_dst_inst_merge_records) db.commit() - 
logger.info('DCF施設統合マスタ日次更新バッチ処理終了') + logger.debug('DCF施設統合マスタ日次更新処理終了') except Exception as e: db.rollback() raise BatchOperationException(e) @@ -59,10 +58,11 @@ def _set_disabled_dct_inst_merge(db: Database): def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): # 納入先処方元マスタの追加 logger.info('納入先処方元マスタの登録 開始') - for data_inst_cnt, row in enumerate(enabled_dst_inst_merge_records, start=1): - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) + for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - ult_ident_presc_records = _select_ult_ident_presc(db, row['dcf_inst_cd'], row['dup_opp_cd']) + ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + enabled_merge_record['dup_opp_cd']) for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 @@ -80,13 +80,17 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict is_exists_duplicate_key = True else: logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, row['dup_opp_cd'], ult_ident_presc_row) + _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) + # 適用終了日 < 適用開始日の場合 if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + # 対象レコードを物理削除する _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, '開始月>適用開始日のため物理削除') continue + # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) if start_date > last_end_date: @@ -99,9 +103,9 @@ def _add_ult_ident_presc(db: 
Database, enabled_dst_inst_merge_records: list[dict def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): # 従業員担当施設マスタの登録 logger.info('従業員担当施設マスタの登録 開始') - for row in enabled_dst_inst_merge_records: - tekiyo_month_first_day = _get_first_day_of_month(row['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, row['dcf_inst_cd'], row['dup_opp_cd']) + for enabled_merge_record in enabled_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd']) for emp_chg_inst_row in emp_chg_inst_records: # 重複時相手先コードが存在したかのチェック if emp_chg_inst_row['opp_count'] > 0: @@ -111,22 +115,24 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): set_start_date = start_date \ if start_date > tekiyo_month_first_day else tekiyo_month_first_day - _insert_emp_chg_inst(db, row['dup_opp_cd'], _date_time_to_str(set_start_date), + _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 if start_date < tekiyo_month_first_day: + # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_emp_chg_inst_end_date(db, row['dcf_inst_cd'], _date_time_to_str(last_end_date), + _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - if start_date <= last_end_date: - continue - _update_emp_chg_inst_enabled_flg(db, row['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], - emp_chg_inst_row['start_date']) + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) logger.info('従業員担当施設マスタの登録 終了') -def _delete_ult_ident_presc(db: Database, start_date: 
str, ult_ident_presc_row: CursorResult, +def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: dict, log_message: str): # ult_ident_prescのDelete try: @@ -154,8 +160,8 @@ def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: raise e -def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): - # emp_chg_instを更新 +def _update_emp_chg_inst_disabled(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): + # emp_chg_instをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -180,8 +186,8 @@ def _update_emp_chg_inst_enabled_flg(db: Database, dcf_inst_cd: str, ta_cd: str, def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, - emp_chg_inst_row: CursorResult): - # emp_chg_instを更新 + emp_chg_inst_row: dict): + # emp_chg_instをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -214,8 +220,8 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, - emp_chg_inst_row: CursorResult): - # emp_chg_instにInsert + emp_chg_inst_row: dict): + # emp_chg_instにINSERT try: elapsed_time = ElapsedTime() sql = """ @@ -268,7 +274,8 @@ def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: - # dcf_inst_mergeからSelect + # dcf_inst_mergeからSELECT + # muko_flgの値によって、SQLのWHERE条件を変更 try: sql = """ SELECT @@ -301,9 +308,11 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: - # - try: + # dcf_inst_mergeをUPDATE + # muko_flgの値によって、SQLのWHERE条件とSET句を変更 + try: elapsed_time = ElapsedTime() + log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' sql = """ UPDATE src05.dcf_inst_merge AS updim @@ -318,9 +327,9 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: src05.dcf_inst_merge AS 
dim INNER JOIN src05.hdke_tbl AS ht - ON dim.tekiyo_month=DATE_FORMAT(ht.syor_date, '%Y%m') + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE - dim.muko_flg= :muko_flg + dim.muko_flg = :muko_flg AND dim.enabled_flg='Y' AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim @@ -343,16 +352,16 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'DCF施設統合マスタの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('DCF施設統合マスタの更新に失敗') + logger.debug(f'DCF施設統合マスタの{log_message} 失敗') raise e return res.rowcount def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): - # dcf_inst_mergeをUpdate + # dcf_inst_mergeをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -379,8 +388,8 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c raise e -def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: CursorResult): - # ult_ident_presc_endをUpdate +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: dict): + # ult_ident_presc_endをUPDATE try: elapsed_time = ElapsedTime() sql = """ @@ -411,8 +420,8 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, - ult_ident_presc_row: CursorResult): - # ult_ident_prescにInsert + ult_ident_presc_row: dict): + # ult_ident_prescにINSERT try: elapsed_time = ElapsedTime() sql = """ @@ -459,7 +468,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # emp_chg_instから取得 + # emp_chg_instからSELECT try: sql = """ SELECT @@ -497,7 +506,7 @@ def 
_select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: - # ult_ident_prescから取得 + # ult_ident_prescからSELECT try: sql = """ SELECT @@ -531,7 +540,7 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str, - ult_ident_presc_row: CursorResult) -> int: + ult_ident_presc_row: dict) -> int: # ult_ident_prescの重複時相手先コードの件数取得 try: sql = """ diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index f6d682b4..019f8c29 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -1,5 +1,6 @@ from src.batch.common.batch_context import BatchContext from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering +from src.batch.dcf_inst_merge import integrate_dcf_inst_merge from src.logging.get_logger import get_logger batch_context = BatchContext.get_instance() @@ -16,6 +17,8 @@ def exec(): return # 洗替用マスタ作成 create_inst_merge_for_laundering.exec() + # DCF施設統合マスタ日次更新 + integrate_dcf_inst_merge.exec() # 施設担当者洗替 emp_chg_inst_laundering.exec() # 納入先処方元マスタ洗替 diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index 71fbea90..1a82f226 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -7,7 +7,6 @@ from src.batch.batch_functions import ( update_batch_processing_flag_in_processing) from src.batch.common.batch_context import BatchContext from src.batch.common.calendar_file import CalendarFile -from src.batch.dcf_inst_merge import Integrate_dcf_inst_merge from src.batch.laundering import create_dcf_inst_merge, mst_inst_laundering from 
src.batch.ultmarc import ultmarc_process from src.error.exceptions import BatchOperationException @@ -115,14 +114,6 @@ def exec(): logger.exception(f'DCF施設統合マスタ作成エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS - try: - logger.info('DCF施設統合マスタ日次更新バッチ:起動') - Integrate_dcf_inst_merge.exec() - logger.info('DCF施設統合マスタ日次更新バッチ:終了') - except BatchOperationException as e: - logger.exception(f'DCF施設統合マスタ日次更新バッチエラー(異常終了){e}') - return constants.BATCH_EXIT_CODE_SUCCESS - # バッチ処理完了とし、処理日、バッチ処置中フラグ、dump取得状態区分を更新 logger.info('業務日付更新・バッチステータスリフレッシュ:起動') try: From 5d16bceec95f4bd064d18246f55fc32dd5736e54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 16:22:05 +0900 Subject: [PATCH 29/86] =?UTF-8?q?feat:=20SQL=E4=BF=AE=E6=AD=A3=E3=80=81?= =?UTF-8?q?=E5=8D=98=E4=BD=93=E8=A9=A6=E9=A8=93=E6=9B=B8=E3=83=AC=E3=83=93?= =?UTF-8?q?=E3=83=A5=E3=83=BC=E6=99=82=E3=81=AE=E6=8C=87=E6=91=98=E3=81=AE?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index ffb2ba06..6e9bf5e4 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -90,12 +90,15 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict continue # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: - # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + # 適用開始日 > DCF施設統合マスタの適用月度の前月末日の場合 if start_date > last_end_date: + # 対象レコードを物理削除する 
_delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, '適用終了日更新後 開始日>終了日のため物理削除') + continue + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) logger.info('納入先処方元マスタの登録 終了') @@ -195,7 +198,7 @@ def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: src05.emp_chg_inst SET end_date = :end_date, updater = CURRENT_USER(), - update_date= SYSDATE() + update_date = SYSDATE() WHERE inst_cd = :dcf_inst_cd AND ta_cd = :ta_cd @@ -310,9 +313,9 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # dcf_inst_mergeをUPDATE # muko_flgの値によって、SQLのWHERE条件とSET句を変更 - try: + try: elapsed_time = ElapsedTime() - log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' + log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' sql = """ UPDATE src05.dcf_inst_merge AS updim @@ -330,7 +333,7 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') WHERE dim.muko_flg = :muko_flg - AND dim.enabled_flg='Y' + AND dim.enabled_flg ='Y' AND dim.dcf_inst_cd_new IS {not_null}NULL ) AS bf_dim SET @@ -397,7 +400,7 @@ def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident src05.ult_ident_presc SET end_date = :end_date, updater = CURRENT_USER(), - update_date= SYSDATE() + update_date = SYSDATE() WHERE ta_cd = :ta_cd AND ult_ident_cd = :ult_ident_cd @@ -519,7 +522,7 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> SELECT COUNT(uipopp.ta_cd) FROM - ult_ident_presc AS uipopp + src05.ult_ident_presc AS uipopp WHERE uipopp.presc_cd = :dup_opp_cd ) AS opp_count From 0f8fb0026f88accd75207484ad115213360a99c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 25 May 2023 18:16:09 +0900 Subject: [PATCH 30/86] 
=?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 6e9bf5e4..182b7232 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -278,7 +278,8 @@ def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: # dcf_inst_mergeからSELECT - # muko_flgの値によって、SQLのWHERE条件を変更 + # 無効フラグがOFFのときは、移行先DCF施設コードが設定されてないデータを抽出する。 + # ONのときは、移行先DCF施設コードが設定されているデータを抽出する。 try: sql = """ SELECT @@ -312,7 +313,10 @@ def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: # dcf_inst_mergeをUPDATE - # muko_flgの値によって、SQLのWHERE条件とSET句を変更 + # 無効フラグがOFFのときは、 + # 移行先DCF施設コードが設定されていないデータを抽出し、移行先DCF施設コードに重複時相手先コードを上書きする + # 無効フラグがONのときは、 + # 移行先DCF施設コードが設定されているデータを抽出し、移行先DCF施設コードにNULLを上書きする。 try: elapsed_time = ElapsedTime() log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' From f3e772e0f34b8f824a2283cdd3ba7926cd622d4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 30 May 2023 16:25:48 +0900 Subject: [PATCH 31/86] =?UTF-8?q?feat:=20=E5=8D=98=E4=BD=93=E8=A9=A6?= =?UTF-8?q?=E9=A8=93=E5=AE=9F=E6=96=BD=E4=B8=AD=E3=81=AB=E7=99=BA=E8=A6=8B?= =?UTF-8?q?=E3=81=95=E3=82=8C=E3=81=9F=E3=83=87=E3=83=83=E3=83=88=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=81=AE=E5=89=8A=E9=99=A4=E3=80=81=E3=81=BB?= 
=?UTF-8?q?=E3=81=8B=E3=80=81=E3=83=87=E3=83=BC=E3=82=BF=E3=81=AE=E6=9B=B4?= =?UTF-8?q?=E6=96=B0=E6=9D=A1=E4=BB=B6=E3=81=AE=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dcf_inst_merge/integrate_dcf_inst_merge.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 182b7232..20ac5fcc 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -91,12 +91,6 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 if not is_exists_duplicate_key: last_end_date = tekiyo_month_first_day - timedelta(days=1) - # 適用開始日 > DCF施設統合マスタの適用月度の前月末日の場合 - if start_date > last_end_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '適用終了日更新後 開始日>終了日のため物理削除') - continue # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) @@ -121,14 +115,15 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) - # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 - if start_date < tekiyo_month_first_day: + # 適用終了日 ≧ 適用開始日の場合 + if _str_to_date_time(emp_chg_inst_row['end_date']) >= start_date: # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - continue - # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + if last_end_date >= start_date: + continue + # 
DCF施設統合マスタの適用月度の前月末日 < 適用開始日、または適用終了日 < 適用開始日の場合、N(論理削除レコード)に設定する _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], emp_chg_inst_row['start_date']) From 9ae92789678fad933360267f7f675b50b5074348 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 1 Jun 2023 15:58:52 +0900 Subject: [PATCH 32/86] =?UTF-8?q?feat:=20=E3=83=AD=E3=82=B0=E5=87=BA?= =?UTF-8?q?=E5=8A=9B=E3=83=A1=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8=E3=82=92?= =?UTF-8?q?=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dcf_inst_merge/integrate_dcf_inst_merge.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 20ac5fcc..832cc20c 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -40,7 +40,7 @@ def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: if _update_dcf_inst_merge(db, 0) > 0: # DCF施設統合マスタの過去分の洗い替え for row in enabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd']) + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], 0) return enabled_dst_inst_merge_records @@ -52,7 +52,7 @@ def _set_disabled_dct_inst_merge(db: Database): if _update_dcf_inst_merge(db, 1) > 0: # DCF施設統合マスタの過去分の洗い替え for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd']) + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], 1) def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): @@ -356,15 +356,16 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: logging_sql(logger, sql) 
logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug(f'DCF施設統合マスタの{log_message} 失敗') + logger.debug(f'DCF施設統合マスタの有効データを{log_message} 失敗') raise e return res.rowcount -def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str): +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, muko_flg: int): # dcf_inst_mergeをUPDATE try: + log_message = '' if muko_flg == 0 else '戻し' elapsed_time = ElapsedTime() sql = """ UPDATE @@ -384,9 +385,9 @@ def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_c } res = db.execute(sql, params) logging_sql(logger, sql) - logger.info(f'移行先DCF施設コードの更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + logger.info(f'移行先DCF施設コードの{log_message}更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') except Exception as e: - logger.debug('移行先DCF施設コードの更新に失敗') + logger.debug(f'移行先DCF施設コードの{log_message}更新に失敗') raise e From b9e4a73b941909476e503768647abc7fa57bd3d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 2 Jun 2023 09:51:16 +0900 Subject: [PATCH 33/86] =?UTF-8?q?feat:=20=E3=83=AD=E3=82=B0=E5=87=BA?= =?UTF-8?q?=E5=8A=9B=E9=83=A8=E5=88=86=E3=82=92=E5=A4=89=E6=9B=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 832cc20c..b4b6e4e7 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -40,7 +40,7 @@ def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: if _update_dcf_inst_merge(db, 0) > 
0: # DCF施設統合マスタの過去分の洗い替え for row in enabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], 0) + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], '') return enabled_dst_inst_merge_records @@ -52,7 +52,7 @@ def _set_disabled_dct_inst_merge(db: Database): if _update_dcf_inst_merge(db, 1) > 0: # DCF施設統合マスタの過去分の洗い替え for row in disabled_dst_inst_merge_records: - _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], 1) + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): @@ -362,10 +362,9 @@ def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: return res.rowcount -def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, muko_flg: int): +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, log_message: str): # dcf_inst_mergeをUPDATE try: - log_message = '' if muko_flg == 0 else '戻し' elapsed_time = ElapsedTime() sql = """ UPDATE From 9c8e3d1bac519a1c82ebbe9170ad20ff8e96dc72 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 18:15:11 +0900 Subject: [PATCH 34/86] =?UTF-8?q?=E5=8F=97=E9=A0=98=E6=99=82=E3=81=AE?= =?UTF-8?q?=E5=BD=A2=E5=BC=8F=E3=81=8B=E3=82=89S3download=E3=83=BB?= =?UTF-8?q?=E8=A7=A3=E5=87=8D=E3=83=BB=E7=99=BB=E9=8C=B2=E3=83=BB=E3=83=90?= =?UTF-8?q?=E3=83=83=E3=82=AF=E3=82=A2=E3=83=83=E3=83=97=E3=81=AE=E4=B8=80?= =?UTF-8?q?=E9=80=A3=E5=87=A6=E7=90=86=E3=82=92=E7=A2=BA=E8=AA=8D=E3=81=99?= =?UTF-8?q?=E3=82=8B=E7=94=A8=E3=81=AE=E3=83=86=E3=82=B9=E3=83=88=E3=83=87?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E3=82=92=E3=82=B5=E3=83=96=E3=83=95=E3=82=A9?= =?UTF-8?q?=E3=83=AB=E3=83=80=E3=81=AB=E7=A7=BB=E5=8B=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bio_slip_data_202304270000.gz | Bin .../fcl_mst_202304270000.gz | Bin 
.../hld_mst_202304270000.gz | Bin .../lot_num_mst_202304270000.gz | Bin .../mdb_conv_mst_202304270000.gz | Bin .../mkr_org_horizon_202304270000.gz | Bin .../org_cnv_mst_202304270000.gz | Bin .../phm_prd_mst_202304270000.gz | Bin .../phm_price_mst_202304270000.gz | Bin .../slip_data_202304270000.gz | Bin .../stock_slip_data_202304270000.gz | Bin .../tran_kbn_mst_202304270000.gz | Bin .../vop_hco_merge_202304270000.gz | Bin .../whs_customer_mst_202304270000.gz | Bin .../whs_mst_202304270000.gz | Bin 15 files changed, 0 insertions(+), 0 deletions(-) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/bio_slip_data_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/fcl_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/hld_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/lot_num_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/mdb_conv_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/mkr_org_horizon_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/org_cnv_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/phm_prd_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/phm_price_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/slip_data_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/stock_slip_data_202304270000.gz (100%) rename 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/tran_kbn_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/vop_hco_merge_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/whs_customer_mst_202304270000.gz (100%) rename ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/{ => TestImportFileToDb}/whs_mst_202304270000.gz (100%) diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz similarity index 100% rename from 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304270000.gz rename to 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz similarity index 100% rename from ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304270000.gz rename to ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz From 368fef1f5bc68de02f0042aa48112160e1073290 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 18:16:19 +0900 Subject: [PATCH 35/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E5=9C=A8=E5=BA=AB?= =?UTF-8?q?=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 97 +++++++++++++------ .../testdata/stock_slip_data_202304280000.tsv | 5 + 
.../testdata/stock_slip_data_202304290000.tsv | 5 + .../tests/testing_vjsk_utility.py | 91 +++++++++++++++++ 4 files changed, 166 insertions(+), 32 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/testing_vjsk_utility.py diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 8dac0688..e1d3571d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -4,22 +4,28 @@ import pytest from src.aws.s3 import VjskReceiveBucket from src.batch.common.batch_context import BatchContext +from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager # from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, _import_file_to_db) from src.db.database import Database +# from tests.testing_vjsk_utility import create_vjsk_assertion_dictionary +from tests.testing_vjsk_utility import (assert_table_results, + create_vjsk_assertion_list) class TestImportFileToDb: db: Database batch_context: BatchContext - test_file_path: str + test_file_path_import_all: str + test_file_path_load_individual: str @pytest.fixture(autouse=True, scope='function') def pre_test(self, database: Database): """テスト実行前後処理""" # setup - self.test_file_path = path.join(path.dirname(__file__), "testdata") + self.test_file_path_import_all = path.join(path.dirname(__file__), "testdata", "TestImportFileToDb") + self.test_file_path_load_individual = path.join(path.dirname(__file__), "testdata") self.batch_context = BatchContext.get_instance() @@ -69,7 +75,7 @@ class TestImportFileToDb: 
"lot_num_mst_202304270000.gz" ] for test_file in test_files: - file_name = path.join(self.test_file_path, test_file) + file_name = path.join(self.test_file_path_import_all, test_file) key = f"{receive_folder}/{test_file}" s3_client.upload_file(file_name, bucket_name, key) @@ -93,8 +99,6 @@ class TestImportFileToDb: received_s3_files = _check_received_files() _import_file_to_db(received_s3_files) - # self.db.connect() - # # 検証 (卸在庫データファイル) # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) @@ -116,36 +120,65 @@ class TestImportFileToDb: key = f"{receive_folder}/{test_file}" s3_client.delete_object(Bucket=bucket_name, Key=key) - # def test_load_stock_slip_data_ok(self, mapper): - # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + def test_load_01_stock_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # # setup - # self.batch_context.is_vjsk_stock_import_day = True - # self.db.execute(f"truncate table {table_name_src}") + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") - # # assertion (insert) - # target_dict = { - # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, - # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304280000.tsv") - # } - # VjskDataLoadManager.load(target_dict) + # assertion1 (insert 4row) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 4 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 4 + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "stock_slip_data_202304280000.tsv") + } + 
VjskDataLoadManager.load(target_dict) - # # assertion (update) - # target_dict = { - # "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, - # "src_file_path": path.join(self.test_file_path, "stock_slip_data_202304290000.tsv") - # } - # VjskDataLoadManager.load(target_dict) + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) - # result_org = self.db.execute(f"select * from {table_name_org}") - # assert result_org.rowcount == 4 - # result_src1 = self.db.execute(f"select * from {table_name_src}") - # assert result_src1.rowcount == 6 + # assertion2 (update 2row +insert 2row) - # # teardown + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "stock_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 
2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv new file mode 100644 index 00000000..cc6c3902 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"D463630101 23022849630021900003500000 セトロタイドチユウシヤヨウ0.25MG 1V" "363" "01" "01 " "230228" "496300219" "000035" "0" "0000" " セトロタイドチユウシヤヨウ0.25MG 1V" "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000059" "59" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001270000040000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "026 " "230228" "496300127" "000004" "0" "0000" "01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000060" "60" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 
140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv new file mode 100644 index 00000000..0b180c24 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001100000020000001ゴナールエフヒカチユウペン300 1トウ40 " "253" "01" "027 " "230228" "496300110" "000002" "0" "0000" "01ゴナールエフヒカチユウペン300 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000063" "63" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001270000110000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "027 " "230228" "496300127" "000011" "0" "0000" 
"01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000064" "64" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py new file mode 100644 index 00000000..1849e325 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -0,0 +1,91 @@ + + +import csv +from datetime import datetime + + +def create_vjsk_assertion_list(file_path: str) -> list: + """DB登録期待値リストを作成する + + Args: + file_path (str): DB登録期待値ファイル(tsvファイル)のパス + ※DB登録期待値ファイルの前提 + 受領データファイルと同じ + BOM付きtsv形式 + 一行目はカラム名になっているヘッダ行 + + Returns: + List(dict) DB登録期待値辞書リスト + """ + with open(file_path, encoding='utf_8_sig', newline='') as tsv_file: + header = tsv_file.readline().strip('\n').replace('"', '').split('\t') + reader = csv.DictReader(tsv_file, fieldnames=header, delimiter='\t') + rows = [r for r in reader] + + # DB抽出値と比較できるように、リテラル値をDB抽出値と同じデータフォーマットに変換 + for row in rows: + for k, v in row.items(): + converted_value = v + if v == 'NULL': + converted_value = None + if is_valid_date_format(v, '%Y/%m/%d') is True: # YYYY/MM/DD + converted_value = datetime.strptime(v, '%Y/%m/%d').date() + if is_valid_date_format(v, '%Y-%m-%d') is True: # YYYY-MM-DD + converted_value = datetime.strptime(v, '%Y-%m-%d').date() + if is_valid_date_format(v, '%Y/%m/%d %H:%M:%S') is True: # YYYY/MM/DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y/%m/%d %H:%M:%S') + if is_valid_date_format(v, '%Y-%m-%d %H:%M:%S') is True: # YYYY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y-%m-%d %H:%M:%S') + if is_valid_date_format(v, '%y-%m-%d %H:%M:%S') is True: # YY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%y-%m-%d %H:%M:%S') + + row[k] = converted_value + + return rows + + +def is_valid_date_format(date_str: str, date_format): + """日付文字列が、与えられたフォーマットにマッチするかを検査する + + Args: + date_str (str): 日付文字列 + 
date_format (str, optional): 日付のフォーマット + + Returns: + _type_: 正しい日付文字列の場合、True、それ以外はFalse + """ + try: + datetime.strptime(date_str, date_format) + return True + except ValueError: + return False + + +def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_name: list = None) -> None: + """テーブル同士の取得結果突き合わせ + + Args: + actual_rows (list[dict]): テスト結果の辞書リスト + expect_rows (list[dict]): 期待値の辞書リスト + ignore_col_name (list): 比較を無視するDBのカラム名. Default None. + """ + # 取得件数が一致すること + assert len(actual_rows) == len(expect_rows) + + line_number = 0 + # 1行ずつ調査 + for actual_row, expect_row in zip(actual_rows, expect_rows): + line_number += 1 + # 1カラムずつ調査 + for actual_col_name, expect_col_name in zip(actual_row, expect_row): + # テストメソッド側で個別に確認するものはスキップさせる + if ignore_col_name is not None and actual_col_name in ignore_col_name: + continue + else: + actual_value = actual_row[actual_col_name] + expect_value = expect_row[expect_col_name] + if isinstance(actual_value, (int)): + expect_value = int(expect_value) + if isinstance(actual_value, (float)): + expect_value = float(expect_value) + assert actual_value == expect_value, f'{line_number}行目:{actual_col_name}が、期待値と一致しませんでした' From 36c18835f1641ab9da05d15c198b626791558921 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 20:43:51 +0900 Subject: [PATCH 36/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E8=B2=A9=E5=A3=B2?= =?UTF-8?q?=E3=83=87=E3=83=BC=E3=82=BF=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=80=81=EF=BC=B6=E5=8D=B8=E7=B5=84=E7=B9=94=E5=A4=89=E6=8F=9B?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=80=81=EF=BC=B6=E6=96=BD=E8=A8=AD?= =?UTF-8?q?=E7=B5=B1=E5=90=88=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_recv_file_mapper.py | 56 +++--- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 189 ++++++++++++++++++ 
.../testdata/org_cnv_mst_202304280000.tsv | 5 + .../testdata/org_cnv_mst_202304290000.tsv | 5 + .../testdata/slip_data_202304280000.tsv | 5 + .../testdata/slip_data_202304290000.tsv | 5 + .../testdata/vop_hco_merge_202304280000.tsv | 5 + .../testdata/vop_hco_merge_202304290000.tsv | 5 + .../tests/testing_vjsk_utility.py | 13 +- 9 files changed, 259 insertions(+), 29 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 8b372dc2..352e2f91 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -201,7 +201,7 @@ class VjskReceiveFileMapper: ,t.tksnbk_kbn ,t.fcl_exec_kbn ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.sales AS t @@ -287,7 +287,7 @@ class VjskReceiveFileMapper: ,tksnbk_kbn=t.tksnbk_kbn ,fcl_exec_kbn=t.fcl_exec_kbn ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dT=SYSDATE() ; @@ -326,7 +326,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.hld_mst_v AS t @@ 
-340,7 +340,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -389,7 +389,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.whs_mst_v AS t @@ -408,7 +408,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -517,7 +517,7 @@ class VjskReceiveFileMapper: ,t.start_date ,t.end_date ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.mkr_org_horizon_v AS t @@ -566,7 +566,7 @@ class VjskReceiveFileMapper: ,start_date=t.start_date ,end_date=t.end_date ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -605,7 +605,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.org_cnv_mst_v AS t @@ -619,7 +619,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -654,7 +654,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.tran_kbn_mst_v AS t @@ -666,7 +666,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -731,7 +731,7 @@ class VjskReceiveFileMapper: ,t.admin_kbn ,t.fcl_type 
,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.fcl_mst_v AS t @@ -758,7 +758,7 @@ class VjskReceiveFileMapper: ,admin_kbn=t.admin_kbn ,fcl_type=t.fcl_type ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -831,7 +831,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.phm_prd_mst_v AS t @@ -862,7 +862,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -899,7 +899,7 @@ class VjskReceiveFileMapper: ,t.end_date ,t.dsp_odr ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.phm_price_mst_v AS t @@ -912,7 +912,7 @@ class VjskReceiveFileMapper: ,end_date=t.end_date ,dsp_odr=t.dsp_odr ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -995,7 +995,7 @@ class VjskReceiveFileMapper: ,t.postal_cd ,t.tel_num ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.whs_customer_mst_v AS t @@ -1015,7 +1015,7 @@ class VjskReceiveFileMapper: ,postal_cd=t.postal_cd ,tel_num=t.tel_num ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -1048,7 +1048,7 @@ class VjskReceiveFileMapper: ,t.reliability ,t.start_date ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.upd_dt ,SYSDATE() FROM org05.mdb_cnv_mst_v AS t @@ -1059,7 +1059,7 @@ class VjskReceiveFileMapper: ,reliability=t.reliability ,start_date=t.start_date ,rec_sts_kbn=t.rec_sts_kbn - 
,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,upd_dt=t.upd_dt ,dwh_upd_dt=SYSDATE() ; @@ -1134,7 +1134,7 @@ class VjskReceiveFileMapper: ,t.err_flg9 ,t.err_flg10 ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.whole_stock AS t @@ -1166,7 +1166,7 @@ class VjskReceiveFileMapper: ,err_flg9=t.err_flg9 ,err_flg10=t.err_flg10 ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; @@ -1339,7 +1339,7 @@ class VjskReceiveFileMapper: ,t.tksnbk_kbn ,t.fcl_exec_kbn ,t.rec_sts_kbn - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.bio_sales AS t @@ -1420,7 +1420,7 @@ class VjskReceiveFileMapper: ,tksnbk_kbn=t.tksnbk_kbn ,fcl_exec_kbn=t.fcl_exec_kbn ,rec_sts_kbn=t.rec_sts_kbn - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; @@ -1449,7 +1449,7 @@ class VjskReceiveFileMapper: ,t.lot_num ,t.expr_dt ,t.frst_mov_dt - ,t.ins_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する ,t.ins_usr ,SYSDATE() FROM org05.lot_num_mst AS t @@ -1458,7 +1458,7 @@ class VjskReceiveFileMapper: ,lot_num=t.lot_num ,expr_dt=t.expr_dt ,frst_mov_dt=t.frst_mov_dt - ,ins_dt=t.ins_dt + ,ins_dt=nullif(t.ins_dt, 0) ,ins_usr=t.ins_usr ,dwh_upd_dt=SYSDATE() ; diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index e1d3571d..fc3cee21 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -182,3 +182,192 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_02_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + table_name_src = 
mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from 
{table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_03_org_cnv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_ORG_CNV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.org_cd = o.org_cd and s.sub_num = 
o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_04_vop_hco_merge_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_VOP_HCO_MERGE) + table_name_src = mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + 
assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.apply_dt = o.apply_dt)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv new file mode 100644 index 00000000..208d1a20 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"042" "01" "361007" "0" "300006657" "20230401" "99991231" "0" "0" "23-04-12 11:24:06" "23-04-12 11:24:06" +"042" "01" "381207" "0" "300006658" "20230401" "99991231" "0" "0" "23-04-12 11:24:27" "23-04-12 11:24:27" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" "300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv new file mode 100644 index 00000000..c91eafee --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" 
"upd_dt" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" "300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" +"080" "00" "21807" "2" "300006649" "20230401" "99991231" "0" "0" "23-04-12 10:49:23" "23-04-12 10:49:23" +"080" "00" "25110" "0" "300005251" "20000101" "99991231" "0" "9" "21-03-11 14:59:47" "23-04-12 10:47:42" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv new file mode 100644 index 00000000..d9b11f17 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"recvdata" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D4420202011611A4 0183733 23030133625911102303 4963500230000020003110000000622000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 
20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362591" "000002" "00031100" "0000062200" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000022" "29" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "2" "31100" "62200" "31438" "62876" "35309" "70618" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4420202011611A4 0183733 23030133625921102303 4963500230000080003110000002488000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362592" "000008" "00031100" "0000248800" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000023" "30" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "31100" "248800" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" 
"20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv new file mode 100644 index 00000000..5299839a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"recvdata" 
"rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" 
"SYSTEM" +"DAY2-101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202041131A1 1409581 23030106357711102303 4963500230000070002966000002076200000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトウホクロウサイビヨウイン 00009818563センダイシアオバクダイノハラ " "202" "02" "041131A1 " "1409581 " "496350023" "110" "230301" "20230301" "0635771" "000007" "00029660" "0000207620" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トウホクロウサイビヨウイン " "9818563センダイシアオバクダイノハラ " "センダイシアオバクダイノハラ " "9818563" "" "000000" " " "20230227" "J" "VJSK_J_MERCK_2023022" "J2023022700000128" "135" "20230301" "202303142041" "110" "売上" "04113" "300000354" "北仙台支店" "" "200000016" "株式会社スズケン" "1409581 " "670232828063007745" "ドクリツギョウセイホウジンロウドウシャケンコウアンゼンキコウ トウホクロウサイビョウイン" "独立行政法人労働者健康安全機構 東北労災病院" "981-0911 宮城県仙台市青葉区台原4−3−21" "496350023" "アービタックス 注射剤 100mg 1VIAL" "7" "29660" "207620" "31438" "220066" "35309" "247163" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202141234B1 2607506 23030109289511102303 4963500230000100003036600003036600000000000000000000000 
ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vヨコスカキヨウサイビヨウイン 00002380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "202" "02" "141234B1 " "2607506 " "496350023" "110" "230301" "20230301" "0928951" "000010" "00030366" "0000303660" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ヨコスカキヨウサイビヨウイン " "2380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "カナガワケンヨコスカシヨネガハマドオリ1-16 " "2380011" "" "000000" " " "20230228" "J" "VJSK_J_MERCK_2023022" "J2023022800000094" "101" "20230301" "202303142041" "110" "売上" "14123" "300000274" "磯子支店" "" "200000016" "株式会社スズケン" "2607506 " "670236609488110605" "コッカコウムインキョウサイクミアイレンゴウカイ ヨコスカキョウサイビョウイン" "国家公務員共済組合連合会 横須賀共済病院" "238-0011 神奈川県横須賀市米が浜通1−16" "496350023" "アービタックス 注射剤 100mg 1VIAL" "10" "30366" "303660" "31438" "314380" "35309" "353090" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv new file mode 100644 index 00000000..dca02347 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000001" "900000001" "20230509" "事由01" +"100000002" "900000002" "20230509" "事由02" +"100000003" "900000003" "20230509" "事由03" +"100000004" "900000004" "20230509" "事由04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv new file mode 100644 index 00000000..c99d9f76 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000003" "900000003" "20230509" "DAY-2事由03" +"100000004" "900000004" "20230509" "DAY-2事由04" +"100000005" 
"900000005" "20230509" "DAY-2事由05" +"100000006" "900000006" "20230509" "DAY-2事由06" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 1849e325..4aec424d 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -1,6 +1,7 @@ import csv +import decimal from datetime import datetime @@ -84,8 +85,18 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor else: actual_value = actual_row[actual_col_name] expect_value = expect_row[expect_col_name] + + # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする if isinstance(actual_value, (int)): expect_value = int(expect_value) if isinstance(actual_value, (float)): expect_value = float(expect_value) - assert actual_value == expect_value, f'{line_number}行目:{actual_col_name}が、期待値と一致しませんでした' + if isinstance(actual_value, (decimal.Decimal)): + expect_value = decimal.Decimal(expect_value) + # if type(actual_value) == datetime.date: + if type(actual_value).__name__ == "date": + if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD + expect_value = datetime.strptime(expect_value, '%Y%m%d').date() + + # 検証 + assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 4b21279d5f8b5f53cf2061f0bce125d07d794d74 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 23:48:52 +0900 Subject: [PATCH 37/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E3=83=9E=E3=82=B9?= =?UTF-8?q?=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/whs_mst_202304280000.tsv | 5 ++ .../testdata/whs_mst_202304290000.tsv | 5 ++ 3 files changed, 73 
insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index fc3cee21..62d6245c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -371,3 +371,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_05_whs_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み 
+ assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_whs_cd = o.v_whs_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv new file mode 100644 index 00000000..e9280e36 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000002" "0" "株式会社モロオ" "カナ01" "モロオ" "1110001" "住所01" "ジュウショ01" "00-0000-0001" "0" "20000101" "99991231" "20" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000005" "0" "岩渕薬品株式会社" "カナ02" "岩渕薬品" "1110002" "住所02" "ジュウショ02" "00-0000-0002" "0" "20000101" "99991231" "50" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000009" "0" "株式会社マルタケ" "カナ03" "マルタケ" "1110003" "住所03" "ジュウショ03" "00-0000-0003" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "カナ04" "ファイネス" "1110004" "住所04" "ジュウショ04" "00-0000-0004" "0" "20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" 
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv new file mode 100644 index 00000000..c16f188e --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000009" "0" "株式会社マルタケ" "" "マルタケ" "" "" "" "" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "" "ファイネス" "" "" "" "" "0" "20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000011" "0" "鍋林株式会社" "" "鍋林" "" "" "" "" "0" "20000101" "99991231" "110" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000012" "0" "岡野薬品株式会社" "" "岡野薬品" "" "" "" "" "0" "20000101" "99991231" "120" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" From fa0ff77851cba603fa753cde9754d6eeffaa566a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 2 Jun 2023 23:57:08 +0900 Subject: [PATCH 38/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E5=8D=B8=E3=83=9B=E3=83=BC?= =?UTF-8?q?=E3=83=AB=E3=83=87=E3=82=A3=E3=83=B3=E3=82=B0=E3=82=B9=E3=83=9E?= =?UTF-8?q?=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/hld_mst_202304280000.tsv | 5 ++ .../testdata/hld_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 62d6245c..1c200856 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -434,3 +434,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_06_hld_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_HLD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_HLD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + 
# srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_hld_cd = o.v_hld_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv new file mode 100644 index 00000000..2c4a8c1f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "卸ホールディングス名01-1" "オロシホールディングスメイ01-1" "卸名01-1" "20230101" "99991231" "1" "0" "23-05-09 12:00:01" "23-05-09 13:00:01" +"10001" "2" "卸ホールディングス名01-2" "オロシホールディングスメイ01-2" "卸名01-2" "20230102" "99991231" "1" "0" "23-05-09 12:00:02" "23-05-09 13:00:02" +"10001" "3" "卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230103" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230104" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv new file mode 100644 index 00000000..9728bb5f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" 
"卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230101" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230102" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" +"10001" "5" "卸ホールディングス名01-5" "オロシホールディングスメイ01-5" "卸名01-5" "20230103" "99991231" "1" "0" "23-05-09 12:00:05" "23-05-09 13:00:05" +"10002" "1" "卸ホールディングス名02-1" "オロシホールディングスメイ02-1" "卸名01-1" "20230104" "99991231" "1" "0" "23-05-09 12:00:06" "23-05-09 13:00:06" From bf7f76e3478446de552f98d244f5b8bee4cebf9c Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Sat, 3 Jun 2023 01:00:47 +0900 Subject: [PATCH 39/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E6=96=BD=E8=A8=AD?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 67 +++++++++++++++++++ .../testdata/fcl_mst_202304280000.tsv | 5 ++ .../testdata/fcl_mst_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 17 +++-- 4 files changed, 88 insertions(+), 6 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 1c200856..73ad96de 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -497,3 +497,70 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_07_fcl_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_FCL_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_FCL_MST) + + # 
setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 
+ assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv new file mode 100644 index 00000000..9615f392 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229430760653825" "0" "20000101" "99991231" "" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "" "0" "700-0023 岡山県岡山市北区駅前町1−6−20" "オカヤマケン オカヤマシキタク エキマエチョウ1-6-20" "700-0023" "33" "岡山県" "岡山市北区" "駅前町1−6−20" "0862236468" "33101" "30" "1" "" "" +"670229435466662922" "0" "20000101" "99991231" "" "医療法人社団仁卓会 ほりかわ歯科クリニック" "イリョウホウジンシャダンジンタクカイ ホリカワシカクリニック" "ほりかわ歯科クリニック (医社)" "ホリカワシカクリニック (イシャ)" "" "0" "675-0101 兵庫県加古川市平岡町新在家1573−1−4F" "ヒョウゴケン カコガワシ ヒラオカチョウシンザイケ1573-1-4F" "675-0101" "28" "兵庫県" "加古川市" "平岡町新在家1573−1−4F" "0794244617" "28210" "30" "1" "" "" +"670229435785430019" "0" "20000101" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000101" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" 
"28201" "20" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv new file mode 100644 index 00000000..b4c05930 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229435785430019" "0" "20000202" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000202" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" "28201" "20" "1" "" "" +"670229463350395910" "0" "20000101" "99991231" "" "こうだ歯科" "コウダシカ" "こうだ歯科" "コウダシカ" "" "0" "770-0831 徳島県徳島市寺島本町西1−10" "トクシマケン トクシマシ テラシマホンチョウニシ1-10" "770-0831" "36" "徳島県" "徳島市" "寺島本町西1−10" "0886552625" "36201" "30" "1" "" "" +"670229489380246545" "0" "20020521" "99991231" "" "社会医療法人社団埼玉巨樹の会 狭山中央病院" "シャカイイリョウホウジンシャダンサイタマキョジュノカイ サヤマチュウオウビョウイン" "狭山中央病院 (社社)" "サヤマチュウオウビョウイン (シャシャ)" "" "0" "350-1306 埼玉県狭山市富士見2−19−35" "サイタマケン サヤマシ フジミ2-19-35" "350-1306" "11" "埼玉県" "狭山市" "富士見2−19−35" "0429597111" "11215" "10" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 4aec424d..a7982a05 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -62,7 +62,7 @@ def 
is_valid_date_format(date_str: str, date_format): return False -def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_name: list = None) -> None: +def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_names: list = None, force_cast_to_str_columns: list = []) -> None: """テーブル同士の取得結果突き合わせ Args: @@ -80,23 +80,28 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor # 1カラムずつ調査 for actual_col_name, expect_col_name in zip(actual_row, expect_row): # テストメソッド側で個別に確認するものはスキップさせる - if ignore_col_name is not None and actual_col_name in ignore_col_name: + if ignore_col_names is not None and actual_col_name in ignore_col_names: continue else: actual_value = actual_row[actual_col_name] expect_value = expect_row[expect_col_name] # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする - if isinstance(actual_value, (int)): + if actual_col_name in force_cast_to_str_columns: + if type(expect_value).__name__ == 'date': + expect_value = expect_value.strftime('%Y-%m-%d') + elif isinstance(actual_value, (int)): expect_value = int(expect_value) - if isinstance(actual_value, (float)): + elif isinstance(actual_value, (float)): expect_value = float(expect_value) - if isinstance(actual_value, (decimal.Decimal)): + elif isinstance(actual_value, (decimal.Decimal)): expect_value = decimal.Decimal(expect_value) # if type(actual_value) == datetime.date: - if type(actual_value).__name__ == "date": + elif type(actual_value).__name__ == "date": if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD expect_value = datetime.strptime(expect_value, '%Y%m%d').date() + elif actual_value is None and expect_value == "": + expect_value = None # 検証 assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 0b2c6fe1512cc451a1fb68012e19e17163ab8a48 Mon Sep 17 00:00:00 2001 From: 
"x.azuma.m@nds-tyo.co.jp" Date: Sat, 3 Jun 2023 01:51:55 +0900 Subject: [PATCH 40/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E3=83=A1=E3=83=BC?= =?UTF-8?q?=E3=82=AB=E3=83=BC=E5=8D=B8=E7=B5=84=E7=B9=94=E5=B1=95=E9=96=8B?= =?UTF-8?q?=E8=A1=A8=E3=80=80=E2=80=BB=E6=A4=9C=E8=A8=BC=E4=B8=AD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/mkr_org_horizon_202304280000.tsv | 5 ++ .../testdata/mkr_org_horizon_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 2 +- 4 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 73ad96de..fcdd5682 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -564,3 +564,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_08_mkr_org_horizon_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MKR_ORG_HORIZON) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = 
create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_cd_1 = o.v_cd_1 and s.v_cd_2 = o.v_cd_2)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv new file mode 100644 index 00000000..3755e224 --- /dev/null +++ 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003138" "北関東甲信越営業本部" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" "その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"a" "300003144" "メディカル営業本部1" "1" "b" "300003202" "東海スタッフ医療2" "2" "c" "300003203" "東海スタッフ医療3" "3" "d" "300003204" "東海スタッフ医療4" "4" "e" "300003205" "東海スタッフ医療5" "5" "f" "300003206" "東海スタッフ医療6" "6" "g" "300003207" "東海スタッフ医療7" "7" "h" "300003208" "東海スタッフ医療8" "8" "i" 
"300003209" "東海スタッフ医療9" "9" "j" "300003210" "東海スタッフ医療10" "10" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv new file mode 100644 index 00000000..71f9d85b --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003144" "メディカル営業本部" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" "その他営業本部卸" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" 
"0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003143" "医薬営業統括本部" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "200000007" "20190401" "20190930" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index a7982a05..15813e5a 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -71,7 +71,7 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor ignore_col_name (list): 比較を無視するDBのカラム名. Default None. """ # 取得件数が一致すること - assert len(actual_rows) == len(expect_rows) + assert len(actual_rows) == len(expect_rows), f'レコード件数が一致しません。DBレコード数 : {len(actual_rows)} 期待値 : {len(expect_rows)}' line_number = 0 # 1行ずつ調査 From 993c0497e5f786605404e818da0bf6ef1b2faecb Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 15:28:25 +0900 Subject: [PATCH 41/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E3=83=A1=E3=83=BC?= =?UTF-8?q?=E3=82=AB=E3=83=BC=E5=8D=B8=E7=B5=84=E7=B9=94=E5=B1=95=E9=96=8B?= =?UTF-8?q?=E8=A1=A8=E3=80=80=E2=80=BBPK=E9=A0=85=E7=9B=AE=E3=81=8C?= =?UTF-8?q?=E3=81=AA=E3=81=84=E3=83=86=E3=83=BC=E3=83=96=E3=83=AB=E3=81=AF?= =?UTF-8?q?INSERT=E3=82=92=E6=9C=9F=E5=BE=85=E5=80=A4=E3=81=A8=E3=81=99?= =?UTF-8?q?=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index fcdd5682..ef3c973a 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1,3 +1,4 @@ +import time from os import path import pytest @@ -597,7 +598,12 @@ class TestImportFileToDb: # srcテーブル結果が期待値通りかを突合 assert_table_results(result_src, assert_list, ignore_columns) - # assertion2 (update 2row +insert 2row) + # assertion2 (update 0row +insert 4row) ※PK項目がないテーブルなのですべてinsertになる + + assetion1_done_dt = self.db.execute_select("select SYSDATE()")[0]["SYSDATE()"] + + # assertion2でinsertされたレコードをdwh_upd_dtで判断するため、assertion1からの実行間隔を明確に空けるためにスリープを挟む + time.sleep(3) # 処理実行 target_dict = { @@ -616,14 +622,14 @@ class TestImportFileToDb: assert_table_results(result_org, assert_list, ignore_columns) # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 result_src = self.db.execute_select( - f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_cd_1 = o.v_cd_1 and s.v_cd_2 = o.v_cd_2)") + f"select * from {table_name_src} where dwh_upd_dt > :dt_value", {"dt_value": assetion1_done_dt}) # 突合から除外する項目 ignore_columns = ['dwh_upd_dt'] # srcテーブル結果が期待値通りかを突合 assert_table_results(result_src, assert_list, ignore_columns) - # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + # srcテーブル結果のレコード件数 (insert 4row + update 0row + insert 4row = 8row) ※PK項目がないテーブルなのですべてinsertになる result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") - assert result_src_count[0]['count(*)'] == 6 + assert result_src_count[0]['count(*)'] == 8 # teardown From 8e186cc3f310035630e99f597df719d643258446 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 16:04:59 +0900 Subject: [PATCH 42/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= 
=?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E5=8F=96=E5=BC=95?= =?UTF-8?q?=E5=8C=BA=E5=88=86=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/tran_kbn_mst_202304280000.tsv | 5 ++ .../testdata/tran_kbn_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index ef3c973a..d530f3ce 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -633,3 +633,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 8 # teardown + + def test_load_08_tran_kbn_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_TRAN_KBN_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = 
self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_tran_cd = o.v_tran_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv new file mode 100644 index 00000000..cb5d5ff8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "Veeva取引区分名01-1" "20230101" "99991231" "1" "0" "2023-05-09 12:00:01" "2023-05-09 13:00:01" +"10001" "2" "Veeva取引区分名01-2" "20230102" "99991231" "1" "0" "2023-05-09 12:00:02" "2023-05-09 13:00:02" 
+"10001" "3" "Veeva取引区分名01-3" "20230103" "99991231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" "Veeva取引区分名01-4" "20230104" "99991231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv new file mode 100644 index 00000000..0813b3df --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" "Veeva取引区分名01-3" "20230201" "20301231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" "Veeva取引区分名01-4" "20230202" "20301231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" +"10001" "5" "Veeva取引区分名01-5" "20230203" "20301231" "1" "0" "2023-05-09 12:00:05" "2023-05-09 13:00:05" +"10002" "1" "Veeva取引区分名02-1" "20230204" "20301231" "1" "0" "2023-05-09 12:00:06" "2023-05-09 13:00:06" From 800f3eba45a1c6d531e8c63e1db3aad2e7f51a9a Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 16:50:32 +0900 Subject: [PATCH 43/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E8=A3=BD=E5=93=81?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 65 ++++++++++++++++++- .../testdata/phm_prd_mst_202304280000.tsv | 5 ++ .../testdata/phm_prd_mst_202304290000.tsv | 5 ++ .../tests/testing_vjsk_utility.py | 13 +++- 4 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index d530f3ce..47601d58 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -634,7 +634,7 @@ class TestImportFileToDb: # teardown - def test_load_08_tran_kbn_mst_ok(self, mapper): + def test_load_09_tran_kbn_mst_ok(self, mapper): table_name_org = mapper.get_org_table(mapper.CONDKEY_TRAN_KBN_MST) table_name_src = mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST) @@ -696,3 +696,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_10_phm_prd_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_prd_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, 
"phm_prd_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.prd_cd = o.prd_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv new file mode 100644 index 00000000..9c4d4d86 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496201110" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. (30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20080101" "20190930" "140" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496201127" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. 
(30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20070401" "20190930" "150" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv new file mode 100644 index 00000000..29824385 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "20230601" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "20230602" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" +"496300134" "2" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "F19B0608" "ゴナールエフ皮下注ペン900" "Gonalef Pen 900 (1)" 
"008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V019" "900IU" "P011" "1PEN" "1" "20230603" "0" "0" "20190501" "20190930" "110" "9" "19-04-23 16:38:36" "19-04-23 16:41:05" +"496301315" "0" "プロファシー 注射剤 5000IU 10VIAL" "" "F47706A0" "プロファシー 注 5000IU" "PROFASI M AMP. 5000 IU (10)" "002" "プロファシー" "PF" "F002" "注射剤" "0000" "" "V020" "5000IU" "P005" "10VIAL" "10" "20230604" "0" "0" "20070401" "20190930" "160" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py index 15813e5a..441c5c9e 100644 --- a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -88,20 +88,31 @@ def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignor # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする if actual_col_name in force_cast_to_str_columns: + # DB項目(varchar)に日付型としてキャスト可能な値が期待値である場合、force_cast_to_str_columnsに基づいて強制的に文字列キャストする if type(expect_value).__name__ == 'date': expect_value = expect_value.strftime('%Y-%m-%d') elif isinstance(actual_value, (int)): + # DB項目(int)の場合、期待値もintにキャストする expect_value = int(expect_value) elif isinstance(actual_value, (float)): + # DB項目(float)の場合、期待値もfloatにキャストする expect_value = float(expect_value) elif isinstance(actual_value, (decimal.Decimal)): + # DB項目(decimal)の場合、期待値もdecimalにキャストする expect_value = decimal.Decimal(expect_value) - # if type(actual_value) == datetime.date: elif type(actual_value).__name__ == "date": + # DB項目(date)の場合、期待値("YYYYMMDD")もdateにキャストする if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD expect_value = datetime.strptime(expect_value, '%Y%m%d').date() elif actual_value is None and expect_value == "": + # DB項目値がNULLの場合、期待値が""であればNoneに置換する expect_value = None + elif actual_value == "0000-00-00" and expect_value == "": + # DB項目(date)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00"に置換する + expect_value = "0000-00-00" + elif actual_value == 
"0000-00-00 00:00:00" and expect_value == "": + # DB項目(datetime)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00 00:00:00"に置換する + expect_value = "0000-00-00 00:00:00" # 検証 assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' From 5ac63d6f4f34e30d5af701a79e9e039ff47c5c15 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 17:01:17 +0900 Subject: [PATCH 44/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E8=A3=BD=E5=93=81?= =?UTF-8?q?=E4=BE=A1=E6=A0=BC=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/phm_price_mst_202304280000.tsv | 5 ++ .../testdata/phm_price_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 47601d58..2fb187ae 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -759,3 +759,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_11_phm_price_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRICE_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict 
= { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.phm_prd_cd = o.phm_prd_cd and s.phm_price_kind = o.phm_price_kind and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv new file mode 100644 index 00000000..b47ae0e6 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "01" "2" "10060.2" "20180401" "20190930" "10" "0" "18-03-07 09:33:37" "19-09-19 11:23:47" +"114430502" "01" "3" "10237.2" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv new file mode 100644 index 00000000..c5331854 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430601" "01" "2" "12362.4" "20180401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"114430601" "01" "3" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" From c33940eee1e4658af2283ebe9685a62c07a24814 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 19:48:07 +0900 Subject: [PATCH 45/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= 
=?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=EF=BC=B6=E5=8D=B8=E5=BE=97?= =?UTF-8?q?=E6=84=8F=E5=85=88=E6=83=85=E5=A0=B1=E3=83=9E=E3=82=B9=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../whs_customer_mst_202304280000.tsv | 5 ++ .../whs_customer_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 2fb187ae..ca63e86d 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -822,3 +822,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_12_whs_customer_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # 
srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.customer_cd = o.customer_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv new file mode 100644 index 00000000..0c23e674 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1002900000" 
"0" "20000101" "99991231" "11" "1131A2283316" "辻内科小児科医院               " "ツジナイカシヨウニカ イイン" "長崎県 佐世保市皆瀬町29                   " "ナガサキケン サセボシカイゼチヨウ 29" "8570144" "0956492319" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1005400000" "0" "20000101" "99991231" "12" "1211C3415515" "医療法人 愛恵会 佐世保愛恵病院       " "イリヨウホウジンアイケイカイサセボアイケイビ" "長崎県 佐世保市瀬戸越4丁目 2−15             " "ナガサキケン サセボシセトゴシ 4チヨウメ 2-15" "8570134" "0956493335" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所             " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院           " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv new file mode 100644 index 00000000..e5c22a27 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所2            " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院2          " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" 
"01" "1008000000" "0" "20000101" "99991231" "11" "1131A2283316" "北原整形外科医院2              " "キタハラセイケイゲカ イイン" "長崎県 佐世保市瀬戸越町4丁目1298−1           " "ナガサキケン サセボシセトゴシチヨウ 1298-1" "8570135" "0956497773" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1009100000" "0" "20000101" "99991231" "11" "1121A2224212" "山口医院2                  " "ヤマグチイイン" "長崎県 佐世保市春日町29−14                " "ナガサキケン サセボシカスガチヨウ29-14" "8570011" "0956228610" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" From 1cbe72fbb03aac0d99004dfcc9eedabfe0de7ee3 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Mon, 5 Jun 2023 22:50:20 +0900 Subject: [PATCH 46/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80MDB=E3=82=B3=E3=83=BC?= =?UTF-8?q?=E3=83=89=E5=A4=89=E6=8F=9B=E8=A1=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/mdb_conv_mst_202304280000.tsv | 5 ++ .../testdata/mdb_conv_mst_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index ca63e86d..7d06b39c 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -885,3 +885,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_13_mdb_conv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MDB_CONV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = 
True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.hco_vid_v = o.hco_vid_v and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff 
--git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv new file mode 100644 index 00000000..f27cf3ce --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229780011959315" "1" "003410424" "0" "20020601" "0" "22-03-09 13:56:19" "22-03-09 13:56:19" +"670230081112654862" "0" "004101420" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:17:34" +"670230081112654862" "1" "004104997" "2" "20000101" "0" "22-03-09 14:17:33" "22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "3" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv new file mode 100644 index 00000000..fe0ab79c --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670230081112654862" "1" "004104997" "0" "20000101" "0" "22-03-09 14:17:33" "22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" +"670230100414841865" "1" "003636480" "0" "20000101" "0" "22-03-09 14:13:49" "22-03-09 14:13:49" +"670230330673742853" "0" "004804003" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 15:20:35" From 0849ae8365be9de8706bd91774fe45778d61e5ac Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 09:52:54 +0900 Subject: [PATCH 47/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E7=94=9F=E7=89=A9=E7=94=B1?= 
=?UTF-8?q?=E6=9D=A5=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 63 +++++++++++++++++++ .../testdata/bio_slip_data_202304280000.tsv | 5 ++ .../testdata/bio_slip_data_202304290000.tsv | 5 ++ 3 files changed, 73 insertions(+) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 7d06b39c..31717661 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -948,3 +948,66 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_load_14_bio_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_BIO_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns 
= ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_org_kbn = o.slip_org_kbn)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv new file mode 100644 index 00000000..0c345954 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" 
"v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211JD1111311102503851400002304016427519111 496350122バベンチオテンテキ200MG 1V ソウゴウメデイカルニホンコウカンビツクバシ タカサキ 753 BAVB007 000003 000000 000000 " "296" "02" "11JD11113111025" "0385140000" "496350122" "111" "230401" "20230401" "6427519" "バベンチオテンテキ200MG 1V " "ソウゴウメデイカルニホンコウカンビ" "ツクバシ タカサキ 753 " "BAVB007 " "000003" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000126" "129" "20230401" "202305082041" "110" "売上" "11JD" "300001370" "川崎南支店" "" "200000007" "アルフレッサ株式会社" "0385140000" "670235967013012526" "医療法人社団こうかん会 日本鋼管病院" "イリョウホウジンシャダンコウカンカイ ニホンコウカンビョウイン" "210-0852 神奈川県川崎市川崎区鋼管通1−2−1" "496350122" "バベンチオ 注射剤 200mg 1VIAL" "" "1" "" "3" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211G11111377452402930640002304016433215111 496300127ゴナ-ルエフヒカチユウペン450 1トウ セコム)オギクボビヨウイン トウキヨウト シブヤク ジングウマエ 1-5-1 GF4C001 000002 000000 000000 " "296" "02" "11G111113774524" "0293064000" "496300127" "111" "230401" "20230401" "6433215" "ゴナ-ルエフヒカチユウペン450 1トウ " "セコム)オギクボビヨウイン " "トウキヨウト シブヤク ジングウマエ 1-5-1 " "GF4C001 " "000002" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000127" "130" "20230401" "202305082041" "110" "売上" "11G1" "300001351" "杉並・中野支店" "" "200000007" "アルフレッサ株式会社" "0293064000" "670234652241314835" "医療法人財団荻窪病院 荻窪病院" 
"イリョウホウジンザイダンオギクボビョウイン オギクボビョウイン" "167-0035 東京都杉並区今川3−1−24" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "2" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv new file mode 100644 index 00000000..e7f85ba8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 
福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300110ゴナ-ルエフヒカチユウペン300 1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF3B013 000005 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300110" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン300 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF3B013 " "000005" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000130" "133" "20230401" "202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300110" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "1" "" "5" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300127ゴナ-ルエフヒカチユウペン450 
1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF4B011 000010 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300127" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン450 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF4B011 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000131" "134" "20230401" "202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" From 35b56d0de331af4d3eab3f26c1b9472d07776df9 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 10:08:55 +0900 Subject: [PATCH 48/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E3=83=AD=E3=83=83=E3=83=88?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=83=87=E3=83=BC=E3=82=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 65 ++++++++++++++++++- .../testdata/lot_num_mst_202304280000.tsv | 5 ++ .../testdata/lot_num_mst_202304290000.tsv | 5 ++ 3 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 31717661..5f373fe0 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ 
-1000,7 +1000,70 @@ class TestImportFileToDb: assert_table_results(result_org, assert_list, ignore_columns) # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 result_src = self.db.execute_select( - f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_org_kbn = o.slip_org_kbn)") + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_15_lot_num_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_LOT_NUM_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "lot_num_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": 
path.join(self.test_file_path_load_individual, "lot_num_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.ser_num = o.ser_num and s.lot_num = o.lot_num)") # 突合から除外する項目 ignore_columns = ['dwh_upd_dt'] # srcテーブル結果が期待値通りかを突合 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv new file mode 100644 index 00000000..3acfb7cc --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA001" "20230331" "20210510" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA002" "20230331" "20210615" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv new file mode 100644 index 00000000..c7e86b9a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" 
+"F0110601" "BAVA005" "20231031" "20210927" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA006" "20240131" "20211025" "23-05-08 20:40:41" "batch" From 607784bc50a8597647e145b74812d175f578460d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 6 Jun 2023 17:55:47 +0900 Subject: [PATCH 49/86] =?UTF-8?q?feat:=20=E5=BE=93=E6=A5=AD=E5=93=A1?= =?UTF-8?q?=E6=8B=85=E5=BD=93=E6=96=BD=E8=A8=AD=E3=83=9E=E3=82=B9=E3=82=BF?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=82=92=E7=8F=BE=E8=A1=8CMINE=E3=81=AE?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=81=AB=E3=83=AD=E3=83=BC=E3=83=AB=E3=83=90?= =?UTF-8?q?=E3=83=83=E3=82=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index b4b6e4e7..e325d7a4 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -115,15 +115,14 @@ def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), emp_chg_inst_row) - # 適用終了日 ≧ 適用開始日の場合 - if _str_to_date_time(emp_chg_inst_row['end_date']) >= start_date: + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 + if start_date < tekiyo_month_first_day: # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する last_end_date = tekiyo_month_first_day - timedelta(days=1) _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), emp_chg_inst_row) - if last_end_date >= start_date: - continue - # DCF施設統合マスタの適用月度の前月末日 < 適用開始日、または適用終了日 < 適用開始日の場合、N(論理削除レコード)に設定する + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する 
_update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], emp_chg_inst_row['start_date']) From 0390e23ca8cd73e0ef7c9c687ae3d44d89211795 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 6 Jun 2023 19:32:46 +0900 Subject: [PATCH 50/86] =?UTF-8?q?=E8=87=AA=E5=8B=95=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=AE=9F=E8=A3=85=E3=80=80=E7=95=B0=E5=B8=B8=E7=B3=BB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../vjsk_file_check/test_vjsk_file_check.py | 4 +- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 278 ++++++++++++++++-- .../UnzipError/bio_slip_data_202304270000.gz | Bin 0 -> 2649 bytes .../UnzipError/fcl_mst_202304270000.gz | Bin 0 -> 2003 bytes .../UnzipError/hld_mst_202304270000.gz | Bin 0 -> 541 bytes .../UnzipError/lot_num_mst_202304270000.gz | Bin 0 -> 415 bytes .../UnzipError/mdb_conv_mst_202304270000.gz | Bin 0 -> 533 bytes .../mkr_org_horizon_202304270000.gz | Bin 0 -> 808 bytes .../UnzipError/org_cnv_mst_202304270000.gz | Bin 0 -> 508 bytes .../UnzipError/phm_prd_mst_202304270000.gz | Bin 0 -> 1175 bytes .../UnzipError/phm_price_mst_202304270000.gz | Bin 0 -> 529 bytes .../UnzipError/slip_data_202304270000.gz | Bin 0 -> 2890 bytes .../stock_slip_data_202304270000.gz | Bin 0 -> 928 bytes .../UnzipError/tran_kbn_mst_202304270000.gz | Bin 0 -> 498 bytes .../UnzipError/vop_hco_merge_202304270000.gz | Bin 0 -> 398 bytes .../whs_customer_mst_202304270000.gz | Bin 0 -> 1305 bytes .../UnzipError/whs_mst_202304270000.gz | Bin 0 -> 589 bytes .../testdata/phm_price_mst_dataerror.tsv | 3 + .../testdata/phm_price_mst_formaterror.tsv | 2 + 19 files changed, 255 insertions(+), 32 deletions(-) create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz create mode 100644 
ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/hld_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mdb_conv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_price_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/tran_kbn_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv create mode 100644 ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py index 99b31d68..1c523d0e 100644 --- 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py @@ -10,7 +10,7 @@ def test_check_received_files_ok1(init_check_received_files_ok1): 観点 正常系 : 卸在庫データ取込対象日 期待値 - 例外が発生する + 例外が発生しない """ batch_context = BatchContext.get_instance() batch_context.is_vjsk_stock_import_day = True @@ -29,7 +29,7 @@ def test_check_received_files_ok2(init_check_received_files_ok2): 観点 正常系 : 卸在庫データ取込対象日以外 期待値 - 例外が発生する + 例外が発生しない """ batch_context = BatchContext.get_instance() batch_context.is_vjsk_stock_import_day = False diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index 5f373fe0..a648724f 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -10,6 +10,7 @@ from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager from src.batch.vjsk.vjsk_importer import (_check_received_files, _import_file_to_db) from src.db.database import Database +from src.error.exceptions import BatchOperationException # from tests.testing_vjsk_utility import create_vjsk_assertion_dictionary from tests.testing_vjsk_utility import (assert_table_results, create_vjsk_assertion_list) @@ -20,6 +21,7 @@ class TestImportFileToDb: batch_context: BatchContext test_file_path_import_all: str test_file_path_load_individual: str + test_file_path_unzip_error: str @pytest.fixture(autouse=True, scope='function') def pre_test(self, database: Database): @@ -27,6 +29,7 @@ class TestImportFileToDb: # setup self.test_file_path_import_all = path.join(path.dirname(__file__), "testdata", "TestImportFileToDb") self.test_file_path_load_individual = path.join(path.dirname(__file__), "testdata") + self.test_file_path_unzip_error = path.join(path.dirname(__file__), "testdata", "UnzipError") self.batch_context = 
BatchContext.get_instance() @@ -80,41 +83,72 @@ class TestImportFileToDb: key = f"{receive_folder}/{test_file}" s3_client.upload_file(file_name, bucket_name, key) - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") - # self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") + self.db.execute(f"truncate table 
{mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") # assertion received_s3_files = _check_received_files() _import_file_to_db(received_s3_files) - # # 検証 (卸在庫データファイル) - # table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 10 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 10 - - # # 検証 (卸販売データ) - # table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) - # table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) - # result = self.db.execute(f"select * from {table_name_org}") - # assert result.rowcount == 10 - # result = self.db.execute(f"select * from {table_name_src}") - # assert result.rowcount == 10 + # 検証 + condkey = mapper.CONDKEY_STOCK_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from 
{mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_ORG_CNV_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_VOP_HCO_MERGE + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_HLD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_FCL_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MKR_ORG_HORIZON + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_TRAN_KBN_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRICE_MST + assert 10 == 
len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_CUSTOMER_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MDB_CONV_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_BIO_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_LOT_NUM_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) # teardown for test_file in test_files: @@ -1074,3 +1108,187 @@ class TestImportFileToDb: assert result_src_count[0]['count(*)'] == 6 # teardown + + def test_unzip_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : gzファイルが解凍できない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + # ※.gzだが、7zipで圧縮してあるので、解凍に失敗するのが期待値 + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + 
"mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_unzip_error, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + with pytest.raises(Exception) as e: + _import_file_to_db(received_s3_files) + + # 検証 + assert str(e.value) == "file could not be opened successfully" + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_load_data_error(self, mapper): + """ + 観点 + 異常系 : 日付型矛盾のデータ ※製品価格マスタファイルで確認 + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_dataerror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("SQL Error:") > 0 + + # teardown + + def test_load_format_error(self, mapper): + """ + 観点 + 異常系 : tsvファイルが途中で欠落している + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_formaterror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("SQL Error:") > 0 + + # teardown + + def test_s3backup_ok(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 正常系 : S3受領ファイルのバックアップフォルダ移動が完了する + 期待値 + 
例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + # ※.gzだが、7zipで圧縮してあるので、解凍に失敗するのが期待値 + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_import_all, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証 + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_s3backup_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : S3受領ファイルのバックアップフォルダ移動ができない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup + + # assertion + with pytest.raises(Exception) as e: + # 有りもしないファイルをバックアップフォルダにコピーさせてコケさせる + received_s3_files = [] + 
received_s3_files.append({"filename": "dummy.dummy"}) + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証 + assert str(e.value) == "An error occurred (404) when calling the HeadObject operation: Not Found" + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..c609baff8d5bb8aa1df1dc627ae1f42d55a619b7 GIT binary patch literal 2649 zcmV-f3a0fpdc3bE8~_CBuppAJ2><{90001r000000002GB5Tj!L;nevo}N7H!}`6!|44bB1Zd zA;|%jBml99_4nA8eWK{mQ|@aUvr5q)p#iS!JzLh+jS-F0>q!Bu{9h#Aoj1PvK-DQg zi?9bnmd5E++p12iIy(d3{*^a$C5|$R?1=2i^oYv0|l|J6A+9t?vymZlos?NLqJ)*&N?E0 zhg-e=Aw*J-Rp~pNmm&&7Tk^5diW_I8v=d{v#wP*|A64&Pjbodn9>ckgn57f%;I9w` z`lkg7pF{UzJwO^w9q8(b*4ha^rvw8!hrZORZZ1S#U>-KnSbVR4lO}q|=@7wfHd1nf z#zpDD6wYe*d4_F62!#HN%?=-DYj5&ekvY-XsRK3UPyrj<-xZZuVU*2&=XHn~ge42V zaUIg&;+WL~&@Vw?&sU7M06xLxk37`OGDPq83lmzz75>NX zPUt|te?LLOLBoz&T`9NFVIfl_F2-I~@wX51_d~arq-=eF(R&~|=B@G|&gPXm z;3RwHhlA@T5D;r>;jff_^YAiq))e$|aLFTL{IRj8h+R;F6vOlX&a^~p^!SRrfE@rR zLAz>Q)!p}5e4rn?%9jKJ9Curv_mb*RJr9x<+P$Q{xW|J7+5dGPV7ecS@7}s6axR*< zGpw|GNGxPv?AYZ`mxWIu%pJV(@uUW;E9ufu|6-3WEiR4;tNh*#-Ds@D3Tdu#hewR& zcUlyKZ;YjS8b`AEck*u+v=t;7YTypsqX+VlrRPBlLaRMr#$X*nago$1J5V&*>hrz; zQ}v~v9ous#a5B3 zihpgfhH5Yy;qIqas;n*E?OcArL)<;PmQfD)k!s5~qN-q~6vz~2!wboq#&nahQI+=grbyO?+s1UTDRCVKFk7?< zLOL%Id&}H4J{jP8Ubl}Ukflf&&=t)#VI_nHp?Bnx9Th@taa!iA_=Yl@%)57 z*Ic7(SoB78L<7yZz)8z1ON3`VbLCy$IpTi_NQjk=f>1FXo$23!*234_ylNh{|FWA7 zeONyc7YndnfOqIJA-CQQ^Ch|_sNvlOFIwd44&Z(H6q~@PK!p-)on|;W+5bYM3xk1; zijOge=hlkkZQV7f?5+g`IuGwC(t58FT@q@5ZM0ARhd~Ed6fh2W#E%1#_Ei}l+?Fcg9tLI%kWkoR=Zhdd+QrU&WZ9YldCSYB>H_sC^eEXXZr&Tio=pl$ zMTYH3t<1_B@yDHMH4(Y|BaubYW~fv(K|Amveb9)XwIm0o>8}h03fF`s?39Ec`e+&A zg^TTn=@$_vb`owMPUEGwS)O_-Krp2napxc#IX*>d zD;1U?7e7YmbwHm!&aBQ%WTk?M9jtz;7PxiEY!~t5E}QDeVxAQdId@#+On^)LK(_E1 
zqd#t}KuqJVk47dGtJe`KeuXSIN;~zNP6c(2ND{?#lVV`e)hQ0RhT~ztY*b@UH}{j% zHz20;z+RtTm+zH5Y8GG_3!wozuIqUV!NHE^mWvb<_gOsRhf3qoEPjlK6Iq!3 zX}b6by{T@(J`NW7B$G3g0lHC6aR4q?nFrzo-@Bmh_sJ)-MHu86r%REf=mf_{9N5Qt zy1AlqdCr-@*S1G+Ok@GuLHMdFry&fc9l#TL(_@Cq9wo!OZxfYi*wd+~mEpEVcK{do zNtG>OU0NzDGnoFv6AO}|Akq)ZMF>dq8@P1)HZqMC%kXa6OWGs+1hjGMvQxFcO3Rg3 z>~)dlzT9pwl0gW4oG|zZ5;u@HXn3K>b%9biv&jGE@hT9f1!9Oy{@FSxbxS^*zWJL7B~!zHJ&&nnv<@3Hg^ge^(IjgH`Rg8f`MP`EU#S#e}E z!9K;^z*au7lisX?RkljrQ_YkTly&EpD9Xy(q)6s#I|cX|IXj9PPk4Z+Z<+K++10RSKX H00000Q4Zck literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..7f532e3b495ea3917ad7209461f48275b23036a0 GIT binary patch literal 2003 zcmV;^2Q2tEdc3bE8~_ATkz9m12LJ#70001b000000000D$omW68~+C}T>vu}$=Ybg zKS(NMs1ba1$3U6Yg*cVK^I7S$l??jsA_&s9t!8@)(gVV<48xNp`>0y;N6li5^G&s> z(w`j%W2~w}7?mn}+_ZKl)3=&trzk5KXRoy{CCwzz^kCk1f!bmGZ7u<3m4ZSdG|hxe zZeG_eoBUP?3wfQ4t4T_fx(Db%H>W7xUwfi|C}7qc#@hW8bk7TLZEgCxQMccGvO{|K zH*0lR%C;^115Eu$w2pJ61?Yumyp`k7G^JojBS)cDm7F%5@umk@%sT+CID9@d9z+Bz zib@3$9Nq(ie74YI(GtiWrx*zrnCtiZa(vn}%}v36?zukFNQ(f2*6yJ)3A618gpThH z)qjS_>*_nj%ydxOgf{uZHlu07XyO06-$g7=92Z|%7B1TnQX#$JX^nY#P^B0#-3y6V zzaI&9FO7WQgC>iAOG|ObvNxJYdR%<`@k#li*Tc&Ef4Z&%D{&PAgis9{{HCXA0Z48G zDbEf6BMl6ia!I#eJtM^k;iExkBCR}u6|cV;nU*Sw&t2Vr)V1vX(qZ{p3>A|nD6j~L?|D8;Ba!o$u~k5JOb2+` z7Z!$&H8)QEmKgZcFPaZ(RKEp*orZBlcsh-TXD($mq=IfD2?^Q9(Xk%l3N;PKtc4L- zKSC4Wr5Y0z*n4a?z4j~Ai0HX=_!k5OO%-%34I!o%p$D|N3eHhSN%EJFAr(c{#K=FD!L_S2jSHj{qW{i#w8Osoq5sSF` zLuW`+{QpNqFL;2FE~y$(n*0-xuJcP+VuH5>*wOPtKzfWdD;Oi<@vNTBUM69zd_bb( zj)S}|a6@Vmd2gDU#D>4$DV(OSVIlsED6*3wh1t0!JCeP?=#=x{aY|504W&YxEA|_X z4_zsIVa%QNn+bHHjJ4r~2eNQPq4J{){7^TgV^8!FM_Ka4f(F`ecaO>;wzp1^g@6L_ z{^W?~Crv(35YdbkH9M_m#aGb3ZlZp(ORumKo5j>x*=fx=JKh}-E?T8pFc~CUydJHb 
zhOuq>r)qn&HhiS>8F>7qYE^L)ti(QXlfJAuIqO(WTwg*793dX*PKdUnBW-}mCvTEZ z+Zsc4rgE;-$~f@kJfm04kZe1fL52Z=jk4RFQrz-~L}mr6vITF{@tbdesu}*rxYmId zmkok~_C!?l4fIzu;b#v63Lrp3fA=1mGM00JvmLQ8V;W)bzdzBW+?$2K;p}2W0&rtn zFY0rjGUX)bxkB=8!Whct9Eo9ks;gFAFU*~dJ8)jB(rNnN^pIFn6~_&W;k~P*fq#mU zPSn9K-d_dmU1OesB5MgQT&N+ou!wi<2V7G0CaICB)XV_y*^jrBCOLY zz4J-XARn2VioD(zN+UlT>OjtIzf%lQM4;iFp^Oylljx^8mY=GLe-Wa|$;_13)9UzZ z`=XXkEg9%Jb3)p$K*Fg7mNns}8k7z*eozs`JG%H%D}=ie_1l!NaI~uSb$YC7DoE`# zVg|4|Q-=_T1zRR(0tN?b4eNCPo-tm1hEM)a;e7SLH}4}bJ)@+gzoH;yl(O4}3eg32 zRtc2-;WUeS53;J;+>2+@GAKVO}; zE=g`7o?}-qxMflL#W7TZ=v;hN5}$2(>+7hcGOtS53BE3MprR_-8rzf`+`&>RL<&;5 zS(isjmZ{RGR?%}2Wj-fID6$I(VRg@P$rpDyL5}+Kxb7vHvse6P*(K`!az#r#bmaIg zr#yoyXp0R#pB0SSjW00#>J00AK(0Rjx1000OI0Sc-z$p8Qa0T~Ja z000000000001-0)W&mRVYye*XZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(BFaR(B lE&y}@VE}Rf000>P6bbffd4;Zbu5F4G{^cY?e-p8DJukPN{>6t zyxM9p^}y%Imz$^Z{CcOvacSGy+`e8{>x87(O{QEiCt}qNG8ljVGmWlL?$j@`6=hj- zrn|n8=S$1+zL{HBQ&c%XqvQ0%|4$IZjW`vVTncr~Z$ zn$^0M6Q4?}tuEL&|EKgxPd)}l7B&V(&c;jHZmD{=85md@ zCApvx2BZXy88R4h7*ZJG8FCql8A^b(5rY8`8v{ubAZ-q$Ay5w}o5)ZE)FjCu!o|qY T@c6V|UCT{IQ8q>f1+a+#BE!|7 literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..821eccd466b905bf80dd76d301e3fc124198d007 GIT binary patch literal 415 zcmXr7+Ou9=hJnQ+czXF?1_)?^(rr5&Vjpn+XZRY+U?zR)U7?hcI@hH{|VX9u-=XKe36y9`r@mgK= z^qVvOJ3Z`!uVkcLk~?Om_vG~7b(b$TyLM_{2!4_LafRxW1(vtwU$L0A%W0O00?W}Y z=F(>$Rh)cg^IY|L`otoOpquRXPtDo7Q!2Z~VrMA>10xF?10!d{Uj}w=Mg~SjMMegm zE(Qh;E=I`~u51PdRz^uKXixzuL2HH_hJ1z+hIocNhEj%HAf3xl4CWg#7%&(y7z0TY nAZ-q$Ay5w}o5)ZE)GWy$!o|qY(3z=I*K(6ll#P)=0c~w=ef?Nu>KIY#xD?H7A{`BuP2R?L6JT$q}XRFAw=kq--eR=(CO74vLulBns6=WC@#!{a5UT=lgwS4b$YL-h5i2C#V@!ENR z%X&et!W7A;0?+h6y=>SdP@z#{Ca%TQnYqT`ne6*LGs~pv`B}I9lMdLr<@&*Q8Y<2& 
zZ)Kmm=zG*zDX=J6c*6DG{>8~l((9k;#J-iArSM)<);?cqGixU2SFyvnE90il(vxrI zFZH~&=&sYT105`4=gpT`A98q6Ji~wclG{n%e;td0!%D)P7*n66t_|M4fBHhkM|0*s zVPIfnVPjzAY%FGA=VoMJR8(YS;OSvt;NW6Ba6bDm0|P6gBo{P-fRvyuLoP!KLlQ$g zLo!1?LmopJkd@0&%uoWPjTj7o*ceEf0BLg|4S{+<*+hmSpmq^1MurA`cCEUWn~b7t Jj0_53lL2C#&i?=a literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..d6327182e541ade13aa5222abf35750d251a2f3e GIT binary patch literal 808 zcmXr7+Ou9=hJoetg8Nc+OblSq1)&(U!$oBt$o^+4i)ApIb!t_ya;SWc`v;pf4GED3 zPFw}NN6xeO3pTzCzVOE&Qo+j3&f1x+MAd`aA;Lg7(`TWk$gbB3f&GpeK^m1U1&+-72P{oM(N=j3tpC`{Q7X~gr)tW zzDqwh2?&+>Fo}K;S~tPG_HOT~(>vcC7T!_adivez<83SYm3Do&9kFG-$yUP&eAY^q zyXMb4^mk`O>HcZ!R~2l#@B1*!ESZ0*F#loCL)Tx6w^JPyJ z_UI->*qrlup=n$C;mpR2MHOL8PM@0Bi6kdT)imFi+{mu2_*hp!@lv|(Z%NKV`II#A zIm?be-W(S;VNtSTnv%fr@VEN;dBEflM&i^-*IBGY0>6Kp`hlZ8yrLvlwv<8CmY+kA7+j-06< zy6&Ax-WjNI-HT_}_ z{shjLutetR0|o02U(Tgr8B%SzKN%QV*ccc&o9YGAMv;004SbOXvUq literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..1efc8e0f44b3a75dbc3d87909461e23a1dd9f481 GIT binary patch literal 508 zcmXr7+Ou9=hJnR#wD3-x|PN!h(6vh|7_smk;c6sUS z?2CKUmx+ZoX{}3H7S6(*{FpoH+@H2L9jx);+KcTDuK4j@!HA_-B~bkNPsPaHp^1O5 zyh+zOab;27N|m;bMBf|cEFE>ih6lA2O-^?%KJ6=c`gx80Jhqi0T2tQtbCu_8U#s#| z_wzsTx%Qqf(#P44o&=W|U{3tkkS-apYY)Of!K z|G{VRiVV_o)S1s8-uWr(!@51jM}ACaSLL5sTYpIR_{*)cj?b9TziZvsE0MyB#8S;Q zwyLbU?P(WRHd~VGi)uq-?>~pMJZ-~kAEwqyC$;?Dc5cyb6Fbw-%lcY=B&!RSv`yF~ z({N_`#P!mxK7yLJR6gaJ*qqLru=kpg?w`Ot8>$uW3sxz=I`d-6F5kUe{ol5Hj#gZn 
zrCgl*e`B24=I5R(9_(4WHRkO@{gb9PTRN2(7+KgD7&#lG7}&WP85k8685wwb7#KLX z7}uLugfK9$GD>nm;{iwsS~KJ`6fvYT#4{u_1>0E|lh7urc#9#o##z4{pNSgy` j2-E}0CIa;^Fi0|pa4|A8R6D8GwcKPBWn*Me0GkW|=Sjg? literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..c45076da977e36b40f0eaf997380aa19e0be62e7 GIT binary patch literal 1175 zcmV;I1Zev=dc3bE8~_BBw@YXB0{{R30001j000000001AoC>$#6aNG4T>v;53HoF+ z&@S2Jrr0QE?D8ym#8}7ee4+>R!;{%NTS~jgD*WP%kP&<&E-J~ zg)?oPJtV|Hp2x^k5>oGD0d%7G5lrFWyF`h|8!gbCElES@8U-MQ*WkOhVMj1v%)sRV zk+gTXUJ8D3Ry;!0i=UyRC&MCp0uV%+qEjb{fPI0h9z1lK#Idx{3S@&+T+bcfIWB;jJYd>>G`|4uN0tle&~folAVu|^swnMKXSge6`*{w zPvz%!B|My(%wa9hZ5fd`8P8JLYm#)fx1j&YmcuYK8O>1!%1mxxO)~MCMq_GDojUV| z+tIJb^5TUsw@rIbA#WuFi7eSe=>gl1@c!|(FTZHUY}jmSk&gJ=pnEVm1S18e)Dow8 zVhEvW|w#99o4T5uC9+LSA8&Qo-7nU%G?7S^J7PQ z9N*2c&DDzi=U4#oLv`djy@ph4ZDRZ@kPxpiB*!^vmQ3 z=X?rNZ*-fZXZP}b00D9WPsUwAq!R9FECbj)E(3TLZfK~_S>AUEN`dSzmk6Y;R63ZA z7ZYE}K1nBx%-{=QWmgq`BP*nP4k8XFYjY+Ft8%DO5$ljIf;>j!1XshYH6p)rmi@<6 zPKKfhNw`AOAOgNMw>6%^ph4`mf`#BPJ^}yav#+)lx8Rk?bl*t>Ef-+1X?oV*FV0lU zaE$l2Uq5E7zWzilLn-7*4O0;f4}=fkBamNqBJO=m+H*rEuB_Y{<4D@NV{A7uwmVq- z7HjQa5iT)n3j^A zhy(Ky75coj}zC}v^%mb>JWpi0l-GvO8yxSA7cNgm`3v^t-Kz)<(ope`gD#* z=Xzbcvc%_rsV+Tqm za+gJSG_X=t8o)P{>6KUE<{K*O^WQ(?1LEI5>x;IQ>SG!SHFc nqTiQ$(3w<5Ls2 z9Bm2JEhoy5lu-Zx0R#pB0SSZk00#>J00AK(0Raq@000OI0okdpP5=M}0T~Ja00000 z0000001-O?Z~$lkZ2(^YZ~$@uWB^|PZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(B pFaR(BE&y}@VE}Rf000>P6bbks85zK!3qmpEaG%h8!2O>wK9<2kit|^B@danGb1&^9ou16R zG-dOH-WRSPzF(<~Q_oU-QkR_KE#%G7pRa%Lh4Hz~lbsJsn>053c=3PdJWHwVhj%Qs z@_9LVqq@Ke&4>N_LCL_i_s|REtgeEx%xP=aD`swStTX z4tq7e%q=RaPr83!()g)KzrC!>)qF`C7Iryk4vJUc7JXm@5}_oh!fISir}t zy3Kl)_?_VEl?JBYp5Ds4oU{FAVz9mCh2vW6)j4ZU*z{Wn6wX$D^Wt?l)2vk!S1dfv 
z!GHd-W>HJwRJM|>x8@&vAbZZ!{MR4?qCBW1d_1qJ|FHLOp~s%bVKNPKKX56e9~0d6*JS;?zy-E7Q-3^( zxgD5pIa}u4m&RoaIvmy~ZoJo?Y#QYdd2n&6p+Ka+;{>nGg^P6r7#LaD7#KMl(;3*g z85tNA6&V?LdKefuxEKZazXD^0l~Ixl8Yw_Z(4L`yA%h{8As$Ef1&|;E07>V` AX#fBK literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..9c2dc3ad8b220f26cf496de8e62e50a8c31e1031 GIT binary patch literal 2890 zcmV-Q3$^q&dc3bE8~_A${EO?T3IG5A0001j000000002Gkw~N9NB;_-T>v?o31RZB zjjL?HEs`wV6&99nWD#rdJ2+Du{Ag@I6IJ|;VedSs$UwB-) z%l9JDE;s%nQzq~*D&e{SX@fcwG?|0{&(VkB4p7mN>%4lB=aL*)BQ2-0rVUAA%2U%Q zn6+v5aBMh!U=m2Hd0R>IZL*&Yj{xUwS~ymP7N``?3R|s7w2p7yT4D%_X|+gkGvW?N zcSq+w>A#((UQ{w;mk81NRs9BgsNE6rh9C^yc;v$<3u}Ouq5-o>y*U07;J(-0>=1BvKgyn=nvSFe1u2PhYU|yawdfzg6f7 z=xyphyn_WGlZ-j8Z^_up>M!}md1PJLj}fwsVrNzTW>7mkCUu;&e%?|;Hxa%#^Rgyg zv*>gow@?g!>XGjPUDgD-1*UiO&hbsWH`2rUW=_{jX8aSn6WV+@7NWc~gt^&*Ryc_- z19a5vuG<|Kbd#H^3|M7Ma$P|>agrZmQ=q5ows`^HnXfgGrn)^O(N}HPh_~*%+EDWj zY5^CHQpyl0GD(LXId$e69$0U_Woy?3a)+YYT$x;w!V14 z!7X20t&+@bx<`sNbp-E5JlbXfOpv9VmL?L!QB;Bx4uW)*0gZjhho_*Z4VHMw$+qFq zJOZ(v_h^~(?g0Qbevy>zY3tL>K2kS}#2sP46O@$`QWgMB*8^>lFUGW>Idih4I<9U;el7ly~j4X;mlx$kfM1s5%-C4$J8>zG127UoB8It z%Kzn%_2(((RK{t&cX)QI^VlJHe*Sq6Dvql5J%abZvtez?yXSFLI(ZEnkh(y-#(MM) zEy)7n28U0>oA*5;5>S4*iKiV6_fxj6Yk5m*`gV5^15N9`RMBbj-pDmoSP`OaAf6SW z9@Byk4t~J#bisL$W;T!3Jh(aE{m8PZqGQFXx2E@%N)`LmPtpA$$^eWJF1Lj$ z?T)QxQ70V)3(bryZn>(*FqU|1vAs|JGnX)T9q|GKfX}>Q7)QFV*#tt@UXNZe-FNy8Og4FtU@u z^>Dci?P;(1dsC`3`*IhSl2(gNkOPWbJznj^kTct1%(`X$Ez$CwvoA-tKjOgi<-iv) z7l?ts4}?Vl)5;R%YVez@-%?KlAbov;^7+Z!0d-AZb*P=yeA`$16pORe!)$q1fB~wW zHpJah#cnPrVMRr}ulPM^LiWZXaY~PnCG51<)?Z|06~`E?RE;`DNrB9>j!S??BiL(y z@1(U4Or!)VY(M)Ea$*Xh5LYgS6}9oOXXW1+nRy%4Nh0|zW>VTj7`ZMdSRtl+gA7xd z6XpVKH87Jl3aRl<&R+I6?0Z?m@kHY-85Y7($`h4KY;3v9$9YRzy|8&2h**F@LSGzT;zfM;&gnp_VQ6%AUzro1V&m(vzM 
zXf9d5^p}H>cuD=v3(EruAri3gnW+s}T*+;@+ywhm#j|Y<7BEnD(K*^~(=TG}5Q!g% zOT#8c%-`Bby@k<z=tgoanCyaE3QO0 zstAPD!1ZY?V*hRfdD^6#CE^7`5+7SQcmgk1gG1`9&_8%NTnpb;hffzRbAwQITJuhP z;_ifm9l?6!31DMMpdLfEH3$lwze3tRCegv9vN*uI>y-fAQ8Af>&hU~ti7s>at4sSA zSf{aD__30;;tm7pvNRwAypjr*?eBqgmqt7xl@XsLPkS@7iILQRTI=FPeXTxZDzC2O zm{w+7FK)%Pklm&l^5J$w5f`#3D06atW~LFT3NH>p7TGEcUt z(l``eqb;=!cad0L{?R2Q+y@_0vGw^YH$uQi3U{YtO#hZCD0rb6-)C4QSkV?v0W+>@ z_+srBXhn3xz~#n=N5k>N@}nUb{|&6@NJ3vJFJk30FQ3=Mn>dS9Pw$C5+vvCFV}pjb zipp&=G}p>I?ivlXsEGl8$EW)Yz{SCRPNqvH&b@*3;KVS8rxjI~T+}bJa*Vc}b01C+ zpu(hQvJrX+@(-BO(@NvTiV7}1PhnEWO7ju-qIEh>ThFrQzEZb>58XotGsAO-!{azn zpyT4>Y;3A2i(ispT?a~FZLwo+euoD(J$0^XgP3Sv20l>6z*@6uEQ%<)^wayu1C1z> zwwQdT19~dpQM|rS#b23lMWs<6Ii1NP!?X~>%~R)f%KuVCnhMj=4SP0p2E3138tgIe zURopdqGg=?_*lRGt>Ji`_L9pS@^X!218qr!_9WUvDB3b|EZ%@6EAHe1x zQDKy^y0aZM%h|a+KxnCkzcl~Tm=yF(S3wV<$?792rwMdbFdQ7q3;Ta{Vy4hGcwsM-8S zRujJ1i+8MoUSI$L1O@;B35uuy2MYlJ0U;p)1q{FdNB{^50hzg-rT_p10T~GZ00000 z000005jOyH0Bith0B`_b0Av7R0CWIh0ABzy05AYD05bqE05kwH05gxLWV1_1yd000000O-hNQ~&?~ literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..3d741b23b8d06131ede9740849253dd80d84c9a3 GIT binary patch literal 928 zcmV;R17G|%dc3bE8~_9u_yv?q3L&ZC zEPpun`r`{FYty&VXAAX8Wl4QlPT~VK>FX_(9eL%0^Ps4)<4wI!%pbk`U-(aQy=$&^ z7(B_}8$5lDpAZxK(b}Ht%{Wi~Tnf78a}~Pef1$)d5OLTz$7IsngT=hX!p9@Q$Bl2( ziflrX0zrW-rXCa20O|a_mp`}~gFT#d0MW1zSZBwPE+L2i!KPe*6YfxTZ7w3|31vFq`rE3ch`ju1GoC1T>29ehZ(Mv*1;!uvGJNqZlOP zap1I=$nO4Zxun%M%o=LYlHo%{<9>k`%WLds9o(8)?ee8wu&=enC}X3`NnJU7i*exf zZrvS*1I6?jMRZYLX-~~84647yEm-OFJUX;phqoigF&ne$%v=IPRPd!f7FYVpI+w~Z z(zmR1A0V1)3RFY4C6z zX4Au(7!uHV8zW6!VjeuSWjmxfk#H&3`N-q9FK$OZ42fZY9*WTKaRpv{0k2h@E009IB009Yt_5cS90RRCZAprplm;e9>3IXkjO9ub|1pyff z0000000000000p~0CNCz0B-2RCEI`hR=_8SqAU;jL6 
z`^6qDv**&Mdim$7XYQDO2|F$iP<(wB1!j=2r+Iz7W27%PL`g>BhWTs{r zON(nxNU)wgzacPrddNvjEe7?9MXYt7%Tl$=o}{*=KfQ8BUf4Rt<4Z_v*`76BmBnY< z-gzaRVd7jU!My+FtAmdZ;su5c8))Tk3&yHM)C_d1E{1xp?si{=XVQ=YJyhu?2Q za@UKhkUy_}ur%!2Uv%*7ru17;nzyC*38w!!FMOYE#Y^{t`@WnxzPDxnv~>5H+WW_& zr}0HvIX|^{WO;dSh|yw^*+y#?PPpFnzLxRVo2a&J6z1Z^2g7>XDY8S)t78L}CYz;rG{F;Fy~!HB^Ch>d}y36M4i(h#TzluZQcVqg&A WVq|FOQI)7`xydNX#>k)mHW>hN!MgAO literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..a698e6313afebab9adfe57be053e1b8ed0a6bfc0 GIT binary patch literal 398 zcmXr7+Ou9=hJnStwbkYc0|azI>0b@Mwm;zf&u}M}!CLxMTFE2HS%2)euIm`^&-1yS))~!mbX=>gF79|?`E?bj+XSw(Jb9(z@4VY|C zupUZC+IL~!`N>+6pRVl;>{uk)W!t-*>rJgY%frgtMa&frdzwS7GH)Ldd%fXhPjr&> z!^n#L+r@Upe!6$W@mOKIOXJpx;$0ulPT5fWadrO;=j{n=kDl?#Q2({D>`%wS2#u}k z>F2V}ZZYwk6R>s5jB8z29}BIjIh**cY0vo;!Atk*?GwMFc!`X(gaAG18E41XV7CPVMqk3V_=YEVqg&A WVq|DAIxJe(a+6V%jgdhCYy$ue&yYL- literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..88998e266c3d4bd6bff2f4cb86e957988f50d879 GIT binary patch literal 1305 zcmV+!1?KuUdc3bE8~_Aw7im#%1ONa40001r000000001nHI0to7ykrjT>v|p3e0h| zE7|hh_V3-Wz~ik>d2)l$BqOQ2auwXBN$KZk&pb1F-Mhpti{42yhP&J*#uB5A6*le2 zeGf%!{Uyo><>`Yg_(Ib@XG7x5%2$HV)Ue)>rfe||TTFI#c?jI_Q#^}Cwny}`$G-H$ z@i^##6oTm>dh0e=BIpfWf$(*FqvYHXRUD8fcG6NGuU<`gRU{6Z<;ShTHd^H>@$-wp zVHKFCFc5PXB_VTPu08>z_Go8TNYPal}OT=)=0|FlXBOP2V_i6 zX@L`SgSd{|*Eh=~1{a?bAE&;SLws}OC^9=090;M4_H9=vL)U?jiX-~Y^>|Ew=)%dR z3u90k=D<`(L1V#JKP8%dKehLp7*vE}#sm8>2C}K(uf9 z;w^+X4Glz#=LoP^CMLQR8- 
z!Yw`InaenSWae>Ik=08-W(v?h9U@kNSoxbKmbKV0R{$R8p|kU8;@$sgCn(jog|4z{ zk9reeM=5{21_B4T`w9zXl9j)OiZe*vTmdnfiv=M(xRI$5&7c%;2qXy~*KXi|HtVG& zUQbqQ{umzZ9Ry*p^PA|FJo_hBA1SbBUKNlfSUa-Y2*ZS`=`26_=FbbmE@}r9p&qpt zx4TA%U9QZSzo(w$)*q;L7eyGe7Ewo|_e!30F{p38SH{esHl2!g+~ zW+qZQKDks8E_-GB9u-iy_j4;nvSW8&2FB|2V3kIoQtn)?p`K3XH{T6$isg{N;TmSS zB3pAl>8zBpD^zZfHprBd2oKHopS{RCuLrv1DLBmgloJiAP{62L17d|u@ZfaPK@u)A z4y;3ll*OI~ztuNK(dg$(dXhM76trHE?YtV8tq31Ywi()xdPbwP@+M=SN>fz)o<^sC zRAdEw8-BdJul7{L&;0p8W{8P7!4xVoluUr6|EZ(Pka*v*vxP)YIlfMbE2&u5Ba5bn zGN<7$TfWww=1VaHjWcTGGC%tKe^DP*n6G_A+yDUt1^@vGgl_-`3jqKDAt3<)4441_ z2nqqWr8oxw00jXV3IG5A000000000HMF4jIXaI8nUjSnObpUe!bO3JvZ2)BeasXcd zZ2)rsbO2ufG5|0DG5|9GFaR_FG5|LKFaR(BFaR(BE&y}@VE}Rf000yU0RRB#We|RZ P*#Q*>0RSKX00000NnT7o literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz new file mode 100644 index 0000000000000000000000000000000000000000..a97fdb31ea2712b7afc6e043b35a28ea0bbced1d GIT binary patch literal 589 zcmXr7+Ou9=hJhvN(o>zyj0|8<1)&&rhh7(a!26$Zbu5GREUq&}0vD&fIQ9A0Yi`Na zfB!8m*ivNZb>6)_dtqhHmzzRCRxYNB{;P$;Ro2cv!m#^6(lgr)d0tP<#Fx)^*&pe1 zY%81QMn8w6>-I0Wy!+VWIpQbpGJBZa5ZkrDUgY2ZC4S24J_3@g5nr$Ng!<1p@Y^JG z<8B`N6Z~RHPWHtyoVUf9SKQQYyQ8>xXR3_F1(giO$LpjPX|||~&3N$oOV?>xalf3U zDFG|Ldi|5_-|h5_{hah4Kle2`ebSwW@3efK{bafKRA4*4zAYwUhkmL)i_NGz=#+ zS^TfNSDp1Kww8hY)e+|vzJ?l+)|%TY9$9syeM(tU-0oz&Eo-KK_SM+kxB0*AJGaC4 znEsUK65C7FZ>;#G$nyK)117E{7Z$GhuM5~ZgHO+2zu@z>Em~J!t`aksTj514$DgZ4RU%P!A}Z$WR2-B*`Gc1xzb_ S%zSk%HyK6Q7#S46CISG2=j=uR literal 0 HcmV?d00001 diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv new file mode 100644 index 00000000..3df124c1 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv @@ -0,0 
+1,3 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "yyyy0401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"123456701" "02" "1" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv new file mode 100644 index 00000000..20ebb14d --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv @@ -0,0 +1,2 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "202304 \ No newline at end of file From 0bdcc1fc4afe44302f6d94f37d2ae4db8cf649b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 7 Jun 2023 15:49:20 +0900 Subject: [PATCH 51/86] =?UTF-8?q?feat:=20=E9=96=8B=E7=99=BA=E4=B8=AD?= =?UTF-8?q?=E3=82=B3=E3=83=9F=E3=83=83=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/laundering/sales_laundering.py | 2 + .../laundering/sales_results_laundering.py | 139 +++++ .../src05/hco_to_mdb_laundering.sql | 98 +++ .../src05/inst_merge_laundering.sql | 52 ++ .../src05/sales_lau_delete.sql | 37 ++ .../src05/sales_lau_upsert.sql | 568 ++++++++++++++++++ .../src05/v_inst_merge_laundering.sql | 71 +++ .../src05/whs_org_laundering.sql | 118 ++++ 8 files changed, 1085 insertions(+) create mode 100644 ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py create mode 100644 rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/inst_merge_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/sales_lau_delete.sql create mode 100644 
rds_mysql/stored_procedure/src05/sales_lau_upsert.sql create mode 100644 rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql create mode 100644 rds_mysql/stored_procedure/src05/whs_org_laundering.sql diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index f6d682b4..3862177e 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -20,6 +20,8 @@ def exec(): emp_chg_inst_laundering.exec() # 納入先処方元マスタ洗替 ult_ident_presc_laundering.exec() + # 卸販売洗替 + # # 並列処理のテスト用コード # import time diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py new file mode 100644 index 00000000..979f1042 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -0,0 +1,139 @@ +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.batch.batch_functions import logging_sql + +logger = get_logger('卸実績洗替') + + +def exec(): + db = Database.get_instance() + try: + db.connect() + logger.debug('処理開始') + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + _call_sales_lau_delete(db, 'sales_lau', 5) + # 卸販売実績テーブル(洗替後)作成 + _call_sales_lau_upsert(db, 'sales_lau', '', '') + # 1:卸組織洗替 + _call_whs_org_laundering(db, 'sales_lau') + # 3:HCO施設コードの洗替 + _update_sales_lau_from_vop_hco_merge_v(db, 'sales_lau') + # 4:メルク施設コードの洗替 + _update_mst_inst_laundering(db, 'sales_lau') + logger.debug('処理終了') + except Exception as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _call_sales_lau_delete(db: Database, target_table: str, set_year: int): + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + logger.info('sales_lau_delete(プロシージャ―) 開始') + db.execute(f'CALL src05.sales_lau_delete("{target_table}", {set_year})') 
+ logger.info('sales_lau_delete(プロシージャ―) 終了') + return + + +def _call_sales_lau_upsert(db: Database, target_table: str, extract_from_date: str, + extract_to_date: str): + # 卸販売実績テーブル(洗替後)作成 + logger.info('sales_lau_delete(プロシージャ―) 開始') + db.execute(f'CALL src05.sales_lau_delete("{target_table}", "{extract_from_date}", "{extract_to_date}")') + logger.info('sales_lau_delete(プロシージャ―) 終了') + return + + +def _call_whs_org_laundering(db: Database, target_table: str): + # 卸組織洗替 + logger.info('whs_org_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.whs_org_laundering("{target_table}")') + logger.info('whs_org_laundering(プロシージャ―) 終了') + return + + +def _update_sales_lau_from_vop_hco_merge_v(db: Database, target_table: str): + # HCO施設コードの洗替 + if _count_vop_hco_merge_v(db) >= 1: + _call_v_inst_merge_laundering(db, target_table) + return + logger.info('V施設統合マスタにデータは存在しません') + return + + +def _count_vop_hco_merge_v(db: Database) -> int: + # V施設統合マスタのデータ件数の取得 + try: + sql = """ + SELECT + COUNT(v_inst_cd) AS cnt + FROM + src05.vop_hco_merge_v + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('V施設統合マスタのデータ件数の取得 成功') + except Exception as e: + logger.debug('V施設統合マスタのデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_v_inst_merge_laundering(db: Database, target_table: str): + # HCO施設コードの洗替(テーブル更新) + logger.info('v_inst_merge_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.v_inst_merge_laundering("{target_table}")') + logger.info('v_inst_merge_laundering(プロシージャ―) 終了') + return + + +def _update_mst_inst_laundering(db: Database, target_table: str): + # メルク施設コードの洗替 + _call_hco_to_mdb_laundering(db, target_table) + _update_sales_lau_from_dcf_inst_merge(db, target_table) + + +def _call_hco_to_mdb_laundering(db: Database, target_table: str): + # A:医療機関のデータはMDB変換表からHCO⇒DCFへ変換 + logger.info('hco_to_mdb_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.hco_to_mdb_laundering("{target_table}")') + logger.info('hco_to_mdb_laundering(プロシージャ―) 
終了') + return + + +def _update_sales_lau_from_dcf_inst_merge(db: Database, target_table: str): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする + if _count_dcf_inst_merge(db) >= 1: + _call_inst_merge_laundering(db, target_table) + return + logger.info('DCF施設統合マスタにデータは存在しません') + return + + +def _count_dcf_inst_merge(db: Database) -> int: + # DCF施設統合マスタのデータ件数の取得 + try: + sql = """ + SELECT + COUNT(dcf_inst_cd) AS cnt + FROM + src05.dcf_inst_merge + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('DCF施設統合マスタのデータ件数の取得 成功') + except Exception as e: + logger.debug('DCF施設統合マスタのデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_inst_merge_laundering(db: Database, target_table: str): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(テーブル更新) + logger.info('inst_merge_laundering(プロシージャ―) 開始') + db.execute(f'CALL src05.inst_merge_laundering("{target_table}")') + logger.info('inst_merge_laundering(プロシージャ―) 終了') + return diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql new file mode 100644 index 00000000..03402001 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -0,0 +1,98 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.hco_to_mdb_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + 
SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A① 開始'); + + TRUNCATE TABLE internal05.hco_cnv_mdb_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A① 終了'); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A② 開始'); + + INSERT INTO + internal05.hco_cnv_mdb_t ( + hco_vod_v, + mdb_cd, + form_inst_name_kana, + form_inst_name_kanji, + inst_addr, + prefc_cd, + delete_flg, + abolish_ymd, + start_date + ) + SELECT + mcmv.hco_vid_v, + mcmv.mdb_cd, + ci.form_inst_name_kana, + ci.form_inst_name_kanji, + ci.inst_addr, + ci.prefc_cd, + ci.delete_flg, + ci.abolish_ymd, + mcmv.start_date + FROM + src05.mdb_cnv_mst_v AS mcmv + INNER JOIN ( + SELECT + hco_vid_v,MAX(sub_num) AS sno + FROM + src05.mdb_cnv_mst_v + WHERE + rec_sts_kbn != '9' + AND src05.get_syor_date() >= START_DATE + GROUP BY hco_vid_v + ) AS mcmv2 + ON mcmv.hco_vid_v = mcmv2.hco_vid_v + AND mcmv.sub_num = mcmv2.sno + LEFT OUTER JOIN src05.com_inst AS ci + ON mcmv.mdb_cd = ci.dcf_dsf_inst_cd + AND ci.delete_flg = '0' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A② 終了'); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A③ 開始'); + + UPDATE src05.@target_table AS tt, internal05.hco_cnv_mdb_t AS hcmt + SET + tt.inst_cd = hcmt.mdb_cd, + tt.inst_name_kana = hcmt.form_inst_name_kana, + tt.inst_name = hcmt.form_inst_name_kanji, + tt.address = hcmt.inst_addr, + tt.pref_cd = hcmt.prefc_cd + WHERE + tt.v_inst_cd = hcmt.hco_vid_v + AND tt.inst_clas_cd = '1' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_A③ 終了'); + +END diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql new 
file mode 100644 index 00000000..92ff79a9 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -0,0 +1,52 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.inst_merge_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_B① 開始'); + + UPDATE ( + SELECT + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + FROM + internal05.inst_merge_t + ) AS imt, + src05.@target_table AS tt + SET + tt.inst_cd = imt.dup_opp_cd, + tt.inst_name = imt.form_inst_name_kanji, + tt.inst_name_kana = imt.form_inst_name_kana, + tt.address = imt.inst_addr, + tt.prefc_cd = imt.prefc_cd + WHERE + tt.inst_cd = imt.dcf_dsf_inst_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】4:メルク施設コードの洗替_B① 終了'); + diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql new file mode 100644 index 00000000..652db718 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -0,0 +1,37 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.sales_lau_delete(@target_table VARCHAR(64), 
@laundering_period_year INT) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); + + DELETE FROM + src05.@target_table + WHERE + kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL @laundering_period_year YEAR), '%Y%m') + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql new file mode 100644 index 00000000..270495e7 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -0,0 +1,568 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.sales_lau_upsert(@target_table VARCHAR(64), @extract_from_date DATETIME, + @extract_to_date DATETIME) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call 
medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成① 開始' + ); + + TRUNCATE TABLE internal05.bu_prd_name_contrast_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成① 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成② 開始' + ); + + INSERT INTO + internal05.bu_prd_name_contrast_t ( + prd_cd, + bu_cd, + phm_itm_cd, + pp_start_date, + pp_end_date, + update_date, + bp_start_date, + bp_end_date + ) + SELECT + ppmv.prd_cd, + bpnc.bu_cd, + ppmv.phm_itm_cd, + ppmv.start_date AS pp_start_date, + ppmv.end_date AS pp_end_date, + bpnc.update_date AS update_date + bpnc.start_date AS bp_start_date, + bpnc.end_date AS bp_end_date + FROM + src05.phm_prd_mst_v AS ppmv + LEFT OUTER JOIN src05.bu_prd_name_contrast AS bpnc + ON ppmv.phm_itm_cd = bpnc.phm_itm_cd + WHERE + ppmv.rec_sts_kbn != '9' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成② 終了' + ); + + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成③ 開始' + ); + + TRUNCATE TABLE internal05.fcl_mst_v_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成③ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成④ 開始' + ); + + INSERT INTO + internal05.internal05.fcl_mst_v_t + SELECT + v_inst_cd, + sub_num, + start_date, + end_date, + closed_dt, + fcl_name, + fcl_kn_name, + fcl_abb_name, + mkr_cd, + jsk_proc_kbn, + fmt_addr, + fmt_kn_addr, + postal_cd, + prft_cd, + prft_name, + city_name, + addr_line_1, + 
tel_num, + admin_kbn, + fcl_type, + rec_sts_kbn, + ins_dt, + upd_dt, + dwh_upd_dt + FROM + src05.fcl_mst_v AS fmv1 + INNER JOIN ( + SELECT + fmv.v_inst_cd, + MAX(fmv.sub_num) AS sno + FROM + src05.fcl_mst_v AS fmv + GROUP BY + fmv.v_inst_cd + ) AS fmv2 + ON fmv1.v_inst_cd = fmv2.v_inst_cd + AND fmv1.sub_num = fmv2.sno + WHERE + fmv1.rec_sts_kbn != '9' + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成④ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑤ 開始' + ); + + INSERT INTO + src05.@target_table ( + rec_whs_cd, + rec_whs_sub_cd, + rec_whs_org_cd, + rec_cust_cd, + rec_comm_cd, + rec_tran_kbn, + rev_hsdnymd_wrk, + rev_hsdnymd_srk, + rec_urag_num, + rec_qty, + rec_nonyu_price, + rec_nonyu_amt, + rec_comm_name, + rec_nonyu_fcl_name, + free_item, + rec_nonyu_fcl_addr, + rec_nonyu_fcl_post, + rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd, + rec_bef_slip_num, + rec_ymd, + sale_data_cat, + slip_file_name, + slip_mgt_num, + row_num, + hsdn_ymd, + exec_dt, + v_tran_cd, + tran_kbn_name, + whs_org_cd, + v_whsorg_cd, + whs_org_name, + whs_org_kn, + v_whs_cd, + whs_name, + nonyu_fcl_cd, + inst_name, + inst_name_kana, + address, + comm_cd, + comm_name, + nonyu_qty, + nonyu_price, + nonyu_amt, + shikiri_price, + shikiri_amt, + nhi_price, + nhi_amt, + v_inst_cd, + inst_clas_cd, + bu_cd, + item_cd, + item_name, + item_english_name, + pref_cd, + whspos_err_kbn, + htdnymd_err_kbn, + prd_exis_kbn, + fcl_exis_kbn, + bef_hsdn_ymd, + bef_slip_num, + slip_org_kbn, + kjyo_ym, + tksnbk_kbn, + fcl_exec_kbn, + rec_sts_kbn, + ins_dt, + ins_usr, + dwh_upd_dt + ) + SELECT + s.rec_whs_cd, + s.rec_whs_sub_cd, + s.rec_whs_org_cd, + s.rec_cust_cd, + s.rec_comm_cd, + s.rec_tran_kbn, + s.rev_hsdnymd_wrk, + s.rev_hsdnymd_srk, + s.rec_urag_num, + s.rec_qty, + s.rec_nonyu_price, + s.rec_nonyu_amt, + s.rec_comm_name, + s.rec_nonyu_fcl_name, + s.free_item, + s.rec_nonyu_fcl_addr, + 
s.rec_nonyu_fcl_post, + s.rec_nonyu_fcl_tel, + s.rec_bef_hsdn_ymd, + s.rec_bef_slip_num, + s.rec_ymd, + s.sale_data_cat, + s.slip_file_name, + s.slip_mgt_num, + s.row_num, + s.hsdn_ymd, + s.exec_dt, + s.v_tran_cd, + s.tran_kbn_name, + s.whs_org_cd, + s.v_whsorg_cd, + s.whs_org_name, + s.whs_org_kn, + s.v_whs_cd, + s.whs_name, + s.nonyu_fcl_cd, + s.v_inst_name, + s.v_inst_kn, + s.v_inst_addr, + s.comm_cd, + s.comm_name, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END AS nonyu_qty, + s.nonyu_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END AS nonyu_amt, + s.shikiri_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END AS shikiri_amt, + s.nhi_price, + CASE + WHEN + (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END AS nhi_amt, + s.v_inst_cd, + CASE + WHEN + (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' + ELSE + '1' + END AS inst_clas_cd, + bpnct.bu_cd, + ppmv.mkr_cd, + ppmv.mkr_inf_1, + ppmv.mkr_inf_2, + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END AS prft_cd, + s.whspos_err_kbn, + s.htdnymd_err_kbn, + s.prd_exis_kbn, + s.fcl_exis_kbn, + s.bef_hsdn_ymd, + s.bef_slip_num, + s.slip_org_kbn, + s.kjyo_ym, + s.tksnbk_kbn, + s.fcl_exec_kbn, + s.rec_sts_kbn, + s.ins_dt, + s.ins_usr, + SYSDATE() + FROM ( + SELECT + CAST(SYSDATE() AS DATE) AS today + ) AS sub + INNER JOIN src05.sales AS s + ON sub.today = CAST(s.dwh_upd_dt AS DATE) + LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv + ON s.comm_cd = ppmv.prd_cd + AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date + AND 
ppmv.rec_sts_kbn != '9' + LEFT OUTER JOIN src05.fcl_mst_v_t AS fmvt + ON s.v_inst_cd = fmvt.v_inst_cd + LEFT OUTER JOIN src05.bu_prd_name_contrast_t AS bpnct + ON s.comm_cd = bpnct.prd_cd + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date + LEFT OUTER JOIN src05.com_inst AS ci + ON s.v_inst_cd = ci.dcf_dsf_inst_cd + WHERE + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + ( + s.fcl_exec_kbn NOT IN ('2', '5') + AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) + ) + OR s.fcl_exec_kbn IS NULL + ) + ) + + ON DUPLICATE KEY UPDATE + rec_whs_cd = s.rec_whs_cd, + rec_whs_sub_cd = s.rec_whs_sub_cd, + rec_whs_org_cd = s.rec_whs_org_cd, + rec_cust_cd = s.rec_cust_cd, + rec_comm_cd = s.rec_comm_cd, + rec_tran_kbn = s.rec_tran_kbn, + rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, + rev_hsdnymd_srk = s.rev_hsdnymd_srk, + rec_urag_num = s.rec_urag_num, + rec_qty = s.rec_qty, + rec_nonyu_price = s.rec_nonyu_price, + rec_nonyu_amt = s.rec_nonyu_amt, + rec_comm_name = s.rec_comm_name, + rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, + free_item = s.free_item, + rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, + rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, + rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, + rec_bef_slip_num = s.rec_bef_slip_num, + rec_ymd = s.rec_ymd, + sale_data_cat = s.sale_data_cat, + slip_file_name = s.slip_file_name, + row_num = s.row_num, + hsdn_ymd = s.hsdn_ymd, + exec_dt = s.exec_dt, + v_tran_cd = s.v_tran_cd, + tran_kbn_name = s.tran_kbn_name, + whs_org_cd = s.whs_org_cd, + v_whsorg_cd = s.v_whsorg_cd, + whs_org_name = s.whs_org_name, + whs_org_kn = s.whs_org_kn, + v_whs_cd = s.v_whs_cd, + whs_name = s.whs_name, + nonyu_fcl_cd = s.nonyu_fcl_cd, + inst_name = s.v_inst_name, + inst_name_kana = s.v_inst_kn, + address = 
s.v_inst_addr, + comm_cd = s.comm_cd, + comm_name = s.comm_name, + nonyu_qty = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END + ), + nonyu_price = s.nonyu_price, + nonyu_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END + ), + shikiri_price = s.shikiri_price, + shikiri_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END + ), + nhi_price = s.nhi_price, + nhi_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END + ), + v_inst_cd = s.v_inst_cd, + inst_clas_cd = ( + CASE + WHEN + (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') + THEN + '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + bu_cd = bpnct.bu_cd, + item_cd = ppmv.mkr_cd, + item_name = ppmv.mkr_inf_1, + item_english_name = ppmv.mkr_inf_2, + pref_cd = ( + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END + ), + whspos_err_kbn = s.whspos_err_kbn, + htdnymd_err_kbn = s.htdnymd_err_kbn, + prd_exis_kbn = s.prd_exis_kbn, + fcl_exis_kbn = s.fcl_exis_kbn, + bef_hsdn_ymd = s.bef_hsdn_ymd, + bef_slip_num = s.bef_slip_num, + slip_org_kbn = s.slip_org_kbn, + kjyo_ym = s.kjyo_ym, + tksnbk_kbn = s.tksnbk_kbn, + fcl_exec_kbn = s.fcl_exec_kbn, + rec_sts_kbn = s.rec_sts_kbn, + ins_dt = s.ins_dt, + ins_usr = s.ins_usr, + dwh_upd_dt = SYSDATE() + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑤ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑥ 開始' + ); + + UPDATE ( + SELECT + fmvt.v_inst_cd AS v_inst_cd, + fmvt.fcl_type AS fcl_type, + 
fmvt.prft_cd AS prft_cd, + ci.prefc_cd AS prefc_cd + FROM + (SELECT CAST(SYSDATE() AS DATE) AS today) AS sub + INNER JOIN src05.fcl_mst_v_t AS fmvt + ON sub.today = CAST(fmvt.dwh_upd_dt AS DATE) + LEFT OUTER JOIN src05.com_inst AS ci + ON fmvt.v_inst_cd = ci.dcf_dsf_inst_cd + ) AS t3t5, + src05.sales_lau AS sl + SET + sl.inst_clas_cd = ( + CASE + WHEN + (t3t5.fcl_type = 'A1' OR t3t5.fcl_type = 'A0') + THEN + '3' + WHEN + t3t5.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + sl.pref_cd = ( + CASE + WHEN + sl.v_inst_cd LIKE '00%' + THEN + t3t5.prefc_cd + ELSE + t3t5.prft_cd + END + ) + WHERE sl.v_inst_cd = t3t5.v_inst_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑥ 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑦ 開始' + ); + + UPDATE + ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + src05.sales_lau AS sl, + src05.sales AS s + SET + sl.inst_cd = ( + CASE + WHEN + (s.err_flg20 != 'M' AND sl.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') + THEN + s.v_inst_cd + ELSE + NULL + END + ) + WHERE + sub.today = CAST(s.dwh_upd_dt AS DATE) + AND sl.slip_mgt_num = s.slip_mgt_num + AND sl.row_num = s.row_num + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '卸販売実績テーブル(洗替後)作成⑦ 終了' + ); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql new file mode 100644 index 00000000..4ef2075e --- /dev/null +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -0,0 +1,71 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.v_inst_merge_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 
'v_inst_merge_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】3:HCO施設コードの洗替① 開始' + ); + + UPDATE ( + SELECT + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd + FROM + internal05.v_inst_merge_t + WHERE + (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' + ) AS vimt, + src05.@target_table AS tt + SET + tt.inst_cd = ( + CASE + WHEN + tt.inst_clas_cd = '1' + THEN + tt.inst_cd + WHEN + (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') + THEN + vimt.v_inst_cd_merge + END + ) + tt.v_inst_cd = vimt.v_inst_cd_merge, + tt.inst_name = vimt.fcl_name, + tt.inst_name_kana = vimt.fcl_kn_name, + tt.address = vimt.fmt_addr, + tt.prefc_cd = vimt.prft_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.v_inst_cd = vimt.v_inst_cd + AND (tt.inst_clas_cd IN ('1', '2', '3')) + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】3:HCO施設コードの洗替① 終了' + ); + +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql new file mode 100644 index 00000000..e3ce53c7 --- /dev/null +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -0,0 +1,118 @@ +-- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する +CREATE PROCEDURE src05.whs_org_laundering(@target_table VARCHAR(64)) +SQL SECURITY INVOKER +BEGIN + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT 
DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'whs_org_launderingでエラーが発生', @error_state, @error_msg); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; + + SET @error_state = NULL, @error_msg = NULL; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替① 開始' + ); + + TRUNCATE TABLE internal05.whs_customer_org_t; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替① 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替② 開始' + ); + + INSERT INTO + internal05.whs_customer_org_t ( + whs_cd, + whs_sub_cd, + customer_cd, + whs_org_cd, + v_org_cd, + nm_2 + ) + SELECT + wcmv.whs_cd, + wcmv.whs_sub_cd, + wcmv.customer_cd, + wcmv.whs_org_cd, + ocmv.v_org_cd, + mohv2.name_2 + FROM + src05.whs_customer_mst_v AS wcmv + LEFT OUTER JOIN src05.org_cnv_mst_v AS ocmv + ON wcmv.whs_cd = ocmv.whs_cd + AND wcmv.whs_sub_cd = ocmv.whs_sub_cd + AND wcmv.whs_org_cd = ocmv.org_cd + AND src05.get_syor_date() BETWEEN ocmv.start_date AND ocmv.end_date + AND ocmv.rec_sts_kbn != '9' + LEFT OUTER JOIN ( + SELECT + mohv.v_cd_2, + mohv.name_2 + FROM src05.mkr_org_horizon_v AS mohv + INNER JOIN ( + SELECT + count(1) AS c, + v_cd_2, + MAX(dwh_upd_dt) AS dwh_upd_dt_latest + FROM + src05.mkr_org_horizon_v + WHERE + rec_sts_kbn != '9' + AND src05.get_syor_date() BETWEEN start_date AND end_date + GROUP BY + v_cd_2 + ORDER BY + MAX(start_date) DESC + ) AS m_latest + ON mohv.v_cd_2 = m_latest.v_cd_2 + AND mohv.dwh_upd_dt = m_latest.dwh_upd_dt_latest + WHERE + mohv.rec_sts_kbn != '9' + 
AND src05.get_syor_date() BETWEEN mohv.start_date AND mohv.end_date + ) AS mohv2 + ON ocmv.v_org_cd = mohv2.v_cd_2 + WHERE + wcmv.rec_sts_kbn != '9' + AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替② 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 開始' + ); + + UPDATE + src05.sales_lau AS sl, src05.@target_table AS tt + SET + sl.whs_org_cd = tt.whs_org_cd, + sl.v_whsorg_cd = tt.v_org_cd, + sl.whs_org_name = tt.nm_2 + WHERE + st.whs_cd = tt.whs_cd + AND st.whs_sub_cd = tt.whs_sub_cd + AND st.customer_cd = tt.customer_cd + ; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 終了' + ); + +END \ No newline at end of file From 5f4efd451def5911198a8826a117cec361e6ee87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Mon, 12 Jun 2023 10:30:52 +0900 Subject: [PATCH 52/86] =?UTF-8?q?feat:=2048-1.=E5=8D=B8=E5=AE=9F=E7=B8=BE?= =?UTF-8?q?=E6=B4=97=E6=9B=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch-daily/.env.example | 6 + .../src/batch/laundering/sales_laundering.py | 6 +- .../laundering/sales_results_laundering.py | 85 +- .../src/system_var/environment.py | 7 + .../src05/hco_to_mdb_laundering.sql | 40 +- .../src05/inst_merge_laundering.sql | 57 +- .../src05/sales_lau_delete.sql | 20 +- .../src05/sales_lau_upsert.sql | 864 +++++++++--------- .../src05/v_inst_merge_laundering.sql | 86 +- .../src05/whs_org_laundering.sql | 59 +- 10 files changed, 629 insertions(+), 601 deletions(-) diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..7463e0d2 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -11,3 +11,9 @@ ULTMARC_BACKUP_FOLDER=ultmarc 
JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 \ No newline at end of file diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index 3862177e..8264f4bd 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -1,5 +1,7 @@ from src.batch.common.batch_context import BatchContext -from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering +from src.batch.laundering import ( + create_inst_merge_for_laundering, emp_chg_inst_laundering, + ult_ident_presc_laundering, sales_results_laundering) from src.logging.get_logger import get_logger batch_context = BatchContext.get_instance() @@ -21,7 +23,7 @@ def exec(): # 納入先処方元マスタ洗替 ult_ident_presc_laundering.exec() # 卸販売洗替 - + sales_results_laundering.exec() # # 並列処理のテスト用コード # import time diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 979f1042..4fe4126a 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -2,6 +2,7 @@ from src.db.database import Database from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql +from src.system_var import environment logger = get_logger('卸実績洗替') @@ -12,15 +13,15 @@ def exec(): db.connect() logger.debug('処理開始') # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 - 
_call_sales_lau_delete(db, 'sales_lau', 5) + _call_sales_lau_delete(db) # 卸販売実績テーブル(洗替後)作成 - _call_sales_lau_upsert(db, 'sales_lau', '', '') + _call_sales_lau_upsert(db) # 1:卸組織洗替 - _call_whs_org_laundering(db, 'sales_lau') + _call_whs_org_laundering(db) # 3:HCO施設コードの洗替 - _update_sales_lau_from_vop_hco_merge_v(db, 'sales_lau') + _update_sales_lau_from_vop_hco_merge_v(db) # 4:メルク施設コードの洗替 - _update_mst_inst_laundering(db, 'sales_lau') + _update_mst_inst_laundering(db) logger.debug('処理終了') except Exception as e: raise BatchOperationException(e) @@ -28,35 +29,49 @@ def exec(): db.disconnect() -def _call_sales_lau_delete(db: Database, target_table: str, set_year: int): +def _call_sales_lau_delete(db: Database): # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 logger.info('sales_lau_delete(プロシージャ―) 開始') - db.execute(f'CALL src05.sales_lau_delete("{target_table}", {set_year})') + db.execute(f""" + CALL src05.sales_lau_delete( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + {environment.SALES_LAUNDERING_TARGET_YEAR_OFFSET} + ) + """) logger.info('sales_lau_delete(プロシージャ―) 終了') return -def _call_sales_lau_upsert(db: Database, target_table: str, extract_from_date: str, - extract_to_date: str): +def _call_sales_lau_upsert(db: Database): # 卸販売実績テーブル(洗替後)作成 - logger.info('sales_lau_delete(プロシージャ―) 開始') - db.execute(f'CALL src05.sales_lau_delete("{target_table}", "{extract_from_date}", "{extract_to_date}")') - logger.info('sales_lau_delete(プロシージャ―) 終了') + logger.info('sales_lau_upsert(プロシージャ―) 開始') + db.execute(f""" + CALL src05.sales_lau_upsert( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + (src05.get_syor_date() - {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD}), + src05.get_syor_date() + ) + """) + logger.info('sales_lau_upsert(プロシージャ―) 終了') return -def _call_whs_org_laundering(db: Database, target_table: str): +def _call_whs_org_laundering(db: Database): # 卸組織洗替 logger.info('whs_org_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.whs_org_laundering("{target_table}")') + 
db.execute(f""" + CALL src05.whs_org_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('whs_org_laundering(プロシージャ―) 終了') return -def _update_sales_lau_from_vop_hco_merge_v(db: Database, target_table: str): +def _update_sales_lau_from_vop_hco_merge_v(db: Database): # HCO施設コードの洗替 if _count_vop_hco_merge_v(db) >= 1: - _call_v_inst_merge_laundering(db, target_table) + _call_v_inst_merge_laundering(db) return logger.info('V施設統合マスタにデータは存在しません') return @@ -81,32 +96,40 @@ def _count_vop_hco_merge_v(db: Database) -> int: return result[0]['cnt'] -def _call_v_inst_merge_laundering(db: Database, target_table: str): - # HCO施設コードの洗替(テーブル更新) +def _call_v_inst_merge_laundering(db: Database): + # HCO施設コードの洗替(プロシージャ―の呼び出し) logger.info('v_inst_merge_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.v_inst_merge_laundering("{target_table}")') + db.execute(f""" + CALL src05.v_inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('v_inst_merge_laundering(プロシージャ―) 終了') return -def _update_mst_inst_laundering(db: Database, target_table: str): +def _update_mst_inst_laundering(db: Database): # メルク施設コードの洗替 - _call_hco_to_mdb_laundering(db, target_table) - _update_sales_lau_from_dcf_inst_merge(db, target_table) + _call_hco_to_mdb_laundering(db) + _update_sales_lau_from_dcf_inst_merge(db) -def _call_hco_to_mdb_laundering(db: Database, target_table: str): +def _call_hco_to_mdb_laundering(db: Database): # A:医療機関のデータはMDB変換表からHCO⇒DCFへ変換 logger.info('hco_to_mdb_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.hco_to_mdb_laundering("{target_table}")') + db.execute(f""" + CALL src05.hco_to_mdb_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('hco_to_mdb_laundering(プロシージャ―) 終了') return -def _update_sales_lau_from_dcf_inst_merge(db: Database, target_table: str): +def _update_sales_lau_from_dcf_inst_merge(db: Database): # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする if 
_count_dcf_inst_merge(db) >= 1: - _call_inst_merge_laundering(db, target_table) + _call_inst_merge_laundering(db) return logger.info('DCF施設統合マスタにデータは存在しません') return @@ -131,9 +154,13 @@ def _count_dcf_inst_merge(db: Database) -> int: return result[0]['cnt'] -def _call_inst_merge_laundering(db: Database, target_table: str): - # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(テーブル更新) +def _call_inst_merge_laundering(db: Database): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(プロシージャ―の呼び出し) logger.info('inst_merge_laundering(プロシージャ―) 開始') - db.execute(f'CALL src05.inst_merge_laundering("{target_table}")') + db.execute(f""" + CALL src05.inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) logger.info('inst_merge_laundering(プロシージャ―) 終了') return diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..a51ab519 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -22,3 +22,10 @@ DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_AT DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) + +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index 03402001..b3cbfc6e 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ 
b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.hco_to_mdb_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.hco_to_mdb_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -23,7 +23,7 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A① 開始'); + '【洗替】4:メルク施設コードの洗替_A① 開始'); TRUNCATE TABLE internal05.hco_cnv_mdb_t; @@ -31,11 +31,11 @@ BEGIN '【洗替】4:メルク施設コードの洗替_A① 終了'); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A② 開始'); + '【洗替】4:メルク施設コードの洗替_A② 開始'); INSERT INTO internal05.hco_cnv_mdb_t ( - hco_vod_v, + hco_vid_v, mdb_cd, form_inst_name_kana, form_inst_name_kanji, @@ -78,19 +78,23 @@ BEGIN '【洗替】4:メルク施設コードの洗替_A② 終了'); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_A③ 開始'); + '【洗替】4:メルク施設コードの洗替_A③ 開始'); - UPDATE src05.@target_table AS tt, internal05.hco_cnv_mdb_t AS hcmt - SET - tt.inst_cd = hcmt.mdb_cd, - tt.inst_name_kana = hcmt.form_inst_name_kana, - tt.inst_name = hcmt.form_inst_name_kanji, - tt.address = hcmt.inst_addr, - tt.pref_cd = hcmt.prefc_cd - WHERE - tt.v_inst_cd = hcmt.hco_vid_v - AND tt.inst_clas_cd = '1' - ; + SET @update_institution = " + UPDATE $$target_table$$ AS tt, internal05.hco_cnv_mdb_t AS hcmt + SET + tt.inst_cd = hcmt.mdb_cd, + tt.inst_name_kana = hcmt.form_inst_name_kana, + tt.inst_name = 
hcmt.form_inst_name_kanji, + tt.address = hcmt.inst_addr, + tt.pref_cd = hcmt.prefc_cd + WHERE + tt.v_inst_cd = hcmt.hco_vid_v + AND tt.inst_clas_cd = '1' + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 終了'); diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 92ff79a9..6a0642a9 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.inst_merge_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.inst_merge_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -25,28 +25,35 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 開始'); - UPDATE ( - SELECT - dcf_dsf_inst_cd, - dup_opp_cd, - form_inst_name_kanji, - form_inst_name_kana, - inst_addr, - prefc_cd - FROM - internal05.inst_merge_t - ) AS imt, - src05.@target_table AS tt - SET - tt.inst_cd = imt.dup_opp_cd, - tt.inst_name = imt.form_inst_name_kanji, - tt.inst_name_kana = imt.form_inst_name_kana, - tt.address = imt.inst_addr, - tt.prefc_cd = imt.prefc_cd - WHERE - tt.inst_cd = imt.dcf_dsf_inst_cd - ; + SET @update_institution = " + 
UPDATE ( + SELECT + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + FROM + internal05.inst_merge_t + ) AS imt, + $$target_table$$ AS tt + SET + tt.inst_cd = imt.dup_opp_cd, + tt.inst_name = imt.form_inst_name_kanji, + tt.inst_name_kana = imt.form_inst_name_kana, + tt.address = imt.inst_addr, + tt.pref_cd = imt.prefc_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.inst_cd = imt.dcf_dsf_inst_cd + AND tt.inst_clas_cd = '1' + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】4:メルク施設コードの洗替_B① 終了'); + '【洗替】4:メルク施設コードの洗替_B① 終了'); +END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index 652db718..c1610435 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_delete(@target_table VARCHAR(64), @laundering_period_year INT) +CREATE PROCEDURE src05.sales_lau_delete(target_table VARCHAR(255), laundering_period_year INT) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -25,11 +25,15 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); - DELETE FROM - src05.@target_table - WHERE - 
kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL @laundering_period_year YEAR), '%Y%m') - ; + SET @delete_data = " + DELETE FROM + $$target_table$$ + WHERE + kjyo_ym < DATE_FORMAT((src05.get_syor_date() - INTERVAL ? YEAR), '%Y%m') + "; + SET @delete_data = REPLACE(@delete_data, "$$target_table$$", target_table); + PREPARE delete_data_stmt from @delete_data; + EXECUTE delete_data_stmt USING @laundering_period_year; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 270495e7..ecb2b671 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -1,6 +1,6 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_upsert(@target_table VARCHAR(64), @extract_from_date DATETIME, - @extract_to_date DATETIME) +CREATE PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date date, + extract_to_date date) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -8,10 +8,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -24,7 +24,7 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成① 開始' + '卸販売実績テーブル(洗替後)作成① 開始' ); TRUNCATE TABLE internal05.bu_prd_name_contrast_t; @@ -36,7 +36,7 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 開始' ); - + INSERT INTO internal05.bu_prd_name_contrast_t ( prd_cd, @@ 
-46,15 +46,15 @@ BEGIN pp_end_date, update_date, bp_start_date, - bp_end_date - ) + bp_end_date + ) SELECT ppmv.prd_cd, bpnc.bu_cd, ppmv.phm_itm_cd, ppmv.start_date AS pp_start_date, ppmv.end_date AS pp_end_date, - bpnc.update_date AS update_date + bpnc.update_date AS update_date, bpnc.start_date AS bp_start_date, bpnc.end_date AS bp_end_date FROM @@ -63,7 +63,7 @@ BEGIN ON ppmv.phm_itm_cd = bpnc.phm_itm_cd WHERE ppmv.rec_sts_kbn != '9' - ; + ; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 終了' @@ -71,46 +71,47 @@ BEGIN call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成③ 開始' + '卸販売実績テーブル(洗替後)作成③ 開始' ); TRUNCATE TABLE internal05.fcl_mst_v_t; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成③ 終了' - ); + '卸販売実績テーブル(洗替後)作成③ 終了' + ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成④ 開始' + '卸販売実績テーブル(洗替後)作成④ 開始' ); INSERT INTO - internal05.internal05.fcl_mst_v_t + internal05.fcl_mst_v_t SELECT - v_inst_cd, - sub_num, - start_date, - end_date, - closed_dt, - fcl_name, - fcl_kn_name, - fcl_abb_name, - mkr_cd, - jsk_proc_kbn, - fmt_addr, - fmt_kn_addr, - postal_cd, - prft_cd, - prft_name, - city_name, - addr_line_1, - tel_num, - admin_kbn, - fcl_type, - rec_sts_kbn, - ins_dt, - upd_dt, - dwh_upd_dt + fmv1.v_inst_cd, + fmv1.sub_num, + fmv1.start_date, + fmv1.end_date, + fmv1.closed_dt, + fmv1.fcl_name, + fmv1.fcl_kn_name, + fmv1.fcl_abb_name, + fmv1.fcl_abb_kn_name, + fmv1.mkr_cd, + fmv1.jsk_proc_kbn, + fmv1.fmt_addr, + fmv1.fmt_kn_addr, + fmv1.postal_cd, + fmv1.prft_cd, + fmv1.prft_name, + fmv1.city_name, + fmv1.addr_line_1, + fmv1.tel_num, + fmv1.admin_kbn, + fmv1.fcl_type, + fmv1.rec_sts_kbn, + fmv1.ins_dt, + fmv1.upd_dt, + fmv1.dwh_upd_dt FROM src05.fcl_mst_v AS fmv1 INNER JOIN ( @@ -129,334 +130,176 @@ BEGIN ; call medaca_common.put_info_log(schema_name, procedure_name, 
procedure_args, - '卸販売実績テーブル(洗替後)作成④ 終了' + '卸販売実績テーブル(洗替後)作成④ 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑤ 開始' - ); + '卸販売実績テーブル(洗替後)作成⑤ 開始' + ); - INSERT INTO - src05.@target_table ( - rec_whs_cd, - rec_whs_sub_cd, - rec_whs_org_cd, - rec_cust_cd, - rec_comm_cd, - rec_tran_kbn, - rev_hsdnymd_wrk, - rev_hsdnymd_srk, - rec_urag_num, - rec_qty, - rec_nonyu_price, - rec_nonyu_amt, - rec_comm_name, - rec_nonyu_fcl_name, - free_item, - rec_nonyu_fcl_addr, - rec_nonyu_fcl_post, - rec_nonyu_fcl_tel, - rec_bef_hsdn_ymd, - rec_bef_slip_num, - rec_ymd, - sale_data_cat, - slip_file_name, - slip_mgt_num, - row_num, - hsdn_ymd, - exec_dt, - v_tran_cd, - tran_kbn_name, - whs_org_cd, - v_whsorg_cd, - whs_org_name, - whs_org_kn, - v_whs_cd, - whs_name, - nonyu_fcl_cd, - inst_name, - inst_name_kana, - address, - comm_cd, - comm_name, - nonyu_qty, - nonyu_price, - nonyu_amt, - shikiri_price, - shikiri_amt, - nhi_price, - nhi_amt, - v_inst_cd, - inst_clas_cd, - bu_cd, - item_cd, - item_name, - item_english_name, - pref_cd, - whspos_err_kbn, - htdnymd_err_kbn, - prd_exis_kbn, - fcl_exis_kbn, - bef_hsdn_ymd, - bef_slip_num, - slip_org_kbn, - kjyo_ym, - tksnbk_kbn, - fcl_exec_kbn, - rec_sts_kbn, - ins_dt, - ins_usr, - dwh_upd_dt - ) - SELECT - s.rec_whs_cd, - s.rec_whs_sub_cd, - s.rec_whs_org_cd, - s.rec_cust_cd, - s.rec_comm_cd, - s.rec_tran_kbn, - s.rev_hsdnymd_wrk, - s.rev_hsdnymd_srk, - s.rec_urag_num, - s.rec_qty, - s.rec_nonyu_price, - s.rec_nonyu_amt, - s.rec_comm_name, - s.rec_nonyu_fcl_name, - s.free_item, - s.rec_nonyu_fcl_addr, - s.rec_nonyu_fcl_post, - s.rec_nonyu_fcl_tel, - s.rec_bef_hsdn_ymd, - s.rec_bef_slip_num, - s.rec_ymd, - s.sale_data_cat, - s.slip_file_name, - s.slip_mgt_num, - s.row_num, - s.hsdn_ymd, - s.exec_dt, - s.v_tran_cd, - s.tran_kbn_name, - s.whs_org_cd, - s.v_whsorg_cd, - s.whs_org_name, - s.whs_org_kn, - s.v_whs_cd, - s.whs_name, - s.nonyu_fcl_cd, - s.v_inst_name, - s.v_inst_kn, - s.v_inst_addr, 
- s.comm_cd, - s.comm_name, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END AS nonyu_qty, - s.nonyu_price, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END AS nonyu_amt, - s.shikiri_price, - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END AS shikiri_amt, - s.nhi_price, - CASE - WHEN - (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt * -1) - ELSE - s.nhi_amt - END AS nhi_amt, - s.v_inst_cd, - CASE - WHEN - (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' - ELSE - '1' - END AS inst_clas_cd, - bpnct.bu_cd, - ppmv.mkr_cd, - ppmv.mkr_inf_1, - ppmv.mkr_inf_2, - CASE - WHEN - s.v_inst_cd LIKE '00%' - THEN - ci.prefc_cd - ELSE - fmvt.prft_cd - END AS prft_cd, - s.whspos_err_kbn, - s.htdnymd_err_kbn, - s.prd_exis_kbn, - s.fcl_exis_kbn, - s.bef_hsdn_ymd, - s.bef_slip_num, - s.slip_org_kbn, - s.kjyo_ym, - s.tksnbk_kbn, - s.fcl_exec_kbn, - s.rec_sts_kbn, - s.ins_dt, - s.ins_usr, - SYSDATE() - FROM ( + SET @insert_sales_laundering = " + INSERT INTO + $$target_table$$ ( + rec_whs_cd, + rec_whs_sub_cd, + rec_whs_org_cd, + rec_cust_cd, + rec_comm_cd, + rec_tran_kbn, + rev_hsdnymd_wrk, + rev_hsdnymd_srk, + rec_urag_num, + rec_qty, + rec_nonyu_price, + rec_nonyu_amt, + rec_comm_name, + rec_nonyu_fcl_name, + free_item, + rec_nonyu_fcl_addr, + rec_nonyu_fcl_post, + rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd, + rec_bef_slip_num, + rec_ymd, + sale_data_cat, + slip_file_name, + slip_mgt_num, + row_num, + hsdn_ymd, + exec_dt, + v_tran_cd, + tran_kbn_name, + whs_org_cd, + v_whsorg_cd, + whs_org_name, + whs_org_kn, + v_whs_cd, + whs_name, + nonyu_fcl_cd, + inst_name, + inst_name_kana, + address, + 
comm_cd, + comm_name, + nonyu_qty, + nonyu_price, + nonyu_amt, + shikiri_price, + shikiri_amt, + nhi_price, + nhi_amt, + v_inst_cd, + inst_clas_cd, + bu_cd, + item_cd, + item_name, + item_english_name, + pref_cd, + whspos_err_kbn, + htdnymd_err_kbn, + prd_exis_kbn, + fcl_exis_kbn, + bef_hsdn_ymd, + bef_slip_num, + slip_org_kbn, + kjyo_ym, + tksnbk_kbn, + fcl_exec_kbn, + rec_sts_kbn, + ins_dt, + ins_usr, + dwh_upd_dt + ) SELECT - CAST(SYSDATE() AS DATE) AS today - ) AS sub - INNER JOIN src05.sales AS s - ON sub.today = CAST(s.dwh_upd_dt AS DATE) - LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv - ON s.comm_cd = ppmv.prd_cd - AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date - AND ppmv.rec_sts_kbn != '9' - LEFT OUTER JOIN src05.fcl_mst_v_t AS fmvt - ON s.v_inst_cd = fmvt.v_inst_cd - LEFT OUTER JOIN src05.bu_prd_name_contrast_t AS bpnct - ON s.comm_cd = bpnct.prd_cd - AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date - AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date - LEFT OUTER JOIN src05.com_inst AS ci - ON s.v_inst_cd = ci.dcf_dsf_inst_cd - WHERE - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') - OR ( - s.rec_sts_kbn = '0' - AND s.err_flg20 != 'M' - AND s.v_tran_cd IN (110, 120, 210, 220) - AND ( - ( - s.fcl_exec_kbn NOT IN ('2', '5') - AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) - ) - OR s.fcl_exec_kbn IS NULL - ) - ) - - ON DUPLICATE KEY UPDATE - rec_whs_cd = s.rec_whs_cd, - rec_whs_sub_cd = s.rec_whs_sub_cd, - rec_whs_org_cd = s.rec_whs_org_cd, - rec_cust_cd = s.rec_cust_cd, - rec_comm_cd = s.rec_comm_cd, - rec_tran_kbn = s.rec_tran_kbn, - rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, - rev_hsdnymd_srk = s.rev_hsdnymd_srk, - rec_urag_num = s.rec_urag_num, - rec_qty = s.rec_qty, - rec_nonyu_price = s.rec_nonyu_price, - rec_nonyu_amt = s.rec_nonyu_amt, - rec_comm_name = s.rec_comm_name, - rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, - free_item = s.free_item, - 
rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, - rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, - rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, - rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, - rec_bef_slip_num = s.rec_bef_slip_num, - rec_ymd = s.rec_ymd, - sale_data_cat = s.sale_data_cat, - slip_file_name = s.slip_file_name, - row_num = s.row_num, - hsdn_ymd = s.hsdn_ymd, - exec_dt = s.exec_dt, - v_tran_cd = s.v_tran_cd, - tran_kbn_name = s.tran_kbn_name, - whs_org_cd = s.whs_org_cd, - v_whsorg_cd = s.v_whsorg_cd, - whs_org_name = s.whs_org_name, - whs_org_kn = s.whs_org_kn, - v_whs_cd = s.v_whs_cd, - whs_name = s.whs_name, - nonyu_fcl_cd = s.nonyu_fcl_cd, - inst_name = s.v_inst_name, - inst_name_kana = s.v_inst_kn, - address = s.v_inst_addr, - comm_cd = s.comm_cd, - comm_name = s.comm_name, - nonyu_qty = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END - ), - nonyu_price = s.nonyu_price, - nonyu_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END - ), - shikiri_price = s.shikiri_price, - shikiri_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END - ), - nhi_price = s.nhi_price, - nhi_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt * -1) - ELSE - s.nhi_amt - END - ), - v_inst_cd = s.v_inst_cd, - inst_clas_cd = ( - CASE - WHEN - (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') - THEN - '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), - bu_cd = bpnct.bu_cd, - item_cd = ppmv.mkr_cd, - item_name = ppmv.mkr_inf_1, - item_english_name = ppmv.mkr_inf_2, - pref_cd = ( + s.rec_whs_cd, + s.rec_whs_sub_cd, + s.rec_whs_org_cd, + s.rec_cust_cd, + s.rec_comm_cd, + s.rec_tran_kbn, + 
s.rev_hsdnymd_wrk, + s.rev_hsdnymd_srk, + s.rec_urag_num, + s.rec_qty, + s.rec_nonyu_price, + s.rec_nonyu_amt, + s.rec_comm_name, + s.rec_nonyu_fcl_name, + s.free_item, + s.rec_nonyu_fcl_addr, + s.rec_nonyu_fcl_post, + s.rec_nonyu_fcl_tel, + s.rec_bef_hsdn_ymd, + s.rec_bef_slip_num, + s.rec_ymd, + s.sale_data_cat, + s.slip_file_name, + s.slip_mgt_num, + s.row_num, + s.hsdn_ymd, + s.exec_dt, + s.v_tran_cd, + s.tran_kbn_name, + s.whs_org_cd, + s.v_whsorg_cd, + s.whs_org_name, + s.whs_org_kn, + s.v_whs_cd, + s.whs_name, + s.nonyu_fcl_cd, + s.v_inst_name, + s.v_inst_kn, + s.v_inst_addr, + s.comm_cd, + s.comm_name, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END AS nonyu_qty, + s.nonyu_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END AS nonyu_amt, + s.shikiri_price, + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END AS shikiri_amt, + s.nhi_price, + CASE + WHEN + (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END AS nhi_amt, + s.v_inst_cd, + CASE + WHEN + (fmvt.fcl_type = 'A1' or fmvt.fcl_type = 'A0') THEN '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' THEN '2' + ELSE + '1' + END AS inst_clas_cd, + bpnct.bu_cd, + ppmv.mkr_cd, + ppmv.mkr_inf_1, + ppmv.mkr_inf_2, CASE WHEN s.v_inst_cd LIKE '00%' @@ -464,105 +307,224 @@ BEGIN ci.prefc_cd ELSE fmvt.prft_cd - END - ), - whspos_err_kbn = s.whspos_err_kbn, - htdnymd_err_kbn = s.htdnymd_err_kbn, - prd_exis_kbn = s.prd_exis_kbn, - fcl_exis_kbn = s.fcl_exis_kbn, - bef_hsdn_ymd = s.bef_hsdn_ymd, - bef_slip_num = s.bef_slip_num, - slip_org_kbn = s.slip_org_kbn, - kjyo_ym = s.kjyo_ym, - tksnbk_kbn = s.tksnbk_kbn, - fcl_exec_kbn = s.fcl_exec_kbn, - rec_sts_kbn = 
s.rec_sts_kbn, - ins_dt = s.ins_dt, - ins_usr = s.ins_usr, - dwh_upd_dt = SYSDATE() - ; - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑤ 終了' - ); - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑥ 開始' - ); - - UPDATE ( + END AS prft_cd, + s.whspos_err_kbn, + s.htdnymd_err_kbn, + s.prd_exis_kbn, + s.fcl_exis_kbn, + s.bef_hsdn_ymd, + s.bef_slip_num, + s.slip_org_kbn, + s.kjyo_ym, + s.tksnbk_kbn, + s.fcl_exec_kbn, + s.rec_sts_kbn, + s.ins_dt, + s.ins_usr, + SYSDATE() + FROM ( SELECT - fmvt.v_inst_cd AS v_inst_cd, - fmvt.fcl_type AS fcl_type, - fmvt.prft_cd AS prft_cd, - ci.prefc_cd AS prefc_cd - FROM - (SELECT CAST(SYSDATE() AS DATE) AS today) AS sub - INNER JOIN src05.fcl_mst_v_t AS fmvt - ON sub.today = CAST(fmvt.dwh_upd_dt AS DATE) - LEFT OUTER JOIN src05.com_inst AS ci - ON fmvt.v_inst_cd = ci.dcf_dsf_inst_cd - ) AS t3t5, - src05.sales_lau AS sl - SET - sl.inst_clas_cd = ( - CASE - WHEN - (t3t5.fcl_type = 'A1' OR t3t5.fcl_type = 'A0') - THEN - '3' - WHEN - t3t5.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), - sl.pref_cd = ( - CASE - WHEN - sl.v_inst_cd LIKE '00%' - THEN - t3t5.prefc_cd - ELSE - t3t5.prft_cd - END - ) - WHERE sl.v_inst_cd = t3t5.v_inst_cd - ; + CAST(SYSDATE() AS DATE) AS today + ) AS sub + INNER JOIN src05.sales AS s + ON sub.today = CAST(s.dwh_upd_dt AS DATE) + LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv + ON s.comm_cd = ppmv.prd_cd + AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date + AND ppmv.rec_sts_kbn != '9' + LEFT OUTER JOIN internal05.fcl_mst_v_t AS fmvt + ON s.v_inst_cd = fmvt.v_inst_cd + LEFT OUTER JOIN internal05.bu_prd_name_contrast_t AS bpnct + ON s.comm_cd = bpnct.prd_cd + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.pp_start_date AND bpnct.pp_end_date + AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date + LEFT OUTER JOIN src05.com_inst AS 
ci + ON s.v_inst_cd = ci.dcf_dsf_inst_cd + WHERE + (? <= s.dwh_upd_dt AND s.dwh_upd_dt <= ?) + AND ( + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + ( + s.fcl_exec_kbn NOT IN ('2', '5') + AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) + ) + OR s.fcl_exec_kbn IS NULL + ) + ) + ) + + ON DUPLICATE KEY UPDATE + rec_whs_cd = s.rec_whs_cd, + rec_whs_sub_cd = s.rec_whs_sub_cd, + rec_whs_org_cd = s.rec_whs_org_cd, + rec_cust_cd = s.rec_cust_cd, + rec_comm_cd = s.rec_comm_cd, + rec_tran_kbn = s.rec_tran_kbn, + rev_hsdnymd_wrk = s.rev_hsdnymd_wrk, + rev_hsdnymd_srk = s.rev_hsdnymd_srk, + rec_urag_num = s.rec_urag_num, + rec_qty = s.rec_qty, + rec_nonyu_price = s.rec_nonyu_price, + rec_nonyu_amt = s.rec_nonyu_amt, + rec_comm_name = s.rec_comm_name, + rec_nonyu_fcl_name = s.rec_nonyu_fcl_name, + free_item = s.free_item, + rec_nonyu_fcl_addr = s.rec_nonyu_fcl_addr, + rec_nonyu_fcl_post = s.rec_nonyu_fcl_post, + rec_nonyu_fcl_tel = s.rec_nonyu_fcl_tel, + rec_bef_hsdn_ymd = s.rec_bef_hsdn_ymd, + rec_bef_slip_num = s.rec_bef_slip_num, + rec_ymd = s.rec_ymd, + sale_data_cat = s.sale_data_cat, + slip_file_name = s.slip_file_name, + row_num = s.row_num, + hsdn_ymd = s.hsdn_ymd, + exec_dt = s.exec_dt, + v_tran_cd = s.v_tran_cd, + tran_kbn_name = s.tran_kbn_name, + whs_org_cd = s.whs_org_cd, + v_whsorg_cd = s.v_whsorg_cd, + whs_org_name = s.whs_org_name, + whs_org_kn = s.whs_org_kn, + v_whs_cd = s.v_whs_cd, + whs_name = s.whs_name, + nonyu_fcl_cd = s.nonyu_fcl_cd, + inst_name = s.v_inst_name, + inst_name_kana = s.v_inst_kn, + address = s.v_inst_addr, + comm_cd = s.comm_cd, + comm_name = s.comm_name, + nonyu_qty = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nonyu_qty * -1) + ELSE + s.nonyu_qty + END + ), + nonyu_price = s.nonyu_price, + nonyu_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL 
OR s.err_flg20 != 'M')) + THEN + (s.nonyu_amt * -1) + ELSE + s.nonyu_amt + END + ), + shikiri_price = s.shikiri_price, + shikiri_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.shikiri_amt * -1) + ELSE + s.shikiri_amt + END + ), + nhi_price = s.nhi_price, + nhi_amt = ( + CASE + WHEN + (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) + THEN + (s.nhi_amt * -1) + ELSE + s.nhi_amt + END + ), + v_inst_cd = s.v_inst_cd, + inst_clas_cd = ( + CASE + WHEN + (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') + THEN + '3' + WHEN + fmvt.fcl_type BETWEEN '20' AND '29' + THEN + '2' + ELSE + '1' + END + ), + bu_cd = bpnct.bu_cd, + item_cd = ppmv.mkr_cd, + item_name = ppmv.mkr_inf_1, + item_english_name = ppmv.mkr_inf_2, + pref_cd = ( + CASE + WHEN + s.v_inst_cd LIKE '00%' + THEN + ci.prefc_cd + ELSE + fmvt.prft_cd + END + ), + whspos_err_kbn = s.whspos_err_kbn, + htdnymd_err_kbn = s.htdnymd_err_kbn, + prd_exis_kbn = s.prd_exis_kbn, + fcl_exis_kbn = s.fcl_exis_kbn, + bef_hsdn_ymd = s.bef_hsdn_ymd, + bef_slip_num = s.bef_slip_num, + slip_org_kbn = s.slip_org_kbn, + kjyo_ym = s.kjyo_ym, + tksnbk_kbn = s.tksnbk_kbn, + fcl_exec_kbn = s.fcl_exec_kbn, + rec_sts_kbn = s.rec_sts_kbn, + ins_dt = s.ins_dt, + ins_usr = s.ins_usr, + dwh_upd_dt = SYSDATE() + "; + SET @insert_sales_laundering = REPLACE(@insert_sales_laundering, "$$target_table$$", target_table); + PREPARE insert_sales_laundering_stmt from @insert_sales_laundering; + EXECUTE insert_sales_laundering_stmt USING @extract_from_date, @extract_to_date; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑥ 終了' + '卸販売実績テーブル(洗替後)作成⑤ 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑦ 開始' + '卸販売実績テーブル(洗替後)作成⑥ 開始' ); - UPDATE - ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, - src05.sales_lau AS sl, - src05.sales AS s - SET - sl.inst_cd = ( - CASE - WHEN - 
(s.err_flg20 != 'M' AND sl.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') - THEN - s.v_inst_cd - ELSE - NULL - END - ) - WHERE - sub.today = CAST(s.dwh_upd_dt AS DATE) - AND sl.slip_mgt_num = s.slip_mgt_num - AND sl.row_num = s.row_num - ; + SET @update_institution_code = " + UPDATE + ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + $$target_table$$ AS tt, + src05.sales AS s + SET + tt.inst_cd = ( + CASE + WHEN + (s.err_flg20 != 'M' AND tt.inst_clas_cd IN ('2', '3')) OR (s.err_flg20 = 'M') + THEN + s.v_inst_cd + ELSE + NULL + END + ) + WHERE + sub.today = CAST(s.dwh_upd_dt AS DATE) + AND tt.slip_mgt_num = s.slip_mgt_num + AND tt.row_num = s.row_num + "; + SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); + PREPARE update_institution_code_stmt from @update_institution_code; + EXECUTE update_institution_code_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '卸販売実績テーブル(洗替後)作成⑦ 終了' + '卸販売実績テーブル(洗替後)作成⑥ 終了' ); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 4ef2075e..35c4a700 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.v_inst_merge_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.v_inst_merge_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = 
MESSAGE_TEXT; @@ -23,46 +23,50 @@ BEGIN SET @error_state = NULL, @error_msg = NULL; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】3:HCO施設コードの洗替① 開始' + '【洗替】3:HCO施設コードの洗替① 開始' ); - UPDATE ( - SELECT - v_inst_cd, - v_inst_cd_merge, - fcl_name, - fcl_kn_name, - fmt_addr, - prft_cd - FROM - internal05.v_inst_merge_t - WHERE - (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' - ) AS vimt, - src05.@target_table AS tt - SET - tt.inst_cd = ( - CASE - WHEN - tt.inst_clas_cd = '1' - THEN - tt.inst_cd - WHEN - (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') - THEN - vimt.v_inst_cd_merge - END - ) - tt.v_inst_cd = vimt.v_inst_cd_merge, - tt.inst_name = vimt.fcl_name, - tt.inst_name_kana = vimt.fcl_kn_name, - tt.address = vimt.fmt_addr, - tt.prefc_cd = vimt.prft_cd, - tt.dwh_upd_dt = SYSDATE() - WHERE - tt.v_inst_cd = vimt.v_inst_cd - AND (tt.inst_clas_cd IN ('1', '2', '3')) - ; + SET @update_institution = " + UPDATE ( + SELECT + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd + FROM + internal05.v_inst_merge_t + WHERE + (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' + ) AS vimt, + $$target_table$$ AS tt + SET + tt.inst_cd = ( + CASE + WHEN + tt.inst_clas_cd = '1' + THEN + tt.inst_cd + WHEN + (tt.inst_clas_cd = '2' OR tt.inst_clas_cd = '3') + THEN + vimt.v_inst_cd_merge + END + ), + tt.v_inst_cd = vimt.v_inst_cd_merge, + tt.inst_name = vimt.fcl_name, + tt.inst_name_kana = vimt.fcl_kn_name, + tt.address = vimt.fmt_addr, + tt.pref_cd = vimt.prft_cd, + tt.dwh_upd_dt = SYSDATE() + WHERE + tt.v_inst_cd = vimt.v_inst_cd + AND (tt.inst_clas_cd IN ('1', '2', '3')) + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' diff --git 
a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index e3ce53c7..4b5835ee 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -1,5 +1,5 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.whs_org_laundering(@target_table VARCHAR(64)) +CREATE PROCEDURE src05.whs_org_laundering(target_table VARCHAR(255)) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -7,10 +7,10 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION + DECLARE EXIT HANDLER FOR SQLEXCEPTION BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; @@ -29,11 +29,11 @@ BEGIN TRUNCATE TABLE internal05.whs_customer_org_t; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替① 終了' + '【洗替】1.卸組織洗替① 終了' ); call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替② 開始' + '【洗替】1.卸組織洗替② 開始' ); INSERT INTO @@ -43,7 +43,7 @@ BEGIN customer_cd, whs_org_cd, v_org_cd, - nm_2 + name_2 ) SELECT wcmv.whs_cd, @@ -89,30 +89,35 @@ BEGIN ON ocmv.v_org_cd = mohv2.v_cd_2 WHERE wcmv.rec_sts_kbn != '9' - AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date; - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替② 終了' - ); - - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替③ 開始' - ); - - UPDATE - src05.sales_lau AS sl, src05.@target_table AS tt - SET - sl.whs_org_cd = tt.whs_org_cd, - sl.v_whsorg_cd = tt.v_org_cd, - sl.whs_org_name = tt.nm_2 - WHERE - st.whs_cd = tt.whs_cd - AND st.whs_sub_cd = tt.whs_sub_cd - AND st.customer_cd = tt.customer_cd + AND 
src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date ; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - '【洗替】1.卸組織洗替③ 終了' + '【洗替】1.卸組織洗替② 終了' + ); + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 開始' + ); + + SET @update_organization = " + UPDATE + $$target_table$$ AS tt, internal05.whs_customer_org_t AS wcot + SET + tt.whs_org_cd = wcot.whs_org_cd, + tt.v_whsorg_cd = wcot.v_org_cd, + tt.whs_org_name = wcot.name_2 + WHERE + wcot.whs_cd = tt.rec_whs_cd + AND wcot.whs_sub_cd = tt.rec_whs_sub_cd + AND wcot.customer_cd = tt.rec_cust_cd + "; + SET @update_organization = REPLACE(@update_organization, "$$target_table$$", target_table); + PREPARE update_organization_stmt from @update_organization; + EXECUTE update_organization_stmt; + + call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + '【洗替】1.卸組織洗替③ 終了' ); END \ No newline at end of file From 0421aa8ccd44f1bb4463c7c08a2ae07cb181da24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 14 Jun 2023 11:56:44 +0900 Subject: [PATCH 53/86] =?UTF-8?q?feat:=20=E4=BB=95=E6=A7=98=E5=A4=89?= =?UTF-8?q?=E6=9B=B4=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 170 ++++++++++++------ 1 file changed, 113 insertions(+), 57 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index e325d7a4..2e4f9ca7 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -55,76 +55,128 @@ def _set_disabled_dct_inst_merge(db: Database): _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') +def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) 
-> list[dict]: + # 納入先処方元マスタから、DCF施設コードに対応した領域コードの取得 + try: + sql = """ + SELECT + ta_cd + FROM + src05.ult_ident_presc + WHERE + presc_cd = :dcf_inst_cd + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + ult_ident_presc_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('納入先処方元マスタから領域コードの取得に成功') + except Exception as e: + logger.debug('納入先処方元マスタから領域コードの取得に失敗') + raise e + + return ult_ident_presc_ta_cd_records + + def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): # 納入先処方元マスタの追加 logger.info('納入先処方元マスタの登録 開始') for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + ult_ident_presc_ta_cd_records = _select_ult_ident_presc_ta_cd(db, enabled_merge_record['dcf_inst_cd']) + for ult_ident_presc_ta_cd_record in ult_ident_presc_ta_cd_records: + ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + enabled_merge_record['dup_opp_cd'], + ult_ident_presc_ta_cd_record['ta_cd']) + for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): + logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') + # 処方元コード=重複時相手先コードが発生した場合 + if ult_ident_presc_row['opp_count'] > 0: + continue - ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], - enabled_merge_record['dup_opp_cd']) - for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): - logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') - # 処方元コード=重複時相手先コードが発生した場合 - if ult_ident_presc_row['opp_count'] > 0: - break + start_date = _str_to_date_time(ult_ident_presc_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + set_start_date = _date_time_to_str(set_start_date) + 
is_exists_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: + _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, + '納入先処方元マスタの重複予定データの削除') + is_exists_duplicate_key = True + else: + logger.info('納入先処方元マスタの重複予定データなし') + _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - start_date = _str_to_date_time(ult_ident_presc_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day - set_start_date = _date_time_to_str(set_start_date) - is_exists_duplicate_key = False - if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: - _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, - '納入先処方元マスタの重複予定データの削除') - is_exists_duplicate_key = True - else: - logger.info('納入先処方元マスタの重複予定データなし') - _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - - # 適用終了日 < 適用開始日の場合 - if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '開始月>適用開始日のため物理削除') - continue - # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 - if not is_exists_duplicate_key: - last_end_date = tekiyo_month_first_day - timedelta(days=1) - # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 - _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + # 適用終了日 < 適用開始日の場合 + if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: + # 対象レコードを物理削除する + _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, + '開始月>適用開始日のため物理削除') + continue + # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 + if not is_exists_duplicate_key: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) 
logger.info('納入先処方元マスタの登録 終了') +def _select_emp_chg_inst_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 従業員担当施設マスタから、DCF施設コードに対応した領域コードの取得 + try: + sql = """ + SELECT + ta_cd + FROM + src05.emp_chg_inst + WHERE + inst_cd = :dcf_inst_cd + AND enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + emp_chg_inst_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('従業員担当施設マスタから領域コードの取得に成功') + except Exception as e: + logger.debug('従業員担当施設マスタから領域コードの取得に失敗') + raise e + + return emp_chg_inst_ta_cd_records + + def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): # 従業員担当施設マスタの登録 logger.info('従業員担当施設マスタの登録 開始') for enabled_merge_record in enabled_dst_inst_merge_records: tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd']) - for emp_chg_inst_row in emp_chg_inst_records: - # 重複時相手先コードが存在したかのチェック - if emp_chg_inst_row['opp_count'] > 0: - break + emp_chg_inst_ta_cd_records = _select_emp_chg_inst_ta_cd(db, enabled_merge_record['dcf_inst_cd']) + for emp_chg_inst_ta_cd_record in emp_chg_inst_ta_cd_records: + emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'], + emp_chg_inst_ta_cd_record['ta_cd']) + for emp_chg_inst_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_chg_inst_row['opp_count'] > 0: + continue - start_date = _str_to_date_time(emp_chg_inst_row['start_date']) - set_start_date = start_date \ - if start_date > tekiyo_month_first_day else tekiyo_month_first_day + start_date = _str_to_date_time(emp_chg_inst_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day - _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], 
_date_time_to_str(set_start_date), - emp_chg_inst_row) + _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), + emp_chg_inst_row) - # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 - if start_date < tekiyo_month_first_day: - # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する - last_end_date = tekiyo_month_first_day - timedelta(days=1) - _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), - emp_chg_inst_row) - continue - # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する - _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], - emp_chg_inst_row['start_date']) + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 + if start_date < tekiyo_month_first_day: + # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), + emp_chg_inst_row) + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) logger.info('従業員担当施設マスタの登録 終了') @@ -468,7 +520,7 @@ def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, raise e -def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: +def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: # emp_chg_instからSELECT try: sql = """ @@ -487,16 +539,18 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis FROM src05.emp_chg_inst AS eciopp WHERE - eciopp.inst_cd = :dup_opp_cd + eciopp.inst_cd = :dup_opp_cd + AND eciopp.ta_cd = :ta_cd ) AS opp_count FROM src05.emp_chg_inst AS eci WHERE eci.inst_cd = :dcf_inst_cd + AND eci.ta_cd = :ta_cd AND eci.enabled_flg = 'Y' AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < eci.end_date """ - params = {'dcf_inst_cd': 
dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} emp_chg_inst_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('従業員担当施設マスタの取得 成功') @@ -506,7 +560,7 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> lis return emp_chg_inst_records -def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> list[dict]: +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: # ult_ident_prescからSELECT try: sql = """ @@ -522,15 +576,17 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str) -> FROM src05.ult_ident_presc AS uipopp WHERE - uipopp.presc_cd = :dup_opp_cd + uipopp.presc_cd = :dup_opp_cd + AND uipopp.ta_cd = :ta_cd ) AS opp_count FROM src05.ult_ident_presc AS uip WHERE uip.presc_cd = :dcf_inst_cd + AND uip.ta_cd = :ta_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ - params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd} + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('納入先処方元マスタの取得 成功') From a808e03ea816766fec9008266b1ab47dc079c97d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 15 Jun 2023 15:55:16 +0900 Subject: [PATCH 54/86] =?UTF-8?q?feat:=20=E3=83=87=E3=83=83=E3=83=88?= =?UTF-8?q?=E3=83=AD=E3=82=B8=E3=83=83=E3=82=AF=E3=81=AE=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index 2e4f9ca7..ab4d107a 100644 --- 
a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -107,14 +107,8 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict logger.info('納入先処方元マスタの重複予定データなし') _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) - # 適用終了日 < 適用開始日の場合 - if _str_to_date_time(ult_ident_presc_row['end_date']) < start_date: - # 対象レコードを物理削除する - _delete_ult_ident_presc(db, ult_ident_presc_row['start_date'], ult_ident_presc_row, - '開始月>適用開始日のため物理削除') - continue # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 - if not is_exists_duplicate_key: + if not is_exists_duplicate_key and _str_to_date_time(ult_ident_presc_row['end_date']) >= start_date: last_end_date = tekiyo_month_first_day - timedelta(days=1) # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) From 7a9293187fc9dd2f2368111575e7b56e969f4a10 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Tue, 20 Jun 2023 18:16:01 +0900 Subject: [PATCH 55/86] =?UTF-8?q?LOAD=E6=96=87=E5=AE=9F=E8=A1=8C=E6=99=82?= =?UTF-8?q?=E3=81=ABWARNING=E3=81=8C=E7=99=BA=E7=94=9F=E3=81=97=E3=81=9F?= =?UTF-8?q?=E3=81=A8=E3=81=8D=E3=80=81WARNING=E5=86=85=E5=AE=B9=E3=82=92IN?= =?UTF-8?q?FO=E3=81=A7=E3=83=AD=E3=82=B0=E5=87=BA=E5=8A=9B=E3=81=97?= =?UTF-8?q?=E3=81=A6=E3=80=81=E4=BE=8B=E5=A4=96=E3=82=B9=E3=83=AD=E3=83=BC?= =?UTF-8?q?=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E6=94=B9=E4=BF=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 10 +++++++++- .../tests/batch/vjsk/vjsk_load/test_vjsk_load.py | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 2d99951b..61f287c5 
100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -27,7 +27,7 @@ class VjskDataLoadManager: # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") - # orgにload ※warningは1148エラーになるらしい + # orgにload ※warningが発生すれば異常終了させる sql = f"""\ LOAD DATA LOCAL INFILE :src_file_name INTO TABLE {table_name_org} @@ -37,6 +37,14 @@ class VjskDataLoadManager: """ result = db.execute(sql, {"src_file_name": src_file_name}) logger.debug(sql) + # MEMO : sqlalchemy(engine=pymysql)としたときの result.context.cursor は、engineに依存してクラスが異なる + # https://nds-tyo.backlog.com/view/NEWDWH2021-1006#comment-266127218 + if result.context.cursor._result.warning_count > 0: + result_w = db.execute("SHOW WARNINGS;") + for row in result_w.fetchall(): + logger.info(f"SHOW WARNINGS : {row}") + raise Exception("LOAD文実行時にWARNINGが発生しました。") + logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') # org→srcにinsert select diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index a648724f..a5cfd370 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -1208,7 +1208,7 @@ class TestImportFileToDb: VjskDataLoadManager.load(target_dict) # 検証 - assert str(e.value).startswith("SQL Error:") > 0 + assert str(e.value).startswith("LOAD文実行時にWARNINGが発生しました。") > 0 # teardown From 4bde68a27c631c4e7c16e1fd4bdc98193c000504 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Wed, 21 Jun 2023 18:11:32 +0900 Subject: [PATCH 56/86] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../laundering/sales_results_laundering.py | 37 ++--- 
.../src05/sales_lau_upsert.sql | 131 +++++------------- .../src05/whs_org_laundering.sql | 1 - 3 files changed, 56 insertions(+), 113 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 4fe4126a..04d0dcaa 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -4,7 +4,7 @@ from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql from src.system_var import environment -logger = get_logger('卸実績洗替') +logger = get_logger('卸卸販売洗替') def exec(): @@ -48,7 +48,7 @@ def _call_sales_lau_upsert(db: Database): db.execute(f""" CALL src05.sales_lau_upsert( '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', - (src05.get_syor_date() - {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD}), + (src05.get_syor_date() - interval {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD} day), src05.get_syor_date() ) """) @@ -70,27 +70,28 @@ def _call_whs_org_laundering(db: Database): def _update_sales_lau_from_vop_hco_merge_v(db: Database): # HCO施設コードの洗替 - if _count_vop_hco_merge_v(db) >= 1: - _call_v_inst_merge_laundering(db) + if _count_v_inst_merge_t(db) == 0: + logger.info('V施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') return - logger.info('V施設統合マスタにデータは存在しません') + + _call_v_inst_merge_laundering(db) return -def _count_vop_hco_merge_v(db: Database) -> int: - # V施設統合マスタのデータ件数の取得 +def _count_v_inst_merge_t(db: Database) -> int: + # V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 try: sql = """ SELECT COUNT(v_inst_cd) AS cnt FROM - src05.vop_hco_merge_v + internal05.v_inst_merge_t """ result = db.execute_select(sql) logging_sql(logger, sql) - logger.info('V施設統合マスタのデータ件数の取得 成功') + logger.info('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') except Exception as e: - logger.debug('V施設統合マスタのデータ件数の取得 失敗') + logger.debug('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') raise e return 
result[0]['cnt'] @@ -128,27 +129,27 @@ def _call_hco_to_mdb_laundering(db: Database): def _update_sales_lau_from_dcf_inst_merge(db: Database): # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする - if _count_dcf_inst_merge(db) >= 1: - _call_inst_merge_laundering(db) + if _count_inst_merge_t(db) == 0: + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') return - logger.info('DCF施設統合マスタにデータは存在しません') + _call_inst_merge_laundering(db) return -def _count_dcf_inst_merge(db: Database) -> int: - # DCF施設統合マスタのデータ件数の取得 +def _count_inst_merge_t(db: Database) -> int: + # アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 try: sql = """ SELECT COUNT(dcf_inst_cd) AS cnt FROM - src05.dcf_inst_merge + internal05.inst_merge_t """ result = db.execute_select(sql) logging_sql(logger, sql) - logger.info('DCF施設統合マスタのデータ件数の取得 成功') + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') except Exception as e: - logger.debug('DCF施設統合マスタのデータ件数の取得 失敗') + logger.debug('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') raise e return result[0]['cnt'] diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index ecb2b671..182c3b3a 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -137,7 +137,10 @@ BEGIN '卸販売実績テーブル(洗替後)作成⑤ 開始' ); - SET @insert_sales_laundering = " + SET @extract_from_datetime = CAST(extract_from_date AS DATETIME); + SET @extract_to_datetime = CAST(extract_to_date AS DATETIME); + + SET @upsert_sales_launderning = " INSERT INTO $$target_table$$ ( rec_whs_cd, @@ -256,7 +259,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nonyu_qty * -1) + -s.nonyu_qty ELSE s.nonyu_qty END AS nonyu_qty, @@ -265,7 +268,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nonyu_amt * -1) + -s.nonyu_amt ELSE s.nonyu_amt END AS nonyu_amt, @@ -274,7 +277,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd, 1) = 2 AND 
(s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.shikiri_amt * -1) + -s.shikiri_amt ELSE s.shikiri_amt END AS shikiri_amt, @@ -283,7 +286,7 @@ BEGIN WHEN (LEFT(s.v_tran_cd,1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) THEN - (s.nhi_amt * -1) + -s.nhi_amt ELSE s.nhi_amt END AS nhi_amt, @@ -307,7 +310,7 @@ BEGIN ci.prefc_cd ELSE fmvt.prft_cd - END AS prft_cd, + END AS pref_cd, s.whspos_err_kbn, s.htdnymd_err_kbn, s.prd_exis_kbn, @@ -324,10 +327,11 @@ BEGIN SYSDATE() FROM ( SELECT - CAST(SYSDATE() AS DATE) AS today + ? AS extract_from_datetime, + ? AS extract_to_datetime ) AS sub INNER JOIN src05.sales AS s - ON sub.today = CAST(s.dwh_upd_dt AS DATE) + ON s.dwh_upd_dt BETWEEN sub.extract_from_datetime AND sub.extract_to_datetime LEFT OUTER JOIN src05.phm_prd_mst_v AS ppmv ON s.comm_cd = ppmv.prd_cd AND STR_TO_DATE(s.hsdn_ymd,'%Y%m%d') BETWEEN ppmv.start_date AND ppmv.end_date @@ -341,21 +345,15 @@ BEGIN LEFT OUTER JOIN src05.com_inst AS ci ON s.v_inst_cd = ci.dcf_dsf_inst_cd WHERE - (? <= s.dwh_upd_dt AND s.dwh_upd_dt <= ?) 
- AND ( - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') - OR ( - s.rec_sts_kbn = '0' - AND s.err_flg20 != 'M' - AND s.v_tran_cd IN (110, 120, 210, 220) - AND ( - ( - s.fcl_exec_kbn NOT IN ('2', '5') - AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn <> 1) - ) - OR s.fcl_exec_kbn IS NULL - ) - ) + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + OR ( + s.rec_sts_kbn = '0' + AND s.err_flg20 != 'M' + AND s.v_tran_cd IN (110, 120, 210, 220) + AND ( + (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) + OR s.fcl_exec_kbn IS NULL + ) ) ON DUPLICATE KEY UPDATE @@ -399,78 +397,20 @@ BEGIN address = s.v_inst_addr, comm_cd = s.comm_cd, comm_name = s.comm_name, - nonyu_qty = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_qty * -1) - ELSE - s.nonyu_qty - END - ), + nonyu_qty = VALUES(nonyu_qty), nonyu_price = s.nonyu_price, - nonyu_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nonyu_amt * -1) - ELSE - s.nonyu_amt - END - ), + nonyu_amt = VALUES(nonyu_amt), shikiri_price = s.shikiri_price, - shikiri_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.shikiri_amt * -1) - ELSE - s.shikiri_amt - END - ), + shikiri_amt = VALUES(shikiri_amt), nhi_price = s.nhi_price, - nhi_amt = ( - CASE - WHEN - (LEFT(s.v_tran_cd, 1) = 2 AND (s.err_flg20 IS NULL OR s.err_flg20 != 'M')) - THEN - (s.nhi_amt * -1) - ELSE - s.nhi_amt - END - ), + nhi_amt = VALUES(nhi_amt), v_inst_cd = s.v_inst_cd, - inst_clas_cd = ( - CASE - WHEN - (fmvt.fcl_type = 'A1' OR fmvt.fcl_type = 'A0') - THEN - '3' - WHEN - fmvt.fcl_type BETWEEN '20' AND '29' - THEN - '2' - ELSE - '1' - END - ), + inst_clas_cd = VALUES(inst_clas_cd), bu_cd = bpnct.bu_cd, item_cd = ppmv.mkr_cd, item_name = ppmv.mkr_inf_1, item_english_name = ppmv.mkr_inf_2, - pref_cd = ( - CASE - WHEN - s.v_inst_cd LIKE '00%' - THEN - 
ci.prefc_cd - ELSE - fmvt.prft_cd - END - ), + pref_cd = VALUES(pref_cd), whspos_err_kbn = s.whspos_err_kbn, htdnymd_err_kbn = s.htdnymd_err_kbn, prd_exis_kbn = s.prd_exis_kbn, @@ -486,9 +426,9 @@ BEGIN ins_usr = s.ins_usr, dwh_upd_dt = SYSDATE() "; - SET @insert_sales_laundering = REPLACE(@insert_sales_laundering, "$$target_table$$", target_table); - PREPARE insert_sales_laundering_stmt from @insert_sales_laundering; - EXECUTE insert_sales_laundering_stmt USING @extract_from_date, @extract_to_date; + SET @upsert_sales_launderning = REPLACE(@upsert_sales_launderning, "$$target_table$$", target_table); + PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; + EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' @@ -499,8 +439,11 @@ BEGIN ); SET @update_institution_code = " - UPDATE - ( SELECT CAST(SYSDATE() AS DATE) AS today ) AS sub, + UPDATE ( + SELECT + ? AS extract_from_datetime, + ? 
AS extract_to_datetime + ) AS sub, $$target_table$$ AS tt, src05.sales AS s SET @@ -515,13 +458,13 @@ BEGIN END ) WHERE - sub.today = CAST(s.dwh_upd_dt AS DATE) + s.dwh_upd_dt BETWEEN sub.extract_from_datetime AND sub.extract_to_datetime AND tt.slip_mgt_num = s.slip_mgt_num AND tt.row_num = s.row_num "; SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); PREPARE update_institution_code_stmt from @update_institution_code; - EXECUTE update_institution_code_stmt; + EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index 4b5835ee..65dc9e30 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -67,7 +67,6 @@ BEGIN FROM src05.mkr_org_horizon_v AS mohv INNER JOIN ( SELECT - count(1) AS c, v_cd_2, MAX(dwh_upd_dt) AS dwh_upd_dt_latest FROM From 3b2362442d130853af51cbda8c1d5cf876262310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 22 Jun 2023 10:34:14 +0900 Subject: [PATCH 57/86] =?UTF-8?q?feat:=20=E5=8D=98=E4=BD=93=E8=A9=A6?= =?UTF-8?q?=E9=A8=93NG=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/laundering/sales_results_laundering.py | 2 +- rds_mysql/stored_procedure/src05/sales_lau_upsert.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 04d0dcaa..530fd9b9 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ 
b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -141,7 +141,7 @@ def _count_inst_merge_t(db: Database) -> int: try: sql = """ SELECT - COUNT(dcf_inst_cd) AS cnt + COUNT(dcf_dsf_inst_cd) AS cnt FROM internal05.inst_merge_t """ diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 182c3b3a..63049f09 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -138,7 +138,7 @@ BEGIN ); SET @extract_from_datetime = CAST(extract_from_date AS DATETIME); - SET @extract_to_datetime = CAST(extract_to_date AS DATETIME); + SET @extract_to_datetime = ADDTIME(CAST(extract_to_date AS DATETIME), '23:59:59'); SET @upsert_sales_launderning = " INSERT INTO From d285a28c9636d3dbee1d81620ad2301a7b7bf412 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 23 Jun 2023 20:03:37 +0900 Subject: [PATCH 58/86] =?UTF-8?q?LOAD=E3=81=AE=E5=AE=9F=E8=A1=8C=E7=B5=90?= =?UTF-8?q?=E6=9E=9C=E3=81=AE=E3=83=AF=E3=83=BC=E3=83=8B=E3=83=B3=E3=82=B0?= =?UTF-8?q?=E3=82=92=E3=82=AD=E3=83=A3=E3=83=83=E3=83=81=E3=81=97=E3=81=A6?= =?UTF-8?q?=E4=BE=8B=E5=A4=96=E3=81=A7=E8=90=BD=E3=81=A8=E3=81=99=E5=87=A6?= =?UTF-8?q?=E7=90=86=E3=82=92=E5=AE=9F=E8=A3=85?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 17 +++++++++-------- .../batch/vjsk/vjsk_load/test_vjsk_load.py | 1 + 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 61f287c5..f869b983 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -35,17 +35,18 @@ class VjskDataLoadManager: ENCLOSED BY '\"' IGNORE 1 LINES; """ + db.begin() result = 
db.execute(sql, {"src_file_name": src_file_name}) - logger.debug(sql) - # MEMO : sqlalchemy(engine=pymysql)としたときの result.context.cursor は、engineに依存してクラスが異なる - # https://nds-tyo.backlog.com/view/NEWDWH2021-1006#comment-266127218 - if result.context.cursor._result.warning_count > 0: - result_w = db.execute("SHOW WARNINGS;") - for row in result_w.fetchall(): - logger.info(f"SHOW WARNINGS : {row}") + result_w = db.execute("SHOW WARNINGS;") + has_mysql_warnings = False + for row in result_w.fetchall(): + has_mysql_warnings = True + logger.info(f"SHOW WARNINGS : {row}") + if has_mysql_warnings: raise Exception("LOAD文実行時にWARNINGが発生しました。") - + logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') + db.commit() # org→srcにinsert select db.begin() diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py index a5cfd370..2dbe2ef5 100644 --- a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -35,6 +35,7 @@ class TestImportFileToDb: self.db = database self.db.connect() + self.db.execute("set sql_mode = 'TRADITIONAL';") # self.db.begin() # testing From 8534e6d66bcd53ced315251ceec3a198384ded4a Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 26 Jun 2023 13:38:23 +0900 Subject: [PATCH 59/86] =?UTF-8?q?feat:=20=E3=83=87=E3=83=BC=E3=82=BF?= =?UTF-8?q?=E3=83=99=E3=83=BC=E3=82=B9=E6=93=8D=E4=BD=9C=E9=83=A8=E5=93=81?= =?UTF-8?q?=E3=82=92=E4=BF=AE=E6=AD=A3=E3=80=82=E3=83=91=E3=83=A9=E3=83=A1?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E6=8C=87=E5=AE=9A=E3=81=AB=E3=82=88=E3=82=8A?= =?UTF-8?q?=E3=80=81AUTOCOMMIT=E3=81=A7=E6=8E=A5=E7=B6=9A=E3=81=99?= =?UTF-8?q?=E3=82=8B=E3=81=8B=E3=81=A9=E3=81=86=E3=81=8B=E3=82=92=E5=88=86?= =?UTF-8?q?=E5=B2=90=E3=81=99=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../laundering/sales_results_laundering.py | 4 +-- ecs/jskult-batch-daily/src/db/database.py | 26 ++++++++++++++----- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 530fd9b9..87958e5c 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -1,14 +1,14 @@ +from src.batch.batch_functions import logging_sql from src.db.database import Database from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger -from src.batch.batch_functions import logging_sql from src.system_var import environment logger = get_logger('卸卸販売洗替') def exec(): - db = Database.get_instance() + db = Database.get_instance(autocommit=True) try: db.connect() logger.debug('処理開始') diff --git a/ecs/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-daily/src/db/database.py index f67a21b9..b9a745be 100644 --- a/ecs/jskult-batch-daily/src/db/database.py +++ b/ecs/jskult-batch-daily/src/db/database.py @@ -13,15 +13,17 @@ logger = get_logger(__name__) class Database: """データベース操作クラス""" __connection: Connection = None - __engine: Engine = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None __host: str = None __port: str = None __username: str = None __password: str = None __schema: str = None + __autocommit: bool = None __connection_string: str = None - def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None: + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: """このクラスの新たなインスタンスを初期化します Args: @@ -30,12 +32,14 @@ class Database: host (str): DBホスト名 port (int): DBポート schema (str): DBスキーマ名 + 
autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. """ self.__username = username self.__password = password self.__host = host self.__port = int(port) self.__schema = schema + self.__autocommit = autocommit self.__connection_string = URL.create( drivername='mysql+pymysql', @@ -47,16 +51,20 @@ class Database: query={"charset": "utf8mb4"} ) - self.__engine = create_engine( + self.__transactional_engine = create_engine( self.__connection_string, pool_timeout=5, poolclass=QueuePool ) + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + @classmethod - def get_instance(cls): + def get_instance(cls, autocommit=False): """インスタンスを取得します + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. Returns: Database: DB操作クラスインスタンス """ @@ -65,7 +73,8 @@ class Database: password=environment.DB_PASSWORD, host=environment.DB_HOST, port=environment.DB_PORT, - schema=environment.DB_SCHEMA + schema=environment.DB_SCHEMA, + autocommit=autocommit ) @retry( @@ -77,12 +86,15 @@ class Database: stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT)) def connect(self): """ - DBに接続します。接続に失敗した場合、リトライします。 + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) Raises: DBException: 接続失敗 """ try: - self.__connection = self.__engine.connect() + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) except Exception as e: raise DBException(e) From 00175d26f7213d912aadecd164898de913736633 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Mon, 26 Jun 2023 14:20:17 +0900 Subject: [PATCH 60/86] =?UTF-8?q?feat:=20=E3=83=AC=E3=83=93=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../src/batch/laundering/sales_results_laundering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py index 530fd9b9..05086e4a 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -4,7 +4,7 @@ from src.logging.get_logger import get_logger from src.batch.batch_functions import logging_sql from src.system_var import environment -logger = get_logger('卸卸販売洗替') +logger = get_logger('卸販売洗替') def exec(): From 07d9f3785353ef33b1e118d9424878df59d45ce7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 27 Jun 2023 09:33:00 +0900 Subject: [PATCH 61/86] =?UTF-8?q?feat:=20=E4=B8=8D=E8=A6=81=E3=81=AA?= =?UTF-8?q?=E3=82=B9=E3=83=9A=E3=83=BC=E3=82=B9=E7=AD=89=E3=81=AE=E5=89=8A?= =?UTF-8?q?=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../stored_procedure/src05/sales_lau_upsert.sql | 16 ++++++++-------- .../src05/v_inst_merge_laundering.sql | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 63049f09..5f26b9c8 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -41,7 +41,7 @@ BEGIN internal05.bu_prd_name_contrast_t ( prd_cd, bu_cd, - phm_itm_cd, + phm_itm_cd, pp_start_date, pp_end_date, update_date, @@ -344,14 +344,14 @@ BEGIN AND STR_TO_DATE(s.hsdn_ymd, '%Y%m%d') BETWEEN bpnct.bp_start_date AND bpnct.bp_end_date LEFT OUTER JOIN src05.com_inst AS ci ON s.v_inst_cd = ci.dcf_dsf_inst_cd - WHERE - (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') + WHERE + (s.rec_sts_kbn = '0' AND s.err_flg20 = 'M') OR ( 
s.rec_sts_kbn = '0' AND s.err_flg20 != 'M' AND s.v_tran_cd IN (110, 120, 210, 220) AND ( - (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) + (s.fcl_exec_kbn NOT IN ('2', '5') AND (s.fcl_exec_kbn != '6' OR ppmv.prd_sale_kbn != 1)) OR s.fcl_exec_kbn IS NULL ) ) @@ -427,8 +427,8 @@ BEGIN dwh_upd_dt = SYSDATE() "; SET @upsert_sales_launderning = REPLACE(@upsert_sales_launderning, "$$target_table$$", target_table); - PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; - EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; + PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; + EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' @@ -463,8 +463,8 @@ BEGIN AND tt.row_num = s.row_num "; SET @update_institution_code = REPLACE(@update_institution_code, "$$target_table$$", target_table); - PREPARE update_institution_code_stmt from @update_institution_code; - EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; + PREPARE update_institution_code_stmt from @update_institution_code; + EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 35c4a700..7c1dee91 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -63,10 +63,10 @@ BEGIN WHERE tt.v_inst_cd = vimt.v_inst_cd AND (tt.inst_clas_cd IN ('1', '2', '3')) - "; - SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); - PREPARE update_institution_stmt 
from @update_institution; - EXECUTE update_institution_stmt; + "; + SET @update_institution = REPLACE(@update_institution, "$$target_table$$", target_table); + PREPARE update_institution_stmt from @update_institution; + EXECUTE update_institution_stmt; call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' From 6239ef783a21edde213c4c753f8f0fcd40c144b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Tue, 27 Jun 2023 14:02:47 +0900 Subject: [PATCH 62/86] =?UTF-8?q?feat:=20=E4=BE=8B=E5=A4=96=E5=87=A6?= =?UTF-8?q?=E7=90=86=E3=81=AE=E3=83=A1=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8?= =?UTF-8?q?=E3=82=AA=E3=83=90=E3=83=BC=E3=83=95=E3=83=AD=E3=83=BC=E3=81=AE?= =?UTF-8?q?=E5=AF=BE=E5=87=A6=E7=AD=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src05/hco_to_mdb_laundering.sql | 20 ++++++++---- .../src05/inst_merge_laundering.sql | 12 +++++-- .../src05/sales_lau_delete.sql | 12 +++++-- .../src05/sales_lau_upsert.sql | 32 +++++++++++-------- .../src05/v_inst_merge_laundering.sql | 12 +++++-- .../src05/whs_org_laundering.sql | 20 ++++++++---- 6 files changed, 72 insertions(+), 36 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index b3cbfc6e..8201f3bc 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -14,23 +14,29 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); 
SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A① 開始'); TRUNCATE TABLE internal05.hco_cnv_mdb_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A① 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A② 開始'); INSERT INTO @@ -74,10 +80,10 @@ BEGIN AND ci.delete_flg = '0' ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A② 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 開始'); SET @update_institution = " @@ -96,7 +102,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_A③ 終了'); END diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 6a0642a9..05908ca1 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 
CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 開始'); SET @update_institution = " @@ -53,7 +59,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】4:メルク施設コードの洗替_B① 終了'); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index c1610435..8b312d6f 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, 
'卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 開始'); SET @delete_data = " @@ -35,7 +41,7 @@ BEGIN PREPARE delete_data_stmt from @delete_data; EXECUTE delete_data_stmt USING @laundering_period_year; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); END \ No newline at end of file diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 5f26b9c8..72a86310 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -15,25 +15,31 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成① 開始' ); TRUNCATE TABLE internal05.bu_prd_name_contrast_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成① 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 開始' ); @@ -65,22 +71,22 @@ BEGIN ppmv.rec_sts_kbn != '9' ; - call medaca_common.put_info_log(schema_name, 
procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成② 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成③ 開始' ); TRUNCATE TABLE internal05.fcl_mst_v_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成③ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成④ 開始' ); @@ -129,11 +135,11 @@ BEGIN fmv1.rec_sts_kbn != '9' ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成④ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 開始' ); @@ -430,11 +436,11 @@ BEGIN PREPARE upsert_sales_launderning_stmt from @upsert_sales_launderning; EXECUTE upsert_sales_launderning_stmt USING @extract_from_datetime, @extract_to_datetime; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑤ 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 開始' ); @@ -466,7 +472,7 @@ BEGIN PREPARE update_institution_code_stmt from @update_institution_code; EXECUTE update_institution_code_stmt USING @extract_from_datetime, @extract_to_datetime; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL 
medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)作成⑥ 終了' ); diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 7c1dee91..06af3867 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -14,15 +14,21 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 開始' ); @@ -68,7 +74,7 @@ BEGIN PREPARE update_institution_stmt from @update_institution; EXECUTE update_institution_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】3:HCO施設コードの洗替① 終了' ); diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql index 65dc9e30..e183d0c8 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -14,25 +14,31 @@ BEGIN BEGIN GET DIAGNOSTICS CONDITION 1 @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + CALL 
medaca_common.put_error_log(schema_name, procedure_name, procedure_args, 'whs_org_launderingでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + ELSE @error_msg + END + ); SIGNAL SQLSTATE '45000' SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; END; SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替① 開始' ); TRUNCATE TABLE internal05.whs_customer_org_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替① 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替② 開始' ); @@ -91,11 +97,11 @@ BEGIN AND src05.get_syor_date() BETWEEN wcmv.start_date AND wcmv.end_date ; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替② 終了' ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替③ 開始' ); @@ -115,7 +121,7 @@ BEGIN PREPARE update_organization_stmt from @update_organization; EXECUTE update_organization_stmt; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '【洗替】1.卸組織洗替③ 終了' ); From a767457749dc789415efcab987131f43ed434415 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 23 Jun 2023 20:15:35 +0900 Subject: [PATCH 63/86] =?UTF-8?q?LOAD=E5=AE=9F=E8=A1=8C=E7=B5=90=E6=9E=9C?= 
=?UTF-8?q?=E3=80=81=E3=83=AF=E3=83=BC=E3=83=8B=E3=83=B3=E3=82=B01261?= =?UTF-8?q?=E3=81=AF=E8=A8=B1=E5=AE=B9=E3=81=99=E3=82=8B=E3=81=A8=E3=81=97?= =?UTF-8?q?=E3=81=A6=E4=BE=8B=E5=A4=96=E5=88=A4=E5=AE=9A=E3=81=8B=E3=82=89?= =?UTF-8?q?=E9=99=A4=E5=A4=96=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index f869b983..fa82805d 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -37,14 +37,18 @@ class VjskDataLoadManager: """ db.begin() result = db.execute(sql, {"src_file_name": src_file_name}) + logger.debug(sql) result_w = db.execute("SHOW WARNINGS;") has_mysql_warnings = False for row in result_w.fetchall(): + # 例外スロー対象から除外 : Warning(1261) Row {ROW NUMBER} doesn't contain data for all columns + if len(row) >= 2 and row[0] == "Warning" and row[1] == 1261: + logger.info(f"SHOW WARNINGS (SKIP) : {row}") + continue has_mysql_warnings = True logger.info(f"SHOW WARNINGS : {row}") if has_mysql_warnings: raise Exception("LOAD文実行時にWARNINGが発生しました。") - logger.debug(sql) logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') db.commit() From 2d56d8242013621b95dc6f86484e313bf42370a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Thu, 29 Jun 2023 10:53:04 +0900 Subject: [PATCH 64/86] =?UTF-8?q?feat:=20=E4=B8=8D=E5=85=B7=E5=90=88?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=E3=80=81=E5=BC=95=E6=95=B0=E5=87=BA=E5=8A=9B?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C=E3=80=81=E4=BE=8B=E5=A4=96=E5=87=A6=E7=90=86?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../stored_procedure/src05/hco_to_mdb_laundering.sql | 4 ++-- .../stored_procedure/src05/inst_merge_laundering.sql | 4 ++-- rds_mysql/stored_procedure/src05/sales_lau_delete.sql | 8 +++++--- rds_mysql/stored_procedure/src05/sales_lau_upsert.sql | 9 +++++---- .../stored_procedure/src05/v_inst_merge_laundering.sql | 6 ++---- rds_mysql/stored_procedure/src05/whs_org_laundering.sql | 4 ++-- 6 files changed, 18 insertions(+), 17 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql index 8201f3bc..eacd56e9 100644 --- a/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql +++ b/rds_mysql/stored_procedure/src05/hco_to_mdb_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'hco_to_mdb_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'hco_to_mdb_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql index 05908ca1..39f6e431 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'inst_merge_launderingでエラーが発生', @error_state, @error_msg); 
SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql index 8b312d6f..39e8f065 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_delete.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_delete.sql @@ -7,7 +7,8 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_delete'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table, + 'laundering_period_year', laundering_period_year); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +19,7 @@ BEGIN 'sales_lau_deleteでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); @@ -39,7 +40,8 @@ BEGIN "; SET @delete_data = REPLACE(@delete_data, "$$target_table$$", target_table); PREPARE delete_data_stmt from @delete_data; - EXECUTE delete_data_stmt USING @laundering_period_year; + SET @interval_year = laundering_period_year; + EXECUTE delete_data_stmt USING @interval_year; CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, '卸販売実績テーブル(洗替後)過去5年以前のデータ削除① 終了'); diff --git a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql index 72a86310..56758f18 100644 --- a/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql +++ b/rds_mysql/stored_procedure/src05/sales_lau_upsert.sql @@ -1,6 +1,6 @@ -- A5M2で実行時に[SQL] - [スラッシュ(/)のみの行でSQLを区切る]に変えてから実行する -CREATE PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date date, - extract_to_date date) +CREATE 
PROCEDURE src05.sales_lau_upsert(target_table VARCHAR(255), extract_from_date DATE, + extract_to_date DATE) SQL SECURITY INVOKER BEGIN -- スキーマ名 @@ -8,7 +8,8 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'sales_lau_upsert'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table, 'extract_from_date', + extract_from_date, 'extract_to_date', extract_to_date); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -19,7 +20,7 @@ BEGIN 'sales_lau_upsertでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql index 06af3867..db50980a 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'v_inst_merge_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); @@ -43,8 +43,6 @@ BEGIN prft_cd FROM internal05.v_inst_merge_t - WHERE - (fcl_type IN ('A1', 'A0')) OR fcl_type BETWEEN '20' AND '29' ) AS vimt, $$target_table$$ AS tt SET diff --git a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql 
index e183d0c8..aad11100 100644 --- a/rds_mysql/stored_procedure/src05/whs_org_laundering.sql +++ b/rds_mysql/stored_procedure/src05/whs_org_laundering.sql @@ -7,7 +7,7 @@ BEGIN -- プロシージャ名 DECLARE procedure_name VARCHAR(100) DEFAULT 'whs_org_laundering'; -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + DECLARE procedure_args JSON DEFAULT JSON_OBJECT('target_table', target_table); -- 例外処理 DECLARE EXIT HANDLER FOR SQLEXCEPTION @@ -18,7 +18,7 @@ BEGIN 'whs_org_launderingでエラーが発生', @error_state, @error_msg); SET @error_msg = ( CASE - WHEN LENGTH(@error_msg) > 127 THEN CONCAT(SUBSTRING(@error_msg, 1, 124), '...') + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') ELSE @error_msg END ); From 901e6e9cd9d238675da904e7fc069e20da0ae4f9 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 14:56:40 +0900 Subject: [PATCH 65/86] =?UTF-8?q?feat:=20=E3=83=A1=E3=83=8B=E3=83=A5?= =?UTF-8?q?=E3=83=BC=E7=94=BB=E9=9D=A2=E3=81=AEdump=E5=8F=96=E5=BE=97?= =?UTF-8?q?=E7=8A=B6=E6=85=8B=E5=8C=BA=E5=88=86=E3=81=AB=E3=82=88=E3=82=8B?= =?UTF-8?q?=E5=88=B6=E5=BE=A1=E3=82=92=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/menu.py | 2 ++ ecs/jskult-webapp/src/model/db/hdke_tbl.py | 1 + ecs/jskult-webapp/src/model/view/menu_view_model.py | 7 ++++++- ecs/jskult-webapp/src/repositories/hdke_tbl_repository.py | 2 +- ecs/jskult-webapp/src/static/css/menuStyle.css | 2 +- ecs/jskult-webapp/src/system_var/constants.py | 3 +++ ecs/jskult-webapp/src/templates/menu.html | 8 +++++--- 7 files changed, 19 insertions(+), 6 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/menu.py b/ecs/jskult-webapp/src/controller/menu.py index 61a1a3a0..96826fce 100644 --- a/ecs/jskult-webapp/src/controller/menu.py +++ b/ecs/jskult-webapp/src/controller/menu.py @@ -32,6 +32,7 @@ def menu_view( hdke_tbl_record = 
batch_status_service.hdke_table_record batch_status = hdke_tbl_record.bch_actf + dump_status = hdke_tbl_record.dump_sts_kbn user = UserViewModel( doc_flg=session.doc_flg, inst_flg=session.inst_flg, @@ -40,6 +41,7 @@ def menu_view( ) menu = MenuViewModel( batch_status=batch_status, + dump_status=dump_status, user_model=user ) # セッション書き換え diff --git a/ecs/jskult-webapp/src/model/db/hdke_tbl.py b/ecs/jskult-webapp/src/model/db/hdke_tbl.py index 944581d5..9655c6c1 100644 --- a/ecs/jskult-webapp/src/model/db/hdke_tbl.py +++ b/ecs/jskult-webapp/src/model/db/hdke_tbl.py @@ -5,3 +5,4 @@ from src.model.db.base_db_model import BaseDBModel class HdkeTblModel(BaseDBModel): bch_actf: Optional[str] + dump_sts_kbn: Optional[str] diff --git a/ecs/jskult-webapp/src/model/view/menu_view_model.py b/ecs/jskult-webapp/src/model/view/menu_view_model.py index 647bdec9..7a7970d2 100644 --- a/ecs/jskult-webapp/src/model/view/menu_view_model.py +++ b/ecs/jskult-webapp/src/model/view/menu_view_model.py @@ -3,15 +3,20 @@ from typing import Optional from pydantic import BaseModel from src.model.view.user_view_model import UserViewModel +from src.system_var import constants class MenuViewModel(BaseModel): subtitle: str = 'MeDaCA 機能メニュー' batch_status: Optional[str] + dump_status: Optional[str] user_model: UserViewModel def is_batch_processing(self): - return self.batch_status == '1' + return self.batch_status == constants.BATCH_STATUS_PROCESSING + + def is_backup_processing(self): + return self.dump_status != constants.DUMP_STATUS_UNPROCESSED def is_available_ult_doctor_menu(self): return self.user_model.has_ult_doctor_permission() diff --git a/ecs/jskult-webapp/src/repositories/hdke_tbl_repository.py b/ecs/jskult-webapp/src/repositories/hdke_tbl_repository.py index 46f5bfcc..967fbbe1 100644 --- a/ecs/jskult-webapp/src/repositories/hdke_tbl_repository.py +++ b/ecs/jskult-webapp/src/repositories/hdke_tbl_repository.py @@ -6,7 +6,7 @@ logger = get_logger('日付テーブル取得') class 
HdkeTblRepository(BaseRepository): - FETCH_SQL = "SELECT bch_actf FROM src05.hdke_tbl" + FETCH_SQL = "SELECT bch_actf, dump_sts_kbn FROM src05.hdke_tbl" def fetch_all(self) -> list[HdkeTblModel]: try: diff --git a/ecs/jskult-webapp/src/static/css/menuStyle.css b/ecs/jskult-webapp/src/static/css/menuStyle.css index b1920070..3a07d9fc 100644 --- a/ecs/jskult-webapp/src/static/css/menuStyle.css +++ b/ecs/jskult-webapp/src/static/css/menuStyle.css @@ -37,7 +37,7 @@ body{ font-size: 160%; } -.notUseBioMsg{ +.notUseBioMsg,.notUseMainteMsg{ font-size: 143%; color: red; } diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index 899c19e5..b7ffa0e2 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -1,5 +1,8 @@ import os.path as path +BATCH_STATUS_PROCESSING = '1' +DUMP_STATUS_UNPROCESSED = '0' + BIO_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data') BIO_EXCEL_TEMPLATE_FILE_PATH = path.join(BIO_TEMPORARY_FILE_DIR_PATH, 'BioData_template.xlsx') diff --git a/ecs/jskult-webapp/src/templates/menu.html b/ecs/jskult-webapp/src/templates/menu.html index b8026593..bbfca152 100644 --- a/ecs/jskult-webapp/src/templates/menu.html +++ b/ecs/jskult-webapp/src/templates/menu.html @@ -24,10 +24,12 @@ {% endif %} {% endif %} {% if menu.is_available_master_maintenance_menu() %} - {% if not menu.is_batch_processing() %} - マスターメンテメニュー

+ {% if menu.is_batch_processing() %} +
マスターメンテメニューは
日次バッチ処理中のため利用出来ません
+ {% elif menu.is_backup_processing() %} +
バックアップ取得を開始しました。
日次バッチ更新が終了するまでマスターメンテメニューは利用できません
{% else %} -
マスターメンテメニューは
日次バッチ処理中のため利用出来ません
+ マスターメンテメニュー

{% endif %} {% endif %}

Logout From dbe7b3e007f4f89ff962e2e9871029cdd61fab0a Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 15:34:33 +0900 Subject: [PATCH 66/86] =?UTF-8?q?feat:=20=E3=83=9E=E3=82=B9=E3=82=BF?= =?UTF-8?q?=E3=83=A1=E3=83=B3=E3=83=86=E3=83=A1=E3=83=8B=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/controller/master_mainte.py | 60 +++++++++++++++++++ ecs/jskult-webapp/src/main.py | 4 +- .../view/master_mainte_menu_view_model.py | 5 ++ .../src/services/batch_status_service.py | 16 ++++- ecs/jskult-webapp/src/system_var/constants.py | 4 ++ .../src/templates/masterMainteMenu.html | 25 ++++++++ ecs/jskult-webapp/src/templates/menu.html | 2 +- 7 files changed, 113 insertions(+), 3 deletions(-) create mode 100644 ecs/jskult-webapp/src/controller/master_mainte.py create mode 100644 ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py create mode 100644 ecs/jskult-webapp/src/templates/masterMainteMenu.html diff --git a/ecs/jskult-webapp/src/controller/master_mainte.py b/ecs/jskult-webapp/src/controller/master_mainte.py new file mode 100644 index 00000000..ad5eb3e1 --- /dev/null +++ b/ecs/jskult-webapp/src/controller/master_mainte.py @@ -0,0 +1,60 @@ +from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi.responses import HTMLResponse +from starlette import status + +from src.depends.services import get_service +from src.model.internal.session import UserSession +from src.model.view.master_mainte_menu_view_model import \ + MasterMainteMenuViewModel +from src.router.session_router import AuthenticatedRoute +from src.services.batch_status_service import BatchStatusService +from src.services.session_service import set_session +from src.system_var import constants +from src.templates import templates + +router = APIRouter() +router.route_class = AuthenticatedRoute + +######################### +# Views # 
+######################### + + +@router.get('/masterMainteMenu', response_class=HTMLResponse) +def menu_view( + request: Request, + batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +): + session: UserSession = request.session + + # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる + if session.master_mainte_flg != '1': + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) + + # バッチ処理中の場合、ログアウトさせる + if batch_status_service.is_batch_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, + detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) + # dump処理中の場合、ログアウトさせる + if batch_status_service.is_dump_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) + + # 画面表示用のモデル + menu = MasterMainteMenuViewModel() + # セッション書き換え + session.update( + actions=[ + UserSession.last_access_time.set(UserSession.new_last_access_time()), + UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()), + ] + ) + set_session(session) + templates_response = templates.TemplateResponse( + 'masterMainteMenu.html', + { + 'request': request, + 'menu': menu + }, + headers={'session_key': session.session_key} + ) + return templates_response diff --git a/ecs/jskult-webapp/src/main.py b/ecs/jskult-webapp/src/main.py index 2aca351c..8717c7cb 100644 --- a/ecs/jskult-webapp/src/main.py +++ b/ecs/jskult-webapp/src/main.py @@ -6,7 +6,7 @@ from starlette import status import src.static as static from src.controller import (bio, bio_download, healthcheck, login, logout, - menu, root, ultmarc) + master_mainte, menu, root, ultmarc) from src.controller.sample_send_file import router as sample_router from src.core import tasks from src.error.exception_handler import http_exception_handler @@ -31,6 +31,8 @@ app.include_router(ultmarc.router, prefix='/ultmarc') # 生物由来のダウンロード用APIルーター。 # クライアントから非同期呼出しされるため、共通ルーターとは異なる扱いとする。 app.include_router(bio_download.router, 
prefix='/bio') +# マスタメンテ +app.include_router(master_mainte.router, prefix='/masterMainte') # ヘルスチェック用のルーター app.include_router(healthcheck.router, prefix='/healthcheck') diff --git a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py new file mode 100644 index 00000000..c8ff644a --- /dev/null +++ b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel + + +class MasterMainteMenuViewModel(BaseModel): + subtitle: str = 'MeDaCA 機能メニュー' diff --git a/ecs/jskult-webapp/src/services/batch_status_service.py b/ecs/jskult-webapp/src/services/batch_status_service.py index c9f6c6a7..ffb57af0 100644 --- a/ecs/jskult-webapp/src/services/batch_status_service.py +++ b/ecs/jskult-webapp/src/services/batch_status_service.py @@ -4,6 +4,7 @@ from src.model.db.hdke_tbl import HdkeTblModel from src.repositories.base_repository import BaseRepository from src.repositories.hdke_tbl_repository import HdkeTblRepository from src.services.base_service import BaseService +from src.system_var import constants class BatchStatusService(BaseService): @@ -25,17 +26,30 @@ class BatchStatusService(BaseService): @property def hdke_table_record(self) -> HdkeTblModel: + """日付テーブルを取得する""" + # 日付マスタのレコードがあることを確認 self.__assert_record_exists() # 日付テーブルのレコードは必ず1件 return self.__hdke_table_record[0] def is_batch_processing(self): + """バッチ処理中かどうかを判定する""" + # 日付マスタのレコードがあることを確認 self.__assert_record_exists() - return self.hdke_table_record.bch_actf == '1' # TODO: 定数化する + return self.hdke_table_record.bch_actf == constants.BATCH_STATUS_PROCESSING + + def is_dump_processing(self): + """dump処理処理中かどうかを判定する""" + + # 日付マスタのレコードがあることを確認 + self.__assert_record_exists() + return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED def __assert_record_exists(self): + """日付テーブルが有ることを保証する""" + # 日付マスタのレコードがない場合は例外とする if len(self.__hdke_table_record) == 0: raise 
DBException('日付テーブルのレコードが存在しません') diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index b7ffa0e2..604acfe5 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -1,6 +1,8 @@ import os.path as path +# 日付テーブル.バッチ処理ステータス:未処理 BATCH_STATUS_PROCESSING = '1' +# 日付テーブル.dump取得状態区分:未処理 DUMP_STATUS_UNPROCESSED = '0' BIO_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data') @@ -115,6 +117,7 @@ LOGOUT_REASON_DO_LOGOUT = 'do_logout' LOGOUT_REASON_LOGIN_ERROR = 'login_error' LOGOUT_REASON_BATCH_PROCESSING = 'batch_processing' LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE = 'batch_processing_ult' +LOGOUT_REASON_BACKUP_PROCESSING = 'dump_processing' LOGOUT_REASON_NOT_LOGIN = 'not_login' LOGOUT_REASON_SESSION_EXPIRED = 'session_expired' LOGOUT_REASON_DB_ERROR = 'db_error' @@ -125,6 +128,7 @@ LOGOUT_REASON_MESSAGE_MAP = { LOGOUT_REASON_LOGIN_ERROR: '存在しないユーザー、
またはパスワードが違います。', LOGOUT_REASON_BATCH_PROCESSING: '日次バッチ処理中なので、
生物由来データ参照は使用出来ません。', LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE: '日次バッチ処理中のため、
マスタ-メンテは使用出来ません。', + LOGOUT_REASON_BACKUP_PROCESSING: 'バックアップ取得を開始しました。
日次バッチ更新が終了するまでマスターメンテは使用できません', LOGOUT_REASON_NOT_LOGIN: 'Loginしてからページにアクセスしてください。', LOGOUT_REASON_SESSION_EXPIRED: 'セッションが切れています。
再度Loginしてください。', LOGOUT_REASON_DB_ERROR: 'DB接続に失敗しました。
再度Loginするか、
管理者にお問い合わせください。', diff --git a/ecs/jskult-webapp/src/templates/masterMainteMenu.html b/ecs/jskult-webapp/src/templates/masterMainteMenu.html new file mode 100644 index 00000000..147ab2da --- /dev/null +++ b/ecs/jskult-webapp/src/templates/masterMainteMenu.html @@ -0,0 +1,25 @@ + + + + {% with subtitle = menu.subtitle %} + {% include '_header.html' %} + {% endwith %} + + + + + diff --git a/ecs/jskult-webapp/src/templates/menu.html b/ecs/jskult-webapp/src/templates/menu.html index bbfca152..59eb8a74 100644 --- a/ecs/jskult-webapp/src/templates/menu.html +++ b/ecs/jskult-webapp/src/templates/menu.html @@ -29,7 +29,7 @@ {% elif menu.is_backup_processing() %}
バックアップ取得を開始しました。
日次バッチ更新が終了するまでマスターメンテメニューは利用できません
{% else %} - マスターメンテメニュー

+ マスターメンテメニュー

{% endif %} {% endif %}

Logout From af13fa66553ac49d53448a8176b03a7ccbf3e223 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 16:13:04 +0900 Subject: [PATCH 67/86] =?UTF-8?q?fix:=20=E3=83=A1=E3=83=B3=E3=83=86?= =?UTF-8?q?=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0=E3=82=A4?= =?UTF-8?q?=E3=83=B3=E5=A4=B1=E6=95=97=E6=99=82=E3=81=AB=E3=82=A8=E3=83=A9?= =?UTF-8?q?=E3=83=BC=E9=80=9A=E7=9F=A5=E3=81=8C=E5=87=BA=E3=81=A6=E3=81=97?= =?UTF-8?q?=E3=81=BE=E3=81=86=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3=E3=80=82?= =?UTF-8?q?SSO=E3=81=AF=E9=80=9A=E7=9F=A5=E3=81=95=E3=81=9B=E3=82=8B?= =?UTF-8?q?=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 09032af5..6d867645 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -69,10 +69,10 @@ def login( try: jwt_token = login_service.login(request.username, request.password) except NotAuthorizeException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) verified_token = jwt_token.verify_token() @@ -126,7 +126,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(e) + logger.exception(f'SSOログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) # トークンからユーザーIDを取得 From cb1db32d2dfc05e8cf1fb57865018a250a38468d Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 
29 Jun 2023 16:39:12 +0900 Subject: [PATCH 68/86] =?UTF-8?q?fix:=20=E3=83=A6=E3=83=BC=E3=82=B6?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AB=E3=83=AC=E3=82=B3=E3=83=BC?= =?UTF-8?q?=E3=83=89=E3=81=8C=E5=AD=98=E5=9C=A8=E3=81=97=E3=81=AA=E3=81=84?= =?UTF-8?q?=E5=A0=B4=E5=90=88=E3=81=AB=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AB?= =?UTF-8?q?=E3=81=AA=E3=82=8B=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 6d867645..00e5cb75 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -79,6 +79,10 @@ def login( # 普通の認証だと、`cognito:username`に入る。 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') @@ -132,6 +136,11 @@ def sso_authorize( # トークンからユーザーIDを取得 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') From ccd4ff79a9eb8033abd0aefe59dda2b727a1f9ed Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 16:13:04 +0900 Subject: [PATCH 69/86] 
=?UTF-8?q?fix:=20=E3=83=A1=E3=83=B3=E3=83=86?= =?UTF-8?q?=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0=E3=82=A4?= =?UTF-8?q?=E3=83=B3=E5=A4=B1=E6=95=97=E6=99=82=E3=81=AB=E3=82=A8=E3=83=A9?= =?UTF-8?q?=E3=83=BC=E9=80=9A=E7=9F=A5=E3=81=8C=E5=87=BA=E3=81=A6=E3=81=97?= =?UTF-8?q?=E3=81=BE=E3=81=86=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3=E3=80=82?= =?UTF-8?q?SSO=E3=81=AF=E9=80=9A=E7=9F=A5=E3=81=95=E3=81=9B=E3=82=8B?= =?UTF-8?q?=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 09032af5..6d867645 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -69,10 +69,10 @@ def login( try: jwt_token = login_service.login(request.username, request.password) except NotAuthorizeException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: - logger.exception(e) + logger.info(f'ログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) verified_token = jwt_token.verify_token() @@ -126,7 +126,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(e) + logger.exception(f'SSOログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) # トークンからユーザーIDを取得 From d76b8d1c9332550239effe6e0c6d5c88b0aed986 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 16:39:12 +0900 Subject: [PATCH 70/86] =?UTF-8?q?fix:=20=E3=83=A6=E3=83=BC=E3=82=B6?= 
=?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AB=E3=83=AC=E3=82=B3=E3=83=BC?= =?UTF-8?q?=E3=83=89=E3=81=8C=E5=AD=98=E5=9C=A8=E3=81=97=E3=81=AA=E3=81=84?= =?UTF-8?q?=E5=A0=B4=E5=90=88=E3=81=AB=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AB?= =?UTF-8?q?=E3=81=AA=E3=82=8B=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 6d867645..00e5cb75 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -79,6 +79,10 @@ def login( # 普通の認証だと、`cognito:username`に入る。 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') @@ -132,6 +136,11 @@ def sso_authorize( # トークンからユーザーIDを取得 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) + + # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする + if user_record is None: + logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') From d18b12a0f62f87dcfafaf2c397b00f6f8a986e97 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 17:13:35 +0900 Subject: [PATCH 71/86] =?UTF-8?q?Revert=20"fix:=20=E3=83=A1=E3=83=B3?= 
=?UTF-8?q?=E3=83=86=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A4=E3=83=B3=E5=A4=B1=E6=95=97=E6=99=82=E3=81=AB=E3=82=A8?= =?UTF-8?q?=E3=83=A9=E3=83=BC=E9=80=9A=E7=9F=A5=E3=81=8C=E5=87=BA=E3=81=A6?= =?UTF-8?q?=E3=81=97=E3=81=BE=E3=81=86=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3?= =?UTF-8?q?=E3=80=82SSO=E3=81=AF=E9=80=9A=E7=9F=A5=E3=81=95=E3=81=9B?= =?UTF-8?q?=E3=82=8B=E3=80=82"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit af13fa66553ac49d53448a8176b03a7ccbf3e223. --- ecs/jskult-webapp/src/controller/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 00e5cb75..d8e7d569 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -69,10 +69,10 @@ def login( try: jwt_token = login_service.login(request.username, request.password) except NotAuthorizeException as e: - logger.info(f'ログイン失敗:{e}') + logger.exception(e) raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: - logger.info(f'ログイン失敗:{e}') + logger.exception(e) raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) verified_token = jwt_token.verify_token() @@ -130,7 +130,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(f'SSOログイン失敗:{e}') + logger.exception(e) raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) # トークンからユーザーIDを取得 From 4c3339fc24c68c707e4539efd60b0ec56eb811f8 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Thu, 29 Jun 2023 17:13:41 +0900 Subject: [PATCH 72/86] =?UTF-8?q?Revert=20"fix:=20=E3=83=A6=E3=83=BC?= 
=?UTF-8?q?=E3=82=B6=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AB=E3=83=AC=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=81=8C=E5=AD=98=E5=9C=A8=E3=81=97=E3=81=AA?= =?UTF-8?q?=E3=81=84=E5=A0=B4=E5=90=88=E3=81=AB=E3=82=A8=E3=83=A9=E3=83=BC?= =?UTF-8?q?=E3=81=AB=E3=81=AA=E3=82=8B=E3=81=AE=E3=82=92=E4=BF=AE=E6=AD=A3?= =?UTF-8?q?"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit cb1db32d2dfc05e8cf1fb57865018a250a38468d. --- ecs/jskult-webapp/src/controller/login.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index d8e7d569..09032af5 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -79,10 +79,6 @@ def login( # 普通の認証だと、`cognito:username`に入る。 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) - # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする - if user_record is None: - logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') @@ -136,11 +132,6 @@ def sso_authorize( # トークンからユーザーIDを取得 user_id = verified_token.user_id user_record = login_service.logged_in_user(user_id) - - # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする - if user_record is None: - logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}') - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) # ユーザーが有効ではない場合、ログアウトにリダイレクトする if not user_record.is_enable_user(): logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}') From c5d66c6d25e99463ba7f01a2a1259557cbb2d2ab Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 10:44:00 +0900 Subject: [PATCH 73/86] 
=?UTF-8?q?fix:=20=E7=94=BB=E9=9D=A2=E5=88=A9?= =?UTF-8?q?=E7=94=A8=E5=8F=AF=E5=90=A6=E3=83=95=E3=83=A9=E3=82=B0=E3=81=AF?= =?UTF-8?q?NULL=E3=81=8C=E8=A8=B1=E5=AE=B9=E3=81=95=E3=82=8C=E3=82=8B?= =?UTF-8?q?=E3=81=9F=E3=82=81=E3=80=81=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/model/view/user_view_model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ecs/jskult-webapp/src/model/view/user_view_model.py b/ecs/jskult-webapp/src/model/view/user_view_model.py index 55f1528a..5b523b4c 100644 --- a/ecs/jskult-webapp/src/model/view/user_view_model.py +++ b/ecs/jskult-webapp/src/model/view/user_view_model.py @@ -4,10 +4,10 @@ from pydantic import BaseModel class UserViewModel(BaseModel): - bio_flg: str # AUTH_FLG1 - doc_flg: str # AUTH_FLG2 - inst_flg: str # AUTH_FLG3 - master_mainte_flg: str # AUTH_FLG4 + bio_flg: Optional[str] # AUTH_FLG1 + doc_flg: Optional[str] # AUTH_FLG2 + inst_flg: Optional[str] # AUTH_FLG3 + master_mainte_flg: Optional[str] # AUTH_FLG4 user_flg: Optional[str] # MNTUSER_FLG def has_ult_doctor_permission(self): From 646dd4f7d9b1f606b5c423dd0e5aef9a9bc1dba3 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 14:57:34 +0900 Subject: [PATCH 74/86] =?UTF-8?q?fix:=20=E6=96=87=E8=A8=80=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/model/view/master_mainte_menu_view_model.py | 2 +- ecs/jskult-webapp/src/templates/masterMainteMenu.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py index c8ff644a..2b1629b1 100644 --- a/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py +++ b/ecs/jskult-webapp/src/model/view/master_mainte_menu_view_model.py @@ -2,4 +2,4 @@ from 
pydantic import BaseModel class MasterMainteMenuViewModel(BaseModel): - subtitle: str = 'MeDaCA 機能メニュー' + subtitle: str = 'MeDaCA マスターメンテメニュー' diff --git a/ecs/jskult-webapp/src/templates/masterMainteMenu.html b/ecs/jskult-webapp/src/templates/masterMainteMenu.html index 147ab2da..987615bf 100644 --- a/ecs/jskult-webapp/src/templates/masterMainteMenu.html +++ b/ecs/jskult-webapp/src/templates/masterMainteMenu.html @@ -7,7 +7,7 @@
-

MeDaCA
マスタメンテメニュー

+

MeDaCA
マスターメンテメニュー



施設担当者データCSVアップロード

From 535ae8fa482d601da6d7e6154edecc2aeda37d65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 30 Jun 2023 15:15:55 +0900 Subject: [PATCH 75/86] =?UTF-8?q?feat:=20=E4=BB=95=E6=A7=98=E5=A4=89?= =?UTF-8?q?=E6=9B=B4=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrate_dcf_inst_merge.py | 34 +++++++++++++------ 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py index ab4d107a..816a0545 100644 --- a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -55,12 +55,14 @@ def _set_disabled_dct_inst_merge(db: Database): _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') -def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: - # 納入先処方元マスタから、DCF施設コードに対応した領域コードの取得 +def _select_ult_ident_presc_dcf_inst_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 納入先処方元マスタから、DCF施設コードに対応したレコードの取得 try: sql = """ SELECT - ta_cd + ta_cd, + ult_ident_cd, + ratio FROM src05.ult_ident_presc WHERE @@ -70,9 +72,9 @@ def _select_ult_ident_presc_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: params = {'dcf_inst_cd': dcf_inst_cd} ult_ident_presc_ta_cd_records = db.execute_select(sql, params) logging_sql(logger, sql) - logger.info('納入先処方元マスタから領域コードの取得に成功') + logger.info('納入先処方元マスタからDCF施設コードに対応したレコードの取得に成功') except Exception as e: - logger.debug('納入先処方元マスタから領域コードの取得に失敗') + logger.debug('納入先処方元マスタからDCF施設コードに対応したレコードの取得に失敗') raise e return ult_ident_presc_ta_cd_records @@ -83,11 +85,12 @@ def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict logger.info('納入先処方元マスタの登録 開始') for data_inst_cnt, enabled_merge_record in 
enumerate(enabled_dst_inst_merge_records, start=1): tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) - ult_ident_presc_ta_cd_records = _select_ult_ident_presc_ta_cd(db, enabled_merge_record['dcf_inst_cd']) - for ult_ident_presc_ta_cd_record in ult_ident_presc_ta_cd_records: - ult_ident_presc_records = _select_ult_ident_presc(db, enabled_merge_record['dcf_inst_cd'], + ult_ident_presc_source_records = _select_ult_ident_presc_dcf_inst_cd(db, enabled_merge_record['dcf_inst_cd']) + for ult_ident_presc_source_record in ult_ident_presc_source_records: + ult_ident_presc_records = _select_ult_ident_presc(db, + enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'], - ult_ident_presc_ta_cd_record['ta_cd']) + ult_ident_presc_source_record) for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') # 処方元コード=重複時相手先コードが発生した場合 @@ -554,7 +557,8 @@ def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: return emp_chg_inst_records -def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, + ult_ident_presc_row: dict) -> list[dict]: # ult_ident_prescからSELECT try: sql = """ @@ -572,6 +576,8 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_ WHERE uipopp.presc_cd = :dup_opp_cd AND uipopp.ta_cd = :ta_cd + AND uipopp.ult_ident_cd = :ult_ident_cd + AND uipopp.ratio = :ratio ) AS opp_count FROM src05.ult_ident_presc AS uip @@ -580,7 +586,13 @@ def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_ AND uip.ta_cd = :ta_cd AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date """ - params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} + params = { + 'dcf_inst_cd': dcf_inst_cd, + 'dup_opp_cd': dup_opp_cd, + 
'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'] + } ult_ident_presc_records = db.execute_select(sql, params) logging_sql(logger, sql) logger.info('納入先処方元マスタの取得 成功') From ae6aa29994fa8f5b14f6e272c8c4c56c81760231 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 15:26:24 +0900 Subject: [PATCH 76/86] =?UTF-8?q?feat:=20=E5=90=84=E3=83=9E=E3=82=B9?= =?UTF-8?q?=E3=82=BF=E3=83=A1=E3=83=B3=E3=83=86=E7=94=BB=E9=9D=A2=E3=81=AE?= =?UTF-8?q?=E5=81=B4=E3=81=A0=E3=81=91=E4=BD=9C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/controller/master_mainte.py | 125 ++++++++++++++++++ .../view/inst_emp_csv_download_view_model.py | 5 + .../view/inst_emp_csv_upload_view_model.py | 5 + .../model/view/table_override_view_model.py | 5 + .../src/templates/instEmpCsvDL.html | 13 ++ .../src/templates/instEmpCsvUL.html | 13 ++ .../src/templates/masterMainteMenu.html | 2 +- .../src/templates/tableOverride.html | 13 ++ 8 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py create mode 100644 ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py create mode 100644 ecs/jskult-webapp/src/model/view/table_override_view_model.py create mode 100644 ecs/jskult-webapp/src/templates/instEmpCsvDL.html create mode 100644 ecs/jskult-webapp/src/templates/instEmpCsvUL.html create mode 100644 ecs/jskult-webapp/src/templates/tableOverride.html diff --git a/ecs/jskult-webapp/src/controller/master_mainte.py b/ecs/jskult-webapp/src/controller/master_mainte.py index ad5eb3e1..b8b8e087 100644 --- a/ecs/jskult-webapp/src/controller/master_mainte.py +++ b/ecs/jskult-webapp/src/controller/master_mainte.py @@ -4,8 +4,13 @@ from starlette import status from src.depends.services import get_service from src.model.internal.session import UserSession 
+from src.model.view.inst_emp_csv_download_view_model import \ + InstEmpCsvDownloadViewModel +from src.model.view.inst_emp_csv_upload_view_model import \ + InstEmpCsvUploadViewModel from src.model.view.master_mainte_menu_view_model import \ MasterMainteMenuViewModel +from src.model.view.table_override_view_model import TableOverrideViewModel from src.router.session_router import AuthenticatedRoute from src.services.batch_status_service import BatchStatusService from src.services.session_service import set_session @@ -58,3 +63,123 @@ def menu_view( headers={'session_key': session.session_key} ) return templates_response + + +@router.get('/instEmpCsvUL', response_class=HTMLResponse) +def inst_emp_csv_upload_view( + request: Request, + batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +): + session: UserSession = request.session + + # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる + if session.master_mainte_flg != '1': + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) + + # バッチ処理中の場合、ログアウトさせる + if batch_status_service.is_batch_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, + detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) + # dump処理中の場合、ログアウトさせる + if batch_status_service.is_dump_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) + + # 画面表示用のモデル + view_model = InstEmpCsvUploadViewModel() + # セッション書き換え + session.update( + actions=[ + UserSession.last_access_time.set(UserSession.new_last_access_time()), + UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()), + ] + ) + set_session(session) + templates_response = templates.TemplateResponse( + 'instEmpCsvUL.html', + { + 'request': request, + 'view': view_model + }, + headers={'session_key': session.session_key} + ) + return templates_response + + +@router.get('/instEmpCsvDL', response_class=HTMLResponse) +def new_inst_view( + request: Request, + 
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +): + session: UserSession = request.session + + # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる + if session.master_mainte_flg != '1': + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) + + # バッチ処理中の場合、ログアウトさせる + if batch_status_service.is_batch_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, + detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) + # dump処理中の場合、ログアウトさせる + if batch_status_service.is_dump_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) + + # 画面表示用のモデル + view_model = InstEmpCsvDownloadViewModel() + # セッション書き換え + session.update( + actions=[ + UserSession.last_access_time.set(UserSession.new_last_access_time()), + UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()), + ] + ) + set_session(session) + templates_response = templates.TemplateResponse( + 'instEmpCsvDL.html', + { + 'request': request, + 'view': view_model + }, + headers={'session_key': session.session_key} + ) + return templates_response + + +@router.get('/tableOverride', response_class=HTMLResponse) +def table_override_view( + request: Request, + batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) +): + session: UserSession = request.session + + # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる + if session.master_mainte_flg != '1': + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) + + # バッチ処理中の場合、ログアウトさせる + if batch_status_service.is_batch_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, + detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE) + # dump処理中の場合、ログアウトさせる + if batch_status_service.is_dump_processing(): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING) + + # 画面表示用のモデル + view_model = TableOverrideViewModel() + # セッション書き換え + session.update( + 
actions=[ + UserSession.last_access_time.set(UserSession.new_last_access_time()), + UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()), + ] + ) + set_session(session) + templates_response = templates.TemplateResponse( + 'tableOverride.html', + { + 'request': request, + 'view': view_model + }, + headers={'session_key': session.session_key} + ) + return templates_response diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py new file mode 100644 index 00000000..220294ba --- /dev/null +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_download_view_model.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel + + +class InstEmpCsvDownloadViewModel(BaseModel): + subtitle: str = '施設担当者データCSVダウンロード' diff --git a/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py new file mode 100644 index 00000000..64bde407 --- /dev/null +++ b/ecs/jskult-webapp/src/model/view/inst_emp_csv_upload_view_model.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel + + +class InstEmpCsvUploadViewModel(BaseModel): + subtitle: str = '施設担当者データCSVアップロード' diff --git a/ecs/jskult-webapp/src/model/view/table_override_view_model.py b/ecs/jskult-webapp/src/model/view/table_override_view_model.py new file mode 100644 index 00000000..e03b1fd0 --- /dev/null +++ b/ecs/jskult-webapp/src/model/view/table_override_view_model.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel + + +class TableOverrideViewModel(BaseModel): + subtitle: str = 'テーブル上書きコピー' diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html new file mode 100644 index 00000000..07712d73 --- /dev/null +++ b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html @@ -0,0 +1,13 @@ + + + + {% with subtitle = view.subtitle %} + {% include '_header.html' %} + {% endwith %} + + + + +

施設担当者データCSVダウンロード

+ + diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html new file mode 100644 index 00000000..bdd305d8 --- /dev/null +++ b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html @@ -0,0 +1,13 @@ + + + + {% with subtitle = view.subtitle %} + {% include '_header.html' %} + {% endwith %} + + + + +

施設担当者データCSVアップロード

+ + diff --git a/ecs/jskult-webapp/src/templates/masterMainteMenu.html b/ecs/jskult-webapp/src/templates/masterMainteMenu.html index 987615bf..957279fa 100644 --- a/ecs/jskult-webapp/src/templates/masterMainteMenu.html +++ b/ecs/jskult-webapp/src/templates/masterMainteMenu.html @@ -10,7 +10,7 @@

MeDaCA
マスターメンテメニュー



- 施設担当者データCSVアップロード

+ 施設担当者データCSVアップロード

施設担当者データCSVダウンロード

diff --git a/ecs/jskult-webapp/src/templates/tableOverride.html b/ecs/jskult-webapp/src/templates/tableOverride.html new file mode 100644 index 00000000..272cd0cd --- /dev/null +++ b/ecs/jskult-webapp/src/templates/tableOverride.html @@ -0,0 +1,13 @@ + + + + {% with subtitle = view.subtitle %} + {% include '_header.html' %} + {% endwith %} + + + + +

テーブル上書きコピー

+ + From ef6a079467681146d777700ff88f7630593c2c9b Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 16:34:56 +0900 Subject: [PATCH 77/86] =?UTF-8?q?fix:=20=E3=83=A1=E3=83=B3=E3=83=86?= =?UTF-8?q?=E3=83=8A=E3=83=B3=E3=82=B9=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC?= =?UTF-8?q?=E3=81=AE=E3=83=AD=E3=82=B0=E3=82=A4=E3=83=B3=E6=99=82=E3=80=81?= =?UTF-8?q?=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=81=8C=E8=A6=8B=E3=81=A4?= =?UTF-8?q?=E3=81=8B=E3=82=89=E3=81=AA=E3=81=84=E3=81=A8=E3=81=8D=E3=81=AE?= =?UTF-8?q?=E3=83=AD=E3=82=B0=E3=82=A2=E3=82=A6=E3=83=88=E7=94=BB=E9=9D=A2?= =?UTF-8?q?=E3=81=8B=E3=82=89=E3=83=A1=E3=83=B3=E3=83=86=E3=83=8A=E3=83=B3?= =?UTF-8?q?=E3=82=B9=E3=83=A6=E3=83=BC=E3=82=B6=E3=83=BC=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A4=E3=83=B3=E7=94=BB=E9=9D=A2=E3=81=AB=E6=88=BB=E3=82=8C?= =?UTF-8?q?=E3=82=8B=E3=82=88=E3=81=86=E3=81=AB=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/logout.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/controller/logout.py b/ecs/jskult-webapp/src/controller/logout.py index 79de281c..4f30d802 100644 --- a/ecs/jskult-webapp/src/controller/logout.py +++ b/ecs/jskult-webapp/src/controller/logout.py @@ -22,11 +22,15 @@ def logout_view( reason: Optional[str] = None, session: Union[UserSession, None] = Depends(verify_session) ): + # どういうルートでログインしたかを判断するため、refererを取得 + referer = request.headers.get('referer', '') + redirect_to = '/login/userlogin' link_text = 'MeDaCA機能メニューへ' - if session is not None and session.user_flg == '1': + if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'): redirect_to = '/login/maintlogin' link_text = 'Login画面に戻る' + logout = LogoutViewModel() logout.redirect_to = redirect_to logout.reason = constants.LOGOUT_REASON_MESSAGE_MAP.get(reason, '') From 7ba0ec99422fe137b8a51dc356695285df355976 Mon Sep 17 00:00:00 2001 
From: "shimoda.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 16:36:10 +0900 Subject: [PATCH 78/86] =?UTF-8?q?style:=20=E3=82=B3=E3=83=A1=E3=83=B3?= =?UTF-8?q?=E3=83=88=E8=BF=BD=E5=8A=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/logout.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ecs/jskult-webapp/src/controller/logout.py b/ecs/jskult-webapp/src/controller/logout.py index 4f30d802..c841e48c 100644 --- a/ecs/jskult-webapp/src/controller/logout.py +++ b/ecs/jskult-webapp/src/controller/logout.py @@ -27,6 +27,7 @@ def logout_view( redirect_to = '/login/userlogin' link_text = 'MeDaCA機能メニューへ' + # セッションが切れておらず、メンテユーザである、またはメンテログイン画面から遷移した場合、メンテログイン画面に戻す if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'): redirect_to = '/login/maintlogin' link_text = 'Login画面に戻る' From 3bf951c632da508bbfdf8435bf5cf71c2cc6e137 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=AB=98=E6=9C=A8=E8=A6=81?= Date: Fri, 30 Jun 2023 16:55:31 +0900 Subject: [PATCH 79/86] =?UTF-8?q?feat:=20=E4=BE=8B=E5=A4=96=E5=87=A6?= =?UTF-8?q?=E7=90=86=E4=BF=AE=E6=AD=A3=E3=81=AE=E6=A8=AA=E5=B1=95=E9=96=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src05/inst_merge_t_create.sql | 118 ++++++++-------- .../src05/v_inst_merge_t_create.sql | 130 +++++++++--------- 2 files changed, 130 insertions(+), 118 deletions(-) diff --git a/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql b/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql index 74472fcc..b825ac53 100644 --- a/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql +++ b/rds_mysql/stored_procedure/src05/inst_merge_t_create.sql @@ -2,69 +2,75 @@ CREATE PROCEDURE src05.inst_merge_t_create() SQL SECURITY INVOKER BEGIN - -- スキーマ名 - DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); - -- プロシージャ名 - DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_t_create'; - -- 
プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'inst_merge_t_create'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); - -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION - BEGIN - GET DIAGNOSTICS CONDITION 1 - @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, - 'inst_merge_t_createでエラーが発生', @error_state, @error_msg); - SIGNAL SQLSTATE '45000' - SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; - END; + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'inst_merge_t_createでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') + ELSE @error_msg + END + ); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; - SET @error_state = NULL, @error_msg = NULL; + SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 開始' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 開始' + ); - TRUNCATE TABLE internal05.inst_merge_t; + TRUNCATE TABLE internal05.inst_merge_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 終了' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成① 終了' + ); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 開始' - ); + CALL 
medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 開始' + ); - INSERT INTO - internal05.inst_merge_t ( - dcf_dsf_inst_cd, - dup_opp_cd, - form_inst_name_kanji, - form_inst_name_kana, - inst_addr, - prefc_cd - ) - SELECT - dim.dcf_inst_cd, - dim.dcf_inst_cd_new, - ci.form_inst_name_kanji, - ci.form_inst_name_kana, - ci.inst_addr, - ci.prefc_cd - FROM - src05.dcf_inst_merge dim - LEFT OUTER JOIN src05.com_inst ci - ON dim.dcf_inst_cd_new = ci.dcf_dsf_inst_cd - AND ci.delete_flg = '0' - WHERE - dim.muko_flg = '0' - AND dim.dcf_inst_cd_new IS NOT NULL - AND dim.enabled_flg = 'Y' - AND src05.to_date_yyyymm01(dim.tekiyo_month) <= src05.get_syor_date(); + INSERT INTO + internal05.inst_merge_t ( + dcf_dsf_inst_cd, + dup_opp_cd, + form_inst_name_kanji, + form_inst_name_kana, + inst_addr, + prefc_cd + ) + SELECT + dim.dcf_inst_cd, + dim.dcf_inst_cd_new, + ci.form_inst_name_kanji, + ci.form_inst_name_kana, + ci.inst_addr, + ci.prefc_cd + FROM + src05.dcf_inst_merge AS dim + LEFT OUTER JOIN src05.com_inst AS ci + ON dim.dcf_inst_cd_new = ci.dcf_dsf_inst_cd + AND ci.delete_flg = '0' + WHERE + dim.muko_flg = '0' + AND dim.dcf_inst_cd_new IS NOT NULL + AND dim.enabled_flg = 'Y' + AND src05.to_date_yyyymm01(dim.tekiyo_month) <= src05.get_syor_date(); - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 終了' - ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'アルトマーク施設統合マスタ(洗替処理一時テーブル)作成② 終了' + ); END diff --git a/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql b/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql index 6a6f9dd7..1c75c4ea 100644 --- a/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql +++ b/rds_mysql/stored_procedure/src05/v_inst_merge_t_create.sql @@ -2,75 +2,81 @@ CREATE PROCEDURE src05.v_inst_merge_t_create() SQL SECURITY INVOKER BEGIN - -- スキーマ名 - DECLARE schema_name VARCHAR(50) DEFAULT (SELECT 
DATABASE()); - -- プロシージャ名 - DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_t_create'; - -- プロシージャの引数 - DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); + -- スキーマ名 + DECLARE schema_name VARCHAR(50) DEFAULT (SELECT DATABASE()); + -- プロシージャ名 + DECLARE procedure_name VARCHAR(100) DEFAULT 'v_inst_merge_t_create'; + -- プロシージャの引数 + DECLARE procedure_args JSON DEFAULT JSON_OBJECT(); - -- 例外処理 - DECLARE EXIT HANDLER FOR SQLEXCEPTION - BEGIN - GET DIAGNOSTICS CONDITION 1 - @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; - call medaca_common.put_error_log(schema_name, procedure_name, procedure_args, - 'v_inst_merge_t_createでエラーが発生', @error_state, @error_msg); - SIGNAL SQLSTATE '45000' - SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; - END; + -- 例外処理 + DECLARE EXIT HANDLER FOR SQLEXCEPTION + BEGIN + GET DIAGNOSTICS CONDITION 1 + @error_state = RETURNED_SQLSTATE, @error_msg = MESSAGE_TEXT; + CALL medaca_common.put_error_log(schema_name, procedure_name, procedure_args, + 'v_inst_merge_t_createでエラーが発生', @error_state, @error_msg); + SET @error_msg = ( + CASE + WHEN LENGTH(@error_msg) > 128 THEN CONCAT(SUBSTRING(@error_msg, 1, 125), '...') + ELSE @error_msg + END + ); + SIGNAL SQLSTATE '45000' + SET MYSQL_ERRNO = @error_state, MESSAGE_TEXT = @error_msg; + END; - SET @error_state = NULL, @error_msg = NULL; + SET @error_state = NULL, @error_msg = NULL; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成① 開始'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成① 開始'); - TRUNCATE TABLE internal05.v_inst_merge_t; + TRUNCATE TABLE internal05.v_inst_merge_t; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成① 終了'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成① 終了'); - call medaca_common.put_info_log(schema_name, procedure_name, 
procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成② 開始'); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成② 開始'); - INSERT INTO - internal05.v_inst_merge_t ( - v_inst_cd, - v_inst_cd_merge, - fcl_name, - fcl_kn_name, - fmt_addr, - prft_cd, - fcl_type - ) - SELECT - vhmv.v_inst_cd, - vhmv.v_inst_cd_merg, - fmv.fcl_name, - fmv.fcl_kn_name, - fmv.fmt_addr, - fmv.prft_cd, - fmv.fcl_type - FROM - src05.vop_hco_merge_v vhmv, - src05.fcl_mst_v fmv - INNER JOIN ( - SELECT - v_inst_cd, - MAX(sub_num) AS sno - FROM - src05.fcl_mst_v - GROUP BY - v_inst_cd - ) max_sno_fmv - ON fmv.v_inst_cd = max_sno_fmv.v_inst_cd - AND fmv.sub_num = max_sno_fmv.sno - WHERE - vhmv.v_inst_cd_merg = fmv.v_inst_cd - AND STR_TO_DATE(vhmv.apply_dt, '%Y-%m-%d') <= src05.get_syor_date() - AND fmv.rec_sts_kbn != '9'; + INSERT INTO + internal05.v_inst_merge_t ( + v_inst_cd, + v_inst_cd_merge, + fcl_name, + fcl_kn_name, + fmt_addr, + prft_cd, + fcl_type + ) + SELECT + vhmv.v_inst_cd, + vhmv.v_inst_cd_merg, + fmv.fcl_name, + fmv.fcl_kn_name, + fmv.fmt_addr, + fmv.prft_cd, + fmv.fcl_type + FROM + src05.vop_hco_merge_v AS vhmv, + src05.fcl_mst_v AS fmv + INNER JOIN ( + SELECT + v_inst_cd, + MAX(sub_num) AS sno + FROM + src05.fcl_mst_v + GROUP BY + v_inst_cd + ) AS max_sno_fmv + ON fmv.v_inst_cd = max_sno_fmv.v_inst_cd + AND fmv.sub_num = max_sno_fmv.sno + WHERE + vhmv.v_inst_cd_merg = fmv.v_inst_cd + AND STR_TO_DATE(vhmv.apply_dt, '%Y-%m-%d') <= src05.get_syor_date() + AND fmv.rec_sts_kbn != '9'; - call medaca_common.put_info_log(schema_name, procedure_name, procedure_args, - 'V施設統合マスタ(洗替処理一時テーブル)作成② 終了' ); + CALL medaca_common.put_info_log(schema_name, procedure_name, procedure_args, + 'V施設統合マスタ(洗替処理一時テーブル)作成② 終了' ); END From b77eab5e7c82fbc0b2a4f8d9e9439aa703059724 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 18:27:11 +0900 Subject: [PATCH 80/86] =?UTF-8?q?NEWDWH2021-1130=20LOAD=E6=96=87=E3=81=AEW?= 
=?UTF-8?q?arning=E3=81=AF=E3=82=82=E3=81=86=E8=A6=8B=E3=81=AA=E3=81=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index fa82805d..b345b0d8 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -23,6 +23,7 @@ class VjskDataLoadManager: try: db.connect() + db.execute("SET SESSION sql_mode = 'TRADITIONAL';") # orgをtruncate db.execute(f"TRUNCATE TABLE {table_name_org};") @@ -37,18 +38,6 @@ class VjskDataLoadManager: """ db.begin() result = db.execute(sql, {"src_file_name": src_file_name}) - logger.debug(sql) - result_w = db.execute("SHOW WARNINGS;") - has_mysql_warnings = False - for row in result_w.fetchall(): - # 例外スロー対象から除外 : Warning(1261) Row {ROW NUMBER} doesn't contain data for all columns - if len(row) >= 2 and row[0] == "Warning" and row[1] == 1261: - logger.info(f"SHOW WARNINGS (SKIP) : {row}") - continue - has_mysql_warnings = True - logger.info(f"SHOW WARNINGS : {row}") - if has_mysql_warnings: - raise Exception("LOAD文実行時にWARNINGが発生しました。") logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})') db.commit() From 9e6403dea521cfc95ced01e5ef3f6231d6ed6630 Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Fri, 30 Jun 2023 19:23:42 +0900 Subject: [PATCH 81/86] =?UTF-8?q?NEWDWH2021-1130=20tsv=E3=83=88=E3=83=81?= =?UTF-8?q?=E5=88=87=E3=82=8C=E5=88=A4=E5=AE=9A=E3=82=92LOAD=E5=AE=9F?= =?UTF-8?q?=E8=A1=8C=E5=89=8D=E3=81=AB=E8=A6=8B=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 27 +++++++++++++++++++ 
.../src/batch/vjsk/vjsk_recv_file_mapper.py | 19 +++++++++++++ 2 files changed, 46 insertions(+) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index b345b0d8..3ef87186 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -60,6 +60,26 @@ class VjskDataLoadManager: logger.debug("_import_to_db done") return + def _get_tsv_last_row_tab_count(src_file_name: str) -> int: + # memo: tsvファイルが数百MBに及ぶことを想定して、末尾から1行分を参照する + # memo: 前提1 行区切りは LF('\n') + buf_count = 0 + + # バイナリモードでファイルオープン + with open(src_file_name, 'rb') as file: + # ファイルの末尾から2バイト手前に移動 + file.seek(-2, 2) + # 改行文字を見つけるまで逆方向に読み進める + while file.read(1) != b'\n': + # 1バイト戻って再度読み込み + file.seek(-2, 1) + # 末尾行を抽出 + last_line = file.readline().decode().rstrip('\n') + # 末尾行に含まれるタブ文字の数を抽出 + buf_count = last_line.count('\t') + + return buf_count + @classmethod def load(self, target: dict): logger.debug(f'load start target:{target}') @@ -67,6 +87,13 @@ class VjskDataLoadManager: # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得 local_file_name = target["src_file_path"] + # tsvファイル末尾行のTABの数が総定数と一致しない場合は例外をスロー + tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) + expect_tabs = mapper.get_file_column_separators(target["condkey"]) + if tsv_tabs != expect_tabs: + msg = f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" + raise BatchOperationException(msg) + # データベース登録 self._import_to_db(local_file_name, target["condkey"]) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py index 352e2f91..19f70067 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -21,6 +21,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME = "data_name" 
_KEY_FILE_PREFIX = "file_prefix" _KEY_FILE_SUFFIX = "file_suffix" + _KEY_FILE_COLUMN_SEPARATORS = "file_column_separators" _KEY_ORG_TABLE = "org_table" _KEY_SRC_TABLE = "src_table" _KEY_UPSERT_SQL = "upsert_sql" @@ -30,6 +31,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "販売実績データ", _KEY_FILE_PREFIX: "slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "82", _KEY_ORG_TABLE: "org05.sales", _KEY_SRC_TABLE: "src05.sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -299,6 +301,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸ホールディングスマスタ", _KEY_FILE_PREFIX: "hld_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "10", _KEY_ORG_TABLE: "org05.hld_mst_v", _KEY_SRC_TABLE: "src05.hld_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -352,6 +355,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸マスタ", _KEY_FILE_PREFIX: "whs_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "15", _KEY_ORG_TABLE: "org05.whs_mst_v", _KEY_SRC_TABLE: "src05.whs_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -420,6 +424,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "Vメーカー卸組織展開表", _KEY_FILE_PREFIX: "mkr_org_horizon_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "45", _KEY_ORG_TABLE: "org05.mkr_org_horizon_v", _KEY_SRC_TABLE: "src05.mkr_org_horizon_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -578,6 +583,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸組織変換マスタ", _KEY_FILE_PREFIX: "org_cnv_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "10", _KEY_ORG_TABLE: "org05.org_cnv_mst_v", _KEY_SRC_TABLE: "src05.org_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -631,6 +637,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V取引区分マスタ", _KEY_FILE_PREFIX: "tran_kbn_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "8", _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -678,6 +685,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V施設マスタ", 
_KEY_FILE_PREFIX: "fcl_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "23", _KEY_ORG_TABLE: "org05.fcl_mst_v", _KEY_SRC_TABLE: "src05.fcl_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -770,6 +778,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V製品マスタ", _KEY_FILE_PREFIX: "phm_prd_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "27", _KEY_ORG_TABLE: "org05.phm_prd_mst_v", _KEY_SRC_TABLE: "src05.phm_prd_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -874,6 +883,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V製品価格マスタ", _KEY_FILE_PREFIX: "phm_price_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "9", _KEY_ORG_TABLE: "org05.phm_price_mst_v", _KEY_SRC_TABLE: "src05.phm_price_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -924,6 +934,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V施設統合マスタ", _KEY_FILE_PREFIX: "vop_hco_merge_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "3", _KEY_ORG_TABLE: "org05.vop_hco_merge_v", _KEY_SRC_TABLE: "src05.vop_hco_merge_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -956,6 +967,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "V卸得意先情報マスタ", _KEY_FILE_PREFIX: "whs_customer_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "16", _KEY_ORG_TABLE: "org05.whs_customer_mst_v", _KEY_SRC_TABLE: "src05.whs_customer_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1027,6 +1039,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "MDBコード変換表", _KEY_FILE_PREFIX: "mdb_conv_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "7", _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1071,6 +1084,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "卸在庫データ", _KEY_FILE_PREFIX: "stock_slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "28", _KEY_ORG_TABLE: "org05.whole_stock", _KEY_SRC_TABLE: "src05.whole_stock", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1178,6 +1192,7 @@ class 
VjskReceiveFileMapper: _KEY_DATA_NAME: "生物由来データ", _KEY_FILE_PREFIX: "bio_slip_data_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "77", _KEY_ORG_TABLE: "org05.bio_sales", _KEY_SRC_TABLE: "src05.bio_sales", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1432,6 +1447,7 @@ class VjskReceiveFileMapper: _KEY_DATA_NAME: "ロットマスタデータ", _KEY_FILE_PREFIX: "lot_num_mst_", _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "5", _KEY_ORG_TABLE: "org05.lot_num_mst", _KEY_SRC_TABLE: "src05.lot_num_mst", _KEY_UPSERT_SQL: textwrap.dedent("""\ @@ -1481,6 +1497,9 @@ class VjskReceiveFileMapper: def get_file_suffix(self, condkey: str) -> str: return self._get_interface_property(condkey, self._KEY_FILE_SUFFIX) + def get_file_column_separators(self, condkey: str) -> int: + return int(self._get_interface_property(condkey, self._KEY_FILE_COLUMN_SEPARATORS)) + def get_org_table(self, condkey: str) -> str: return self._get_interface_property(condkey, self._KEY_ORG_TABLE) From 788e13f1da8d60dbfeefffadaed2f758e96ed785 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 10:31:14 +0900 Subject: [PATCH 82/86] =?UTF-8?q?refactor:=20=E9=96=A2=E6=95=B0=E5=90=8D?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/master_mainte.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/controller/master_mainte.py b/ecs/jskult-webapp/src/controller/master_mainte.py index b8b8e087..ea972ad6 100644 --- a/ecs/jskult-webapp/src/controller/master_mainte.py +++ b/ecs/jskult-webapp/src/controller/master_mainte.py @@ -106,7 +106,7 @@ def inst_emp_csv_upload_view( @router.get('/instEmpCsvDL', response_class=HTMLResponse) -def new_inst_view( +def inst_emp_csv_download_view( request: Request, batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)) ): From 3573bf08618362c6e2e5192424a9180a6c7e06e3 Mon Sep 17 
00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 10:54:06 +0900 Subject: [PATCH 83/86] =?UTF-8?q?style:=20CSS=E7=A7=BB=E6=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/static/css/masterMainte.css | 164 ++++++++++++++++++ .../src/templates/instEmpCsvDL.html | 3 +- .../src/templates/instEmpCsvUL.html | 2 +- .../src/templates/tableOverride.html | 3 +- 4 files changed, 167 insertions(+), 5 deletions(-) create mode 100644 ecs/jskult-webapp/src/static/css/masterMainte.css diff --git a/ecs/jskult-webapp/src/static/css/masterMainte.css b/ecs/jskult-webapp/src/static/css/masterMainte.css new file mode 100644 index 00000000..a59c1681 --- /dev/null +++ b/ecs/jskult-webapp/src/static/css/masterMainte.css @@ -0,0 +1,164 @@ +body{ + background-color: LightCyan; + font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; +} + +h1{ + margin-left : 1%; +} + + +/*ヘッダー*/ +.headerTable{ + width: 100%; +} + +.headerTdLeft{ + width: 80%; +} + +.headerTdRight{ + text-align: right; + padding-right: 2%; + width: 20%; +} + +.buttonSize{ + width: 85px; +} + +/*////////////////////////*/ +/*施設担当者データCSVダウンロード*/ +/*////////////////////////*/ +.searchColumnTd{ + width: 14%; +} + +.searchTextboxTd{ + width: 18%; +} + +.searchTable{ + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; +} + +.searchLabelTd{ + text-align: right; + width: 10%; + +} + +.searchInputTd{ + width: 19%; +} + +.searchTextbox{ + width: 90%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; +} + +.searchDateTextbox{ + width: 37%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; +} + +.searchButtonTd{ + text-align: right; + padding-top: 1%; +} + + +.csvOutputMessage{ + margin-left: 3%; +} + +.errorColor{ + color: red; +} + 
+/*//////////////////////////*/ +/*施設担当者データExcelアップロード*/ +/*//////////////////////////*/ +.inputTable{ + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; +} + +.inputLabelTd{ + width: 10%; +} + +.inputTd{ + width:20%; +} + +.inputButtonTd{ + width: 50%; + text-align: right; +} + +.dataCntDisp{ + text-align: right; + margin-right: 3%; +} + +table.inputData { + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; + white-space: nowrap; + border: 0.1px solid silver; + padding: 4px; + padding-right: 20px; + border-collapse: collapse; + margin-left: 3%; + width: 94%; +} +table.inputData tbody th { + color: #3D3D3D; + padding: 4px; + background-color: #e6EEEE; + border: 0.1px solid silver; + vertical-align: top; +} + +table.inputData tbody td { + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; +} + +.footerMsg{ + margin-left: 3%; +} + + +/*//////////////////////////*/ +/*データ上書きコピー */ +/*//////////////////////////*/ +.tableOverRide{ + margin-right: 3%; + margin-left: 3%; + margin-bottom: 2%; + border-bottom: solid 1px gray; + width: 94%; +} + diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html index 07712d73..7e84fd4c 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvDL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvDL.html @@ -4,8 +4,7 @@ {% with subtitle = view.subtitle %} {% include '_header.html' %} {% endwith %} - - +

施設担当者データCSVダウンロード

diff --git a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html index bdd305d8..9ec84207 100644 --- a/ecs/jskult-webapp/src/templates/instEmpCsvUL.html +++ b/ecs/jskult-webapp/src/templates/instEmpCsvUL.html @@ -5,7 +5,7 @@ {% include '_header.html' %} {% endwith %} - +

施設担当者データCSVアップロード

diff --git a/ecs/jskult-webapp/src/templates/tableOverride.html b/ecs/jskult-webapp/src/templates/tableOverride.html index 272cd0cd..e473b469 100644 --- a/ecs/jskult-webapp/src/templates/tableOverride.html +++ b/ecs/jskult-webapp/src/templates/tableOverride.html @@ -4,8 +4,7 @@ {% with subtitle = view.subtitle %} {% include '_header.html' %} {% endwith %} - - +

テーブル上書きコピー

From 2e6937eb71275d43c66172cd696efdce1a212410 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Mon, 3 Jul 2023 11:11:06 +0900 Subject: [PATCH 84/86] =?UTF-8?q?fix:=20=E3=82=BB=E3=83=83=E3=82=B7?= =?UTF-8?q?=E3=83=A7=E3=83=B3=E5=88=87=E3=82=8C=E3=81=AE=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A2=E3=82=A6=E3=83=88=E6=99=82=E3=80=81=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=82=A2=E3=82=A6=E3=83=88=E7=94=BB=E9=9D=A2=E3=81=AB=E4=BD=95?= =?UTF-8?q?=E3=82=82=E5=87=BA=E3=81=95=E3=81=AA=E3=81=84=E7=94=A8=E3=81=AB?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 4 ++-- ecs/jskult-webapp/src/router/session_router.py | 3 +-- ecs/jskult-webapp/src/system_var/constants.py | 2 -- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 09032af5..412ba068 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -73,7 +73,7 @@ def login( raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR) except JWTTokenVerifyException as e: logger.exception(e) - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) verified_token = jwt_token.verify_token() # 普通の認証だと、`cognito:username`に入る。 @@ -127,7 +127,7 @@ def sso_authorize( verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: logger.exception(e) - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) # トークンからユーザーIDを取得 user_id = verified_token.user_id diff --git a/ecs/jskult-webapp/src/router/session_router.py b/ecs/jskult-webapp/src/router/session_router.py index 
90f3a5c9..324c777f 100644 --- a/ecs/jskult-webapp/src/router/session_router.py +++ b/ecs/jskult-webapp/src/router/session_router.py @@ -90,8 +90,7 @@ class BeforeCheckSessionRoute(MeDaCaRoute): verified_session = verify_session(checked_session) # セッションが有効でない場合、エラーにする if verified_session is None: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, - detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) scope = request.scope scope['session'] = verified_session session_request = Request(receive=request.receive, scope=scope) diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index 899c19e5..2ea9454f 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -113,7 +113,6 @@ LOGOUT_REASON_LOGIN_ERROR = 'login_error' LOGOUT_REASON_BATCH_PROCESSING = 'batch_processing' LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE = 'batch_processing_ult' LOGOUT_REASON_NOT_LOGIN = 'not_login' -LOGOUT_REASON_SESSION_EXPIRED = 'session_expired' LOGOUT_REASON_DB_ERROR = 'db_error' LOGOUT_REASON_UNEXPECTED = 'unexpected' @@ -123,7 +122,6 @@ LOGOUT_REASON_MESSAGE_MAP = { LOGOUT_REASON_BATCH_PROCESSING: '日次バッチ処理中なので、
生物由来データ参照は使用出来ません。', LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE: '日次バッチ処理中のため、
マスタ-メンテは使用出来ません。', LOGOUT_REASON_NOT_LOGIN: 'Loginしてからページにアクセスしてください。', - LOGOUT_REASON_SESSION_EXPIRED: 'セッションが切れています。
再度Loginしてください。', LOGOUT_REASON_DB_ERROR: 'DB接続に失敗しました。
再度Loginするか、
管理者にお問い合わせください。', LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。
再度Loginするか、
管理者に問い合わせてください。' } From c1a0f9a11e0cafa9ffe75886e633d1c11a933bef Mon Sep 17 00:00:00 2001 From: "x.azuma.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 16:29:04 +0900 Subject: [PATCH 85/86] =?UTF-8?q?=E4=BB=96=E3=82=BF=E3=82=B9=E3=82=AF?= =?UTF-8?q?=E3=81=AB=E5=BD=B1=E9=9F=BF=E3=81=8C=E3=81=82=E3=82=8B=E3=81=AE?= =?UTF-8?q?=E3=81=A7develop=E3=83=9E=E3=83=BC=E3=82=B8=E3=82=92=E5=84=AA?= =?UTF-8?q?=E5=85=88=E3=81=99=E3=82=8B=E3=81=9F=E3=82=81=E3=80=81=E6=9C=AA?= =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=81=AE=E3=83=AD=E3=82=B8=E3=83=83?= =?UTF-8?q?=E3=82=AF=E3=82=92=E4=B8=80=E6=97=A6=E3=82=B3=E3=83=A1=E3=83=B3?= =?UTF-8?q?=E3=83=88=E3=82=A2=E3=82=A6=E3=83=88=E3=81=97=E3=81=A6=E5=8B=95?= =?UTF-8?q?=E4=BD=9C=E3=81=AB=E5=BD=B1=E9=9F=BF=E3=81=97=E3=81=AA=E3=81=84?= =?UTF-8?q?=E3=82=88=E3=81=86=E3=81=AB=E3=81=99=E3=82=8B=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/vjsk/vjsk_data_load_manager.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py index 3ef87186..cef4e1ec 100644 --- a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -88,11 +88,13 @@ class VjskDataLoadManager: local_file_name = target["src_file_path"] # tsvファイル末尾行のTABの数が総定数と一致しない場合は例外をスロー - tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) - expect_tabs = mapper.get_file_column_separators(target["condkey"]) - if tsv_tabs != expect_tabs: - msg = f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" - raise BatchOperationException(msg) + # TODO: ↓↓↓developへのマージを優先させたいので、未テストのロジックはコメントアウトする + # tsv_tabs = self._get_tsv_last_row_tab_count(local_file_name) + # expect_tabs = mapper.get_file_column_separators(target["condkey"]) + # if tsv_tabs != expect_tabs: + # msg = 
f"受領tsvファイルの末尾行のTABの数が総定数と一致しませんでした local_file_name: {local_file_name}" + # raise BatchOperationException(msg) + # TODO: ↑↑↑developへのマージを優先させたいので、未テストのロジックはコメントアウトする # データベース登録 self._import_to_db(local_file_name, target["condkey"]) From 449dedd3a5d5ce3a6caa99df3f0e19484c8b393f Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 5 Jul 2023 16:30:35 +0900 Subject: [PATCH 86/86] =?UTF-8?q?fix:=20=E3=83=AD=E3=82=B0=E3=82=A4?= =?UTF-8?q?=E3=83=B3=E6=99=82=E3=81=AE=E3=82=A8=E3=83=A9=E3=83=BC=E3=81=AF?= =?UTF-8?q?ERROR=E3=83=AD=E3=82=B0=E3=81=A7=E5=87=BA=E3=81=95=E3=81=AA?= =?UTF-8?q?=E3=81=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-webapp/src/controller/login.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py index 5659c993..c8a5663c 100644 --- a/ecs/jskult-webapp/src/controller/login.py +++ b/ecs/jskult-webapp/src/controller/login.py @@ -130,7 +130,7 @@ def sso_authorize( # トークン検証 verified_token = jwt_token.verify_token() except JWTTokenVerifyException as e: - logger.exception(f'SSOログイン失敗:{e}') + logger.info(f'SSOログイン失敗:{e}') raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) # トークンからユーザーIDを取得