Merge branch 'develop' into featrue-NEWDWH2021-1876

This commit is contained in:
mori.k 2025-06-09 14:51:34 +09:00
commit 7abe373a95
86 changed files with 560 additions and 267 deletions

View File

@ -109,5 +109,5 @@ class VjskSendBucket(S3Bucket):
# バックアップバケットにコピー
vjsk_backup_bucket = VjskBackupBucket()
dat_key = f'{self._send_folder}/{dat_file_key}'
backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
backup_key = f'{vjsk_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key)

View File

@ -55,18 +55,17 @@ def exec():
batch_status_manager.set_process_status(constants.PROCESS_STATUS_ERROR)
return constants.BATCH_EXIT_CODE_SUCCESS
# アルトマーク取込が正常終了していればバッチステータスを処理済に変更
# DCF/DSFデータ作成でエラーになっても、バッチ処理としては完了したと判断する。
batch_status_manager.set_process_status(constants.PROCESS_STATUS_DONE)
try:
logger.info('実消化用DCF/DSFデータ作成処理起動')
output_dcf_dsf_data.exec()
logger.info('実消化用DCF/DSFデータ作成処理終了')
except BatchOperationException as e:
logger.exception(f'実消化用施設DCF/DSF作成処理エラー異常終了{e}')
# バッチステータスをエラーに変更
batch_status_manager.set_process_status(constants.PROCESS_STATUS_ERROR)
return constants.BATCH_EXIT_CODE_SUCCESS
# バッチステータスを処理済に変更
batch_status_manager.set_process_status(constants.PROCESS_STATUS_DONE)
logger.info('アルトマーク取込/データ出力:終了')
return constants.BATCH_EXIT_CODE_SUCCESS

View File

@ -15,6 +15,6 @@ RUN \
pip uninstall -y pipenv virtualenv-clone virtualenv
COPY src ./src
COPY entrypoint.py entrypoint.py
COPY entrypoint.py entrypoint.py
CMD ["python", "entrypoint.py"]

View File

@ -85,7 +85,7 @@ class JskUltBackupBucket(S3Bucket):
class JskBackupBucket(JskUltBackupBucket):
_folder = environment.JSKULT_BACKUP_BUCKET
_folder = environment.JSK_BACKUP_FOLDER
class JskTransferListBucket(JskUltBackupBucket):
@ -118,8 +118,8 @@ class JskSendBucket(S3Bucket):
def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str):
# バックアップバケットにコピー
jskult_backup_bucket = JskUltBackupBucket()
jskult_backup_bucket = JskBackupBucket()
dat_key = f'{self._send_folder}/{dat_file_key}'
backup_key = f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
self._s3_client.copy(self._bucket_name, dat_key,
jskult_backup_bucket._bucket_name, backup_key)

View File

@ -17,7 +17,7 @@ from src.manager.jskult_batch_status_manager import JskultBatchStatusManager
from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager
from src.system_var import constants
logger = get_logger('DCF削除新規マスタ作成')
logger = get_logger('DCF削除新規マスタ作成/データ出力')
class DcfInstMergeIO(JskultBatchEntrypoint):
@ -26,9 +26,6 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
# 環境変数をimport
self.environment = DCFInstMergeEnvironment()
def execute(self):
logger.info("DCF削除新規マスタ作成処理を開始します。")
# 必須の環境変数が設定されていない場合、エラーにする
try:
self.environment.validate()
@ -36,9 +33,14 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
logger.exception(e)
return
def execute(self):
logger.info("DCF削除新規マスタ作成/データ出力処理を開始します。")
jskult_hdke_tbl_manager = JskultHdkeTblManager()
jskult_batch_run_manager = JskultBatchRunManager(
self.environment.BATCH_MANAGE_DYNAMODB_TABLE_NAME,
self.environment.BATCH_EXECUTION_ID)
if not jskult_hdke_tbl_manager.can_run_process():
logger.error(
'日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。')
@ -58,6 +60,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
logger.exception(f'転送ファイル一覧の取得に失敗しました。 {e}')
# バッチ実行管理テーブルをfailedで登録
jskult_batch_run_manager.batch_failed()
return
with open(transfer_list_file_path) as f:
transfer_list = json.load(f)
@ -73,7 +76,6 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
receive_file_count
)
try:
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_START)
try:
@ -85,7 +87,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
# バッチ実行管理テーブルに「retry」で登録
jskult_batch_run_manager.batch_retry()
logger.info("起動条件を満たしていないため、DCF削除新規マスタ作成処理を終了します。")
return
except MaxRunCountReachedException:
logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。')
@ -93,27 +95,39 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_DOING)
# DCF削除新規マスタ作成、出力用にDB接続を開始。
# トランザクションも開始。
db = Database.get_instance()
db.connect()
db.to_jst()
# アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行
if jskult_batch_status_manager.is_done_ultmarc_import():
logger.info("アルトマークデータが取り込まれているため、DCF削除新規マスタ作成処理を開始します。")
db.begin()
# COM_施設からDCF削除新規マスタに登録
(is_add_dcf_inst_merge,
duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(self)
duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(db)
if is_add_dcf_inst_merge:
logger.info('[NOTICE]DCF施設削除新規マスタが追加されました。')
self._output_add_dcf_inst_merge_log(
duplication_inst_records)
logger.info("DCF削除新規マスタ作成処理が正常終了しました。")
db.commit()
# CSV出力
dcf_inst_merge_all_records = self._select_dcf_inst_merge_all()
# DCF施設削除新規マスタ出力
logger.info('DCF施設削除新規マスタ出力を開始します。')
dcf_inst_merge_all_records = self._select_dcf_inst_merge_all(db)
file_path = self._make_csv_data(
self.environment.DCF_INST_MERGE_SEND_FILE_NAME,
dcf_inst_merge_all_records)
# CSVをS3にアップロード
self._upload_dcf_inst_merge_csv_file(
file_path, process_date, self.environment.DCF_INST_MERGE_SEND_FILE_NAME)
self.environment.DCF_INST_MERGE_SEND_FILE_NAME, process_date, file_path)
logger.info("DCF施設削除新規マスタ出力が正常終了しました。")
# 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録
logger.info("DCF削除新規マスタ作成処理を正常終了します。")
logger.info("DCF削除新規マスタ作成/データ出力処理を終了します。")
jskult_batch_run_manager.batch_success()
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_DONE)
@ -121,146 +135,136 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
return
except Exception as e:
db.rollback()
# 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録
logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}')
jskult_batch_run_manager.batch_failed()
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_ERROR)
def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]:
try:
self._db = Database.get_instance()
self._db.connect()
sql = """\
SELECT
*
FROM
src07.dcf_inst_merge
"""
dcf_inst_merge_all_records = self._db.execute_select(sql)
return dcf_inst_merge_all_records
except Exception as e:
raise BatchOperationException(e)
finally:
self._db.disconnect()
db.disconnect()
# com_instからdcf_inst_mergeにinsert
def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]:
def _insert_dcf_inst_merge_from_com_inst(self, db: Database) -> tuple[bool, list[dict]]:
try:
self._db = Database.get_instance()
self._db.connect()
self._db.begin()
self._db.to_jst()
sql = """\
SELECT
ci.DCF_DSF_INST_CD,
ci.FORM_INST_NAME_KANJI,
ci.DELETE_SCHE_REASON_CD,
ci.DUP_OPP_CD,
ci.SYS_UPDATE_DATE
ci.dcf_dsf_inst_cd AS dcf_dsf_inst_cd,
ci.form_inst_name_kanji AS form_inst_name_kanji,
ci.dup_opp_cd AS dup_opp_cd,
(
SELECT
dupci.form_inst_name_kanji
FROM
src05.com_inst AS dupci
WHERE
dupci.dcf_dsf_inst_cd = ci.dup_opp_cd
) AS dup_inst_name_kanji,
DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), '%Y%m') AS start_month
FROM
src05.COM_INST AS ci
src05.com_inst AS ci
WHERE
ci.DUP_OPP_CD IS NOT NULL
(ci.dup_opp_cd IS NOT NULL OR CHAR_LENGTH(ci.dup_opp_cd) > 0)
AND
ci.DELETE_SCHE_REASON_CD = 'D'
ci.delete_sche_reason_cd = 'D'
AND
ci.DELETE_DATA IS NULL
ci.abolish_ymd IS NULL
AND
ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW()
ci.sys_update_date BETWEEN src07.get_syor_date() AND NOW()
AND
NOT EXISTS (
SELECT
dim.DCF_INST_CD
1
FROM
src07.DCF_INST_MERGE AS dim
src07.dcf_inst_merge AS dim
WHERE
dim.DCF_INST_CD = ci.DCF_DSF_INST_CD
dim.dcf_inst_cd = ci.dcf_dsf_inst_cd
)
AND
(ci.DCF_DSF_INST_CD EXISTS(
AND(
EXISTS(
SELECT
mia.INST_CD
1
FROM
src07.MST_INST_ASSN as mia
src07.mst_inst_assn as mia
WHERE
mia.INST_CD = ci.DCF_DSF_INST_CD
mia.inst_cd = ci.dcf_dsf_inst_cd
)
OR EXISTS(
SELECT
1
FROM
src07.atc_pharm AS ap
WHERE
ap.prsb_inst_cd = ci.dcf_dsf_inst_cd
)
OR ci.DCF_DSF_INST_CD EXISTS(
OR EXISTS(
SELECT
ap.PRSB_INST_CD
1
FROM
src07.ATC_PHARM AS ap
view07.vw_tebra_sales_refreshed AS vtsr
WHERE
ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD
)
OR ci.DCF_DSF_INST_CD EXISTS(
SELECT
trd.INST_CD
FROM
src07.TRN_RESULT_DATA AS trd
WHERE
trd.INST_CD = ci.DCF_DSF_INST_CD
)
vtsr.cnvs_inst_cd = ci.dcf_dsf_inst_cd
)
;
);
"""
duplication_inst_records = self._db.execute_select(sql)
duplication_inst_records = db.execute_select(sql)
if len(duplication_inst_records) == 0:
logger.info('施設統合対象データはありません')
return (False, None)
# DCF削除新規マスタ取り込み
values_clauses = []
params = {}
for clauses_no, row in enumerate(duplication_inst_records, start=1):
dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}"
dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}"
dcf_inst_cd_arr = f"dcf_inst_cd{clauses_no}"
dup_opp_cd_arr = f"dup_opp_cd{clauses_no}"
start_month_arr = f'start_month{clauses_no}'
values_clause = f"""(:{dcf_inst_cd_arr},
:{dup_opp_cd_arr},
DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH),
:{start_month_arr},
NULL,
NULL,
NULL,
"Y",
batchuser,
CURRENT_USER(),
SYSDATE(),
batchuser,
CURRENT_USER(),
SYSDATE()
)"""
values_clauses.append(values_clause)
params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD']
params[dup_opp_cd_arr] = row['DUP_OPP_CD']
params[dcf_inst_cd_arr] = row['dcf_dsf_inst_cd']
params[dup_opp_cd_arr] = row['dup_opp_cd']
params[start_month_arr] = row['start_month']
insert_sql = f"""
INSERT INTO
src07.dcf_inst_merge (
DCF_INST_CD,
DUP_OPP_CD,
START_MONTH,
INVALID_FLG,
REMARKS,
DCF_INST_CD_NEW,
ENABLED_FLG,
CREATER,
CREATE_DATE,
UPDATER,
UPDATE_DATE
dcf_inst_cd,
dup_opp_cd,
start_month,
invalid_flg,
remarks,
dcf_inst_cd_new,
enabled_flg,
creater,
create_date,
updater,
update_date
)
VALUES
{','.join(values_clauses)}
"""
self._db.execute(insert_sql, params)
db.execute(insert_sql, params)
return (True, duplication_inst_records)
except Exception as e:
self._db.rollback()
raise BatchOperationException(e)
finally:
self._db.disconnect()
def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]):
sys_update_date = duplication_inst_records[0]['sys_update_date']
def _output_add_dcf_inst_merge_log(self, duplication_inst_records: list[dict]):
start_month = duplication_inst_records[0]['start_month']
set_year_month = '{set_year}{set_month}'.format(
set_year=sys_update_date[0:4],
set_month=sys_update_date[-2:]
set_year=start_month[0:4],
set_month=start_month[-2:]
)
add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}'
add_dct_inst_merge_list = []
@ -270,7 +274,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list)
# 顧客報告用にログ出力
logger.info(
f"""DCF施設統合マスタが追加されました。
f"""DCF施設削除新規マスタが追加されました。
**********************************************************
適用月度 {set_year_month}
**********************************************************
@ -281,7 +285,20 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
return
def _make_csv_data(csv_file_name: str, record_inst: list):
def _select_dcf_inst_merge_all(self, db: Database) -> list[dict]:
    """Select every record from src07.dcf_inst_merge.

    Args:
        db: an already-connected Database instance; the caller owns the
            connection lifecycle (no connect/disconnect happens here).

    Returns:
        All rows of the DCF institution-merge master, as a list of dicts.
        (Annotation corrected: the body returns the bare record list, not
        the ``tuple[bool, list[dict]]`` copied from the insert method.)

    Raises:
        BatchOperationException: wrapping any error raised by the SELECT.
    """
    try:
        sql = """\
SELECT
*
FROM
src07.dcf_inst_merge
"""
        dcf_inst_merge_all_records = db.execute_select(sql)
        return dcf_inst_merge_all_records
    except Exception as e:
        raise BatchOperationException(e)
def _make_csv_data(self, csv_file_name: str, record_inst: list):
temporary_dir = tempfile.mkdtemp()
csv_file_path = path.join(temporary_dir, csv_file_name)
head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH',

View File

@ -23,13 +23,16 @@ class TrnResultDataBioLotEnvironment(JskultBatchEnvironment):
Raises:
EnvironmentVariableNotSetException: 環境変数の設定ミス
"""
super()._assert_variable_not_empty(self.JSKULT_BACKUP_BUCKET, 'JSKULT_BACKUP_BUCKET')
super()._assert_variable_not_empty(self.BATCH_MANAGE_DYNAMODB_TABLE_NAME, 'BATCH_MANAGE_DYNAMODB_TABLE_NAME')
super()._assert_variable_not_empty(
self.JSKULT_BACKUP_BUCKET, 'JSKULT_BACKUP_BUCKET')
super()._assert_variable_not_empty(
self.BATCH_MANAGE_DYNAMODB_TABLE_NAME, 'BATCH_MANAGE_DYNAMODB_TABLE_NAME')
super()._assert_variable_not_empty(self.BATCH_EXECUTION_ID, 'BATCH_EXECUTION_ID')
super()._assert_variable_is_int(self.MAX_RUN_COUNT, 'MAX_RUN_COUNT')
# MAX_RUN_COUNTは数値として扱うため、検査後に変換
self.MAX_RUN_COUNT = int(self.MAX_RUN_COUNT)
super()._assert_variable_not_empty(self.PROCESS_NAME, 'PROCESS_NAME')
super()._assert_variable_not_empty(self.TRANSFER_RESULT_FOLDER, 'TRANSFER_RESULT_FOLDER')
super()._assert_variable_not_empty(self.TRANSFER_RESULT_FILE_NAME, 'TRANSFER_RESULT_FILE_NAME')
super()._assert_variable_not_empty(
self.TRANSFER_RESULT_FOLDER, 'TRANSFER_RESULT_FOLDER')
super()._assert_variable_not_empty(
self.TRANSFER_RESULT_FILE_NAME, 'TRANSFER_RESULT_FILE_NAME')

View File

@ -1,25 +1,22 @@
import json
from src.aws.s3 import JskTransferListBucket
from src.batch.environment.trn_result_data_bio_lot_environment import \
TrnResultDataBioLotEnvironment
from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint
from src.manager.jskult_batch_run_manager import JskultBatchRunManager
from src.manager.jskult_batch_status_manager import JskultBatchStatusManager
from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager
from src.db.database import Database
from src.error.exceptions import (BatchOperationException,
EnvironmentVariableNotSetException,
MaxRunCountReachedException)
from src.logging.get_logger import get_logger
from src.manager.jskult_batch_run_manager import JskultBatchRunManager
from src.manager.jskult_batch_status_manager import JskultBatchStatusManager
from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager
from src.system_var import constants
logger = get_logger('生物由来ロット分解')
logger = get_logger('生物由来卸販売ロット分解')
class TrnResultDataBioLot(JskultBatchEntrypoint):
def __init__(self):
super().__init__()
@ -33,9 +30,8 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
return
def execute(self):
"""生物由来卸販売ロット分解"""
logger.info('生物由来卸販売ロット分解処理開始')
"""生物由来ロット分解"""
logger.info('生物由来ロット分解処理開始')
jskult_hdke_tbl_manager = JskultHdkeTblManager()
jskult_batch_run_manager = JskultBatchRunManager(
@ -43,7 +39,7 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
self.environment.BATCH_EXECUTION_ID)
if not jskult_hdke_tbl_manager.can_run_process():
logger.error(
'日次バッチ処理中またはdump取得が正常終了していないため、生物由来卸販売ロット分解処理を終了します。')
'日次バッチ処理中またはdump取得が正常終了していないため、生物由来ロット分解処理を終了します。')
# バッチ実行管理テーブルをfailedで登録
jskult_batch_run_manager.batch_failed()
return
@ -61,6 +57,8 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
# バッチ実行管理テーブルをfailedで登録
jskult_batch_run_manager.batch_failed()
return
with open(transfer_list_file_path) as f:
transfer_list = json.load(f)
@ -70,28 +68,31 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
jskult_batch_status_manager = JskultBatchStatusManager(
self.environment.PROCESS_NAME,
# TODO チケットNEWDWH2021-1847の実装で作成した定数に置き換え
'post_process',
constants.PROCESS_TYPE_POST_PROCESS,
self.environment.MAX_RUN_COUNT,
receive_file_count
)
try:
jskult_batch_status_manager.set_process_status(constants.BATCH_ACTF_BATCH_START)
jskult_batch_status_manager.set_process_status(
constants.BATCH_ACTF_BATCH_START)
try:
if not jskult_batch_status_manager.can_run_post_process():
# 後続処理の起動条件を満たしていない場合
# 処理ステータスを「処理待」に設定
jskult_batch_status_manager.set_process_status(constants.PROCESS_STATUS_WAITING)
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_WAITING)
# バッチ実行管理テーブルに「retry」で登録
jskult_batch_run_manager.batch_retry()
logger.info('起動条件を満たしていないため、生物由来ロット分解処理を終了します。')
return
except MaxRunCountReachedException:
logger.info('最大起動回数に到達したため、生物由来卸販売ロット分解処理を実行します。')
logger.info('最大起動回数に到達したため、生物由来ロット分解処理を実行します。')
jskult_batch_status_manager.set_process_status(constants.PROCESS_STATUS_DOING)
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_DOING)
db = Database.get_instance()
try:
db.connect()
@ -102,18 +103,20 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
self._insert_trn_result_data_bio_lot(db)
# 生物由来ロット分解データの不要レコードを削除する
self._delete_empty_lot_record(db)
# 製造ロット管理番号マスタから有効期限を生物由来ロット分解データにセットする
self._set_expr_dt_from_lot_num_mst(db)
# 施設情報を生物由来ロット分解データにセットする
self._set_inst_info_from_com_inst_or_com_pharm_or_mst_inst_merck(db)
self._set_inst_info_from_com_inst_or_com_pharm_or_mst_inst_merck(
db)
# 製造ロット管理番号マスタから有効期限を生物由来ロット分解データにセットする
self._set_expr_dt_from_customer_lotno_all(db)
db.commit()
logger.info('生物由来卸販売ロット分解処理終了')
logger.info('生物由来ロット分解処理終了')
# 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録
logger.info("生物由来卸販売ロット分解処理を正常終了します。")
logger.info("生物由来ロット分解処理を正常終了します。")
jskult_batch_run_manager.batch_success()
jskult_batch_status_manager.set_process_status(constants.PROCESS_STATUS_DONE)
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_DONE)
return
except Exception as e:
db.rollback()
raise BatchOperationException(e)
@ -121,10 +124,10 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
db.disconnect()
except Exception as e:
# 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録
logger.exception(f'予期せぬエラーが発生したため、生物由来卸販売ロット分解処理を終了します。{e}')
logger.exception(f'予期せぬエラーが発生したため、生物由来ロット分解処理を終了します。{e}')
jskult_batch_run_manager.batch_failed()
jskult_batch_status_manager.set_process_status(constants.PROCESS_STATUS_ERROR)
jskult_batch_status_manager.set_process_status(
constants.PROCESS_STATUS_ERROR)
def _delete_not_confirm_data_in_trn_result_data_bio_lot(self, db: Database):
logger.info('生物由来ロット分解データの未確定データ削除開始')
@ -136,13 +139,12 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
AND bio.seq_no = lot.seq_no
AND IFNULL(bio.upd_date, bio.ins_date) >= src07.get_syor_date()
"""
res = db.execute(sql)
db.execute(sql)
except Exception as e:
logger.info('生物由来ロット分解データの未確定データ削除に失敗')
raise e
logger.info('生物由来ロット分解データの未確定データ削除に成功')
def _insert_trn_result_data_bio_lot(self, db: Database):
logger.info('生物由来ロット分解データの作成開始')
try:
@ -157,7 +159,7 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
bio.edit_whlslr_org_cd AS edit_whlslr_org_cd,
bio.orig_univ_product_cd AS orig_univ_product_cd,
bio.edit_deal_div_cd AS edit_deal_div_cd,
bio.cnvs_sales_dt AS cnvs_sales_dt,
bio.cnvs_sales_dt AS cnvs_sales_dt, -- datetime date
bio.orig_slip_no AS orig_slip_no,
bio.orig_prod_nm AS orig_prod_nm,
bio.edit_endusr_cd AS edit_endusr_cd,
@ -169,7 +171,7 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
WHEN 2 THEN bio.cnvs_lot_no_2
WHEN 3 THEN bio.cnvs_lot_no_3
END AS cnvs_lot_no,
bio.load_dt AS load_dt,
bio.load_dt AS load_dt, -- datetime date
bio.cnvs_deal_div_cd AS cnvs_deal_div_cd,
bio.cls_deal_div_nm AS cls_deal_div_nm,
bio.cnvs_depo_cd AS cnvs_depo_cd,
@ -187,8 +189,8 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
bio.cnvs_inst_cd AS cnvs_inst_cd,
bio.cls_inst_nm AS cls_inst_nm,
-- COM_施設 or COM_薬局 or メルク独自施設マスタから後ほどセット
NULL AS inst_addr,
NULL AS inst_tel,
NULL AS address,
NULL AS tel_num,
bio.result_cd AS result_cd,
bio.src_cd AS src_cd,
-- 判定結果CDより値を設定する
@ -200,14 +202,14 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
WHEN 'Z' THEN 'エラー(想定外)'
END AS data_kbn,
-- SRC_種類より値を設定する
CASE bio.result_cd
CASE bio.src_cd
WHEN '1' THEN 'VAN'
WHEN '2' THEN '手入力'
WHEN '3' THEN 'VAN-Web'
WHEN 'S' THEN 'SCSK-VAN'
END AS if_kind,
-- 製品コードロット番号でロットマスタより後ほどセット
NULL AS ck_last_dt_txt
NULL AS expr_dt
FROM
src07.trn_result_data_bio bio
-- 生物由来変換マスタ
@ -222,7 +224,6 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
raise e
logger.info('生物由来ロット分解データの作成に成功')
def _delete_empty_lot_record(self, db: Database):
logger.info('生物由来ロット分解データのロット番号が空のレコードを削除開始')
try:
@ -232,13 +233,12 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
-- 空白15桁のデータはロット情報が空とみなして削除する
lot.cnvs_lot_no = REPEAT(' ', 15) OR lot.cnvs_lot_no IS NULL
"""
res = db.execute(sql)
db.execute(sql)
except Exception as e:
logger.info('生物由来ロット分解データのロット番号が空のレコードを削除に失敗')
raise e
logger.info('生物由来ロット分解データのロット番号が空のレコードを削除に成功')
def _set_inst_info_from_com_inst_or_com_pharm_or_mst_inst_merck(self, db: Database):
logger.info('COM_施設 or COM_薬局 or メルク独自施設マスタから施設情報を生物由来ロット分解データにセット開始')
try:
@ -257,7 +257,7 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
AND DATE_FORMAT(bio.cnvs_sales_dt, '%Y%m') BETWEEN mim.eff_start_ym
AND mim.eff_end_ym
-- 施設住所
SET bio.inst_addr = (
SET bio.address = (
CASE LEFT(bio.cnvs_inst_cd, 2)
WHEN '00' THEN ci.inst_addr
WHEN '03' THEN cp.inst_addr
@ -265,22 +265,22 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
END
),
-- 施設電話番号
bio.inst_tel = (
bio.tel_num = (
CASE LEFT(bio.cnvs_inst_cd, 2)
WHEN '00' THEN ci.inst_phone_number
WHEN '03' THEN cm.inst_phone_number
WHEN '03' THEN cp.inst_phone_number
ELSE mim.tel_no
END
)
"""
res = db.execute(sql)
db.execute(sql)
except Exception as e:
logger.info('COM_施設 or COM_薬局 or メルク独自施設マスタから施設情報を生物由来ロット分解データにセット失敗')
logger.info(
'COM_施設 or COM_薬局 or メルク独自施設マスタから施設情報を生物由来ロット分解データにセット失敗')
raise e
logger.info('COM_施設 or COM_薬局 or メルク独自施設マスタから施設情報を生物由来ロット分解データにセット成功')
def _set_ck_last_dt_txt_from_customer_lotno_all(self, db: Database):
def _set_expr_dt_from_customer_lotno_all(self, db: Database):
# ロットマスタから有効期限をセット
logger.info('ロットマスタから有効期限をセット開始')
try:
@ -291,9 +291,9 @@ class TrnResultDataBioLot(JskultBatchEntrypoint):
ON bio.cnvs_prod_cd = cla.material_cd
AND bio.cnvs_lot_no = cla.lot_no_txt
SET
bio.ck_last_dt_txt = cla.ck_last_dt_txt
bio.expr_dt = STR_TO_DATE(cla.ck_last_dt_txt, '%Y%m%d')
"""
res = db.execute(sql)
db.execute(sql)
except Exception as e:
logger.info('ロットマスタから有効期限をセット失敗')
raise e

View File

@ -1,11 +1,7 @@
import gzip
import json
import os
import os.path as path
import shutil
import tempfile
import boto3
from src.system_var import environment
@ -20,7 +16,7 @@ class S3Client:
return []
contents = response['Contents']
# 末尾がスラッシュで終わるものはフォルダとみなしてスキップする
objects = [{'filename': content['Key'], 'size': content['Size']}
objects = [content['Key']
for content in contents if not content['Key'].endswith('/')]
return objects
@ -60,46 +56,26 @@ class JskIOBucket(S3Bucket):
self._bucket_name, self._recv_folder)
return self._s3_file_list
def download_data_file(self, data_filename: str):
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(
temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}')
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, data_filename, f)
f.seek(0)
return temporary_file_path
def unzip_data_file(self, filename: str):
temp_dir = os.path.dirname(filename)
decompress_filename = os.path.basename(filename).replace('.gz', '')
decompress_file_path = os.path.join(temp_dir, decompress_filename)
with gzip.open(filename, 'rb') as gz:
with open(decompress_file_path, 'wb') as decompressed_file:
shutil.copyfileobj(gz, decompressed_file)
ret = [decompress_file_path]
return ret
def transfer_file_to_import(self, target_file: dict):
def transfer_file_to_import(self, target_file: str):
data_import_bucket = DataImportBucket()
transfer_from_file_path = target_file.get("filename")
transfer_from_file_path = target_file
transfer_to_filename = transfer_from_file_path.replace(
f"{self._recv_folder}/", "")
data_import_key = f'{data_import_bucket._folder}/{transfer_to_filename}'
self._s3_client.copy(self._bucket_name, transfer_from_file_path,
data_import_bucket._bucket_name, data_import_key)
def backup_file(self, target_file: dict, datetime_key: str):
def backup_file(self, target_file: str, datetime_key: str):
jsk_backup_bucket = JskBackupBucket()
backup_from_file_path = target_file.get("filename")
backup_from_file_path = target_file
backup_to_filename = backup_from_file_path.replace(
f"{self._recv_folder}/", "")
backup_key = f'{jsk_backup_bucket._folder}/{datetime_key}/{backup_to_filename}'
self._s3_client.copy(self._bucket_name, backup_from_file_path,
jsk_backup_bucket._bucket_name, backup_key)
def delete_file(self, target_file: dict):
delete_path = target_file.get("filename")
def delete_file(self, target_file: str):
delete_path = target_file
self._s3_client.delete_file(
self._bucket_name, delete_path)
@ -116,16 +92,16 @@ class UltmarcBucket(S3Bucket):
def get_file_list(self):
return self._s3_client.list_objects(self._bucket_name, self._folder)
def backup_file(self, target_file: dict, datetime_key: str):
def backup_file(self, target_file: str, datetime_key: str):
# バックアップバケットにコピー
ultmarc_backup_bucket = UltmarcBackupBucket()
target_file_path = target_file.get("filename")
target_file_path = target_file
backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{target_file_path.replace(f"{self._folder}/", "")}'
self._s3_client.copy(self._bucket_name, target_file_path,
ultmarc_backup_bucket._bucket_name, backup_key)
def delete_file(self, target_file: dict):
delete_path = target_file.get("filename")
def delete_file(self, target_file: str):
delete_path = target_file
self._s3_client.delete_file(
self._bucket_name, delete_path)
@ -134,8 +110,8 @@ class UltmarcImportBucket(S3Bucket):
_bucket_name = environment.ULTMARC_DATA_BUCKET
_folder = environment.ULTMARC_IMPORT_FOLDER
def transfer_file_to_import(self, target_file: dict):
from_file_path = target_file.get("filename")
def transfer_file_to_import(self, target_file: str):
from_file_path = target_file
to_filename = from_file_path.replace(
f"{UltmarcBucket()._folder}/", "")
data_import_key = f'{self._folder}/{to_filename}'

View File

@ -1,5 +1,9 @@
"""実消化&アルトマーク データ転送処理"""
import itertools
import os
import re
from src.aws.s3 import (JskIOBucket, TransferResultOutputBucket, UltmarcBucket,
UltmarcImportBucket)
from src.error.exceptions import BatchOperationException
@ -32,12 +36,12 @@ def exec():
# 日次バッチ処理中の場合、後続の処理は行わない
if batch_processing_flag == constants.BATCH_ACTF_BATCH_START:
logger.error('日次バッチ処理中のため、日次バッチ処理を終了します。')
logger.error('日次バッチ処理中のため、実消化&アルトマークデータ転送を終了します。')
return constants.BATCH_EXIT_CODE_SUCCESS
# dump取得が正常終了していない場合、後続の処理は行わない
if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE:
logger.error('dump取得が正常終了していないため、日次バッチ処理を終了します。')
logger.error('dump取得が正常終了していないため、実消化&アルトマークデータ転送を終了します。')
return constants.BATCH_EXIT_CODE_SUCCESS
logger.info(f'処理日={syor_date}')
@ -54,10 +58,45 @@ def exec():
jsk_receive_file_list = None
try:
jsk_io_bucket = JskIOBucket()
jsk_receive_file_list: str = jsk_io_bucket.get_file_list()
jsk_receive_file_list: list[str] = jsk_io_bucket.get_file_list()
except Exception as e:
logger.exception(f'実消化データリスト取得に失敗しました。{e}')
return constants.BATCH_EXIT_CODE_SUCCESS
# 実消化データリストの中で、ファイル種類(ファイル名のプレフィックス)が重複するものがあるかどうかをチェックする。
# 1) プレフィックスごとにマップを作り、該当するファイル名をリストで集める
# 以下のようなマップが作られる
# {
# "TRN_RESULT_DATA": ["TRN_RESULT_DATA_20250606102030.zip",
# "TRN_RESULT_DATA_20250606112030.zip"],
# "TRN_Recive_Inventry": ["TRN_Recive_Inventry_20250606102030.zip"],
# ...
# }
prefix_map: dict[str, list[str]] = {}
for filename in jsk_receive_file_list:
p = extract_prefix(filename)
prefix_map.setdefault(p, []).append(filename)
# 2) 重複しているプレフィックスを探す
duplicates = {prefix: file_list for prefix,
file_list in prefix_map.items() if len(file_list) > 1}
# 3) 重複があれば転送一覧から除外する
if duplicates:
# マップをフラットなリストに変換する
duplicate_files = list(
itertools.chain.from_iterable(duplicates.values()))
logger.warning(
f'W-1 実消化データの中で一部重複データがあります。重複データは転送から除外します。重複データ一覧: {duplicate_files}')
# 転送しなかったファイルもバックアップに移動させる
for filename in duplicate_files:
jsk_io_bucket.backup_file(filename, syor_date)
jsk_io_bucket.delete_file(filename)
# S3内のファイル数と重複ファイルの差集合を取ることで、要素を削除
jsk_receive_file_list = list(
set(jsk_receive_file_list) - set(duplicate_files))
logger.info(f'I-4 実消化データリスト取得終了。取得データ一覧:{jsk_receive_file_list}')
# ④ 取得した実消化データのリストでループ開始
@ -72,7 +111,7 @@ def exec():
# ⑧ 転送が完了したファイル名を転送データリストに追加する
# ファイル名のみ切り出して追加
transfer_file_lists['jsk_transfer_list'].append(
receive_file['filename'].split('/')[1])
receive_file.split('/')[1])
# ⑨ ループ終了後、実消化データ転送終了ログI-6)を出力する
logger.info(f'I-6 実消化データ転送処理終了')
@ -86,6 +125,17 @@ def exec():
except Exception as e:
logger.exception(f'アルトマークデータリスト取得に失敗しました。{e}')
return constants.BATCH_EXIT_CODE_SUCCESS
# アルトマークデータは1件以上送られてくるのが想定外のため、1件より多かったら連携から除外する
if len(ultmarc_receive_file_list) > 1:
logger.warning(
f'W-2 アルトマークデータが複数配置されているため、転送から除外します。重複データ一覧: {ultmarc_receive_file_list}')
# 転送しなかった場合でもバックアップに移動させる
for filename in ultmarc_receive_file_list:
ultmarc_bucket.backup_file(filename, syor_date)
ultmarc_bucket.delete_file(filename)
# 連携しないようにするため、リストを0件に書き換える。
ultmarc_receive_file_list = []
logger.info(
f'I-8 アルトマークデータリスト取得終了。取得データ一覧:{ultmarc_receive_file_list}')
@ -102,12 +152,13 @@ def exec():
# ⑮ 転送が完了したファイル名を転送データリストに追加する
# ファイル名のみ切り出して追加
transfer_file_lists['ult_transfer_list'].append(
receive_file['filename'].split('/')[1])
receive_file.split('/')[1])
# ⑯ ループ終了後、アルトマークデータ転送終了ログI-10)を出力する
logger.info(f'I-6 実消化データ転送処理終了')
logger.info(f'I-10 アルトマークデータ転送処理終了')
# ⑰ 転送データリストをJSONファイル化し、S3バケットにアップロードする
logger.info(f'I-11 データ転送結果アップロード')
TransferResultOutputBucket().put_transfer_result(transfer_file_lists, syor_date)
# ⑱ 処理終了ログ(I-12)を出力する
@ -118,3 +169,20 @@ def exec():
except Exception as e:
logger.exception(f'実消化&アルトマーク データ転送処理中に想定外のエラーが発生しました {e}')
raise e
def extract_prefix(filename: str) -> str:
    """Return the file-type prefix of *filename*.

    Strips the directory part, the extension, and a trailing 14-digit
    timestamp component:
        "TRN_RESULT_DATA_20250606102030.zip" -> "TRN_RESULT_DATA"

    If the stem does not end in "_<14 digits>", the original *filename*
    is returned unchanged (deliberate, per the original comment: such a
    name acts as its own unique prefix and never collides with others).
    """
    # Drop directory and extension; the extension itself is not needed.
    stem, _ = os.path.splitext(os.path.basename(filename))
    # The stem is expected to end with a 14-digit timestamp, so split once
    # from the right:
    #   "TRN_RESULT_DATA_20250606102030" -> ["TRN_RESULT_DATA", "20250606102030"]
    parts = stem.rsplit('_', 1)
    if len(parts) == 2 and re.fullmatch(r"\d{14}", parts[1]):
        return parts[0]
    # Not timestamp-suffixed: return the whole filename as-is.
    return filename

View File

@ -8,13 +8,13 @@ class TrnResultDataBioLotDBModel(BaseDBModel):
if_kind: Optional[str]
account_ym: Optional[str]
seq_no: Optional[int]
load_dt: Optional[datetime]
load_dt: Optional[date]
orig_whlslr_cd: Optional[str]
orig_whlslr_sub_cd: Optional[str]
cls_whlslr_nm: Optional[str]
edit_whlslr_org_cd: Optional[str]
orig_slip_no: Optional[str]
cnvs_sales_dt: Optional[datetime]
cnvs_sales_dt: Optional[date]
edit_deal_div_cd: Optional[str]
cls_deal_div_nm: Optional[str]
cnvs_prod_cd: Optional[str]

View File

@ -16,7 +16,7 @@ class BioModel(BaseModel):
load_dt_from: Optional[str]
load_dt_to: Optional[str]
cnvs_lot_no: Optional[str]
result_cd: Optional[str]
data_kbn: Optional[str]
cnvs_prod_cd: Optional[str]
cnvs_sales_dt_from: Optional[str]
cnvs_sales_dt_to: Optional[str]
@ -31,7 +31,7 @@ class BioModel(BaseModel):
ctrl_load_dt_from: str = Form(None),
ctrl_load_dt_to: str = Form(None),
ctrl_cnvs_lot_no: str = Form(None),
ctrl_result_cd: str = Form(None),
ctrl_data_kbn: str = Form(None),
ctrl_maker_cd: str = Form(None),
ctrl_cnvs_sales_dt_from: str = Form(None),
ctrl_cnvs_sales_dt_to: str = Form(None),
@ -46,7 +46,7 @@ class BioModel(BaseModel):
ctrl_load_dt_from,
ctrl_load_dt_to,
ctrl_cnvs_lot_no,
ctrl_result_cd,
ctrl_data_kbn,
ctrl_maker_cd,
ctrl_cnvs_sales_dt_from,
ctrl_cnvs_sales_dt_to,
@ -62,7 +62,7 @@ class BioModel(BaseModel):
ctrl_load_dt_from: str = Body(None),
ctrl_load_dt_to: str = Body(None),
ctrl_cnvs_lot_no: str = Body(None),
ctrl_result_cd: str = Body(None),
ctrl_data_kbn: str = Body(None),
ctrl_maker_cd: str = Body(None),
ctrl_cnvs_sales_dt_from: str = Body(None),
ctrl_cnvs_sales_dt_to: str = Body(None)
@ -75,7 +75,7 @@ class BioModel(BaseModel):
ctrl_load_dt_from,
ctrl_load_dt_to,
ctrl_cnvs_lot_no,
ctrl_result_cd,
ctrl_data_kbn,
ctrl_maker_cd,
ctrl_cnvs_sales_dt_from,
ctrl_cnvs_sales_dt_to
@ -88,7 +88,7 @@ class BioModel(BaseModel):
ctrl_load_dt_from: str,
ctrl_load_dt_to: str,
ctrl_cnvs_lot_no: str,
ctrl_result_cd: str,
ctrl_data_kbn: str,
ctrl_maker_cd: str,
ctrl_cnvs_sales_dt_from: str,
ctrl_cnvs_sales_dt_to: str,
@ -130,7 +130,7 @@ class BioModel(BaseModel):
load_dt_from=load_dt_from,
load_dt_to=load_dt_to,
cnvs_lot_no=ctrl_cnvs_lot_no,
result_cd=ctrl_result_cd,
data_kbn=ctrl_data_kbn,
cnvs_prod_cd=ctrl_maker_cd,
cnvs_sales_dt_from=cnvs_sales_dt_from,
cnvs_sales_dt_to=cnvs_sales_dt_to,

View File

@ -38,7 +38,7 @@ class BioViewModel(BaseModel):
{
'': '',
'1':'正常',
'2':'卸間転送除外対象データ',
'2':'卸間転送除外対象',
'E':'エラー',
'D':'エラー(重複)',
'Z':'エラー(想定外)'

View File

@ -141,8 +141,8 @@ class BioSalesLotRepository(BaseRepository):
cnvs_lot_no_comparator = condition.LIKE if '%' in cnvs_lot_no else condition.EQ
where_clauses.append(SQLCondition('TRIM(cnvs_lot_no)', cnvs_lot_no_comparator, 'cnvs_lot_no'))
# データ区分
if is_not_empty(parameter.result_cd):
where_clauses.append(SQLCondition('result_cd', condition.EQ, 'result_cd'))
if is_not_empty(parameter.data_kbn):
where_clauses.append(SQLCondition('data_kbn', condition.EQ, 'data_kbn'))
# 製品
if is_not_empty(parameter.cnvs_prod_cd):
where_clauses.append(SQLCondition('cnvs_prod_cd', condition.EQ, 'cnvs_prod_cd'))

View File

@ -13,7 +13,7 @@ class WholesalerMasterRepository(BaseRepository):
b.orig_whlslr_sub_cd as rec_whs_sub_cd,
v2.ws_nm_kj as name,
b.cls_whlslr_nm as whs_name
FROM src07.trn_result_data_bio b
FROM src07.trn_result_data_bio_lot b
LEFT OUTER JOIN
(
SELECT

View File

@ -17,8 +17,9 @@ BIO_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data')
BIO_EXCEL_TEMPLATE_FILE_PATH = path.join(BIO_TEMPORARY_FILE_DIR_PATH, 'BioData_template.xlsx')
BIO_CSV_HEADER = [
'データ種別',
'伝票管理NO',
'連携種別',
'計上年月',
'管理番号',
'処理日',
'卸コード',
'卸サブコード',
@ -31,29 +32,21 @@ BIO_CSV_HEADER = [
'製品コード',
'統一商品コード',
'商品名',
'卸報告商品名',
'納入先コード',
'納入先名',
'卸報告納入先名',
'納入先住所',
'卸報告納入先住所',
'ロット番号',
'数量',
'有効期限',
'データ区分',
'エラー詳細種別',
'訂正前伝票管理NO',
'修正者',
'修正日時',
'施設コード',
'施設名',
'施設住所',
'施設電話番号',
'Veeva卸コード',
'Veeva卸組織コード',
'卸組織名',
'Veeva取引区分コード',
'2017年11月以前データ'
'tebra卸コード',
'tebraデポコード',
'デポ名',
'tebra取引区分コード'
]
LOGOUT_REASON_DO_LOGOUT = 'do_logout'

View File

@ -47,7 +47,7 @@
<td class="search_tb">
<select class="text search_dropdown" name="ctrl_org_kbn" onChange="formBtDisabled();applySearchParam(this)" value="">
{% for org_kbn_code, org_kbn_value in bio.display_org_kbn().items() %}
<option value="{{org_kbn_code}}">
<option value="{{org_kbn_value}}">
{{org_kbn_value}}
</option>
{% endfor %}
@ -75,7 +75,7 @@
</td>
<td>データ区分:</td>
<td class="search_tb">
<select class="text search_dropdown" name="ctrl_result_cd" onchange="formBtDisabled();applySearchParam(this)">
<select class="text search_dropdown" name="ctrl_data_kbn" onchange="formBtDisabled();applySearchParam(this)">
{% for data_kbn_code, data_kbn_value in bio.display_data_kbn().items() %}
<option option value="{{data_kbn_value}}">
{{data_kbn_value}}
@ -174,7 +174,7 @@
<input type="hidden" name="ctrl_load_dt_from" value="">
<input type="hidden" name="ctrl_load_dt_to" value="">
<input type="hidden" name="ctrl_cnvs_lot_no" value="">
<input type="hidden" name="ctrl_result_cd" value="">
<input type="hidden" name="ctrl_data_kbn" value="">
<input type="hidden" name="ctrl_maker_cd" value="">
<input type="hidden" name="ctrl_cnvs_sales_dt_from" value="">
<input type="hidden" name="ctrl_cnvs_sales_dt_to" value="">
@ -185,7 +185,7 @@
<input type="hidden" name="ctrl_load_dt_from" value="">
<input type="hidden" name="ctrl_load_dt_to" value="">
<input type="hidden" name="ctrl_cnvs_lot_no" value="">
<input type="hidden" name="ctrl_result_cd" value="">
<input type="hidden" name="ctrl_data_kbn" value="">
<input type="hidden" name="ctrl_maker_cd" value="">
<input type="hidden" name="ctrl_cnvs_sales_dt_from" value="">
<input type="hidden" name="ctrl_cnvs_sales_dt_to" value="">

View File

@ -6,7 +6,6 @@ JSK_BACKUP_FOLDER=jsk/send
TRANSFER_RESULT_FOLDER=transfer_result
TRANSFER_RESULT_FILE_NAME=transfer_result.json
DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv
JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config
DB_CONNECTION_MAX_RETRY_ATTEMPT=1
DB_CONNECTION_RETRY_INTERVAL_INIT=1
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1

View File

@ -9,7 +9,7 @@ INST_CD,DRUG_RLT_PROD_GRP_CD,PRSB_INST_CD,DVSN_RT,REMARK,EFF_START_YM,EFF_END_YM
inst_cd,drug_rlt_prod_grp_cd,prsb_inst_cd,dvsn_rt,remark,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.atc_pharm
org07.atc_pharm
ATC_PHARM_ex.sql
truncate_src_table:src07.atc_pharm
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'ATC_PHARM',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ MATERIAL_CD,BATCH_ID,JAN_GTIN13,UNIV_PRODUCT_CD,LOT_NO_TXT,CK_FIRST_DT_TXT,CK_LA
material_cd,batch_id,jan_gtin13,univ_product_cd,lot_no_txt,ck_first_dt_txt,ck_last_dt_txt,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id,file_name,file_row_cnt,delete_flg,ins_user,ins_date,upd_user,upd_date
src07.customer_lotno_all
org07.customer_lotno_all
CUSTOMER_LOTNO_ALL_ex.sql
truncate_src_table:src07.customer_lotno_all
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'CUSTOMER_LOTNO_ALL',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ EMP_CD,EMP_NM_KJ,EMP_NM_KJ_S,EMP_NM_EN,EMP_NM_EN_S,EMP_NM_KN,EMP_NM_KN_S,EMP_NM_
emp_cd,emp_nm_kj,emp_nm_kj_s,emp_nm_en,emp_nm_en_s,emp_nm_kn,emp_nm_kn_s,emp_nm_bi,org_lvln_cd,org_lvl,position_nm,tebra_navi_role_cd,mail_address,prod_assn_team_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.emp
org07.emp
EMP_ex.sql
truncate_src_table:src07.emp
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'EMP',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ACCOUNT_YM,SEQ_NO,SRC_CD,RESULT_CD,JD_NHI_CD,DATA_DIV_CD,LOAD_DT,REC_SEQ_NO,LOAD
account_ym,seq_no,src_cd,result_cd,jd_nhi_cd,data_div_cd,load_dt,rec_seq_no,load_seq_no,orig_whlslr_cd,orig_whlslr_sub_cd,orig_whlslr_org_cd,orig_endusr_cd,orig_sales_dt,orig_slip_no,orig_deal_div_cd,orig_bill_ym,orig_univ_product_cd,orig_sales_qty,orig_sales_untprc,orig_sales_amt,orig_pre_disc_untprc,orig_after_disc_untprc,orig_pre_sales_dt,orig_pre_slip_no,orig_prod_nm,orig_endusr_nm,orig_jan_cd,orig_arb_item,orig_lot_no_1,orig_lot_sales_qty_1,orig_lot_no_2,orig_lot_sales_qty_2,orig_lot_no_3,orig_lot_sales_qty_3,orig_jd44_filler1,orig_jd44_filler2,orig_lot_no,orig_expiration_date,orig_jan_gtin13,orig_gtin14,orig_sales_dt8,orig_pre_sales_dt8,orig_actual_endusr_cd,orig_endusr_common_cd,orig_actual_endusr_common_cd,orig_actual_endusr_nm,orig_actual_endusr_addr_nm,orig_actual_endusr_tel,orig_detail_seq_no,orig_nodelivered_kbn,orig_bio_org_kbn,orig_depo_cd,orig_depo_nm,orig_whlslr_org_nm,orig_biko,orig_ms_cd,orig_filler3,orig_jd45_addr_txt,orig_jd45_filler1,orig_jd45_filler2,orig_jd45_filler3,orig_nhi_filler1,orig_nhi_proc_dt,orig_nhi_addr1_txt,orig_nhi_addr2_txt,orig_nhi_tally_div_cd,orig_nhi_tally_qty,orig_nhi_par_univ_product_cd,orig_man_remark1,orig_man_remark2,orig_man_remark3,orig_man_remark4,orig_man_remark5,orig_man_remark6,orig_man_remark7,orig_man_remark8,orig_man_remark9,orig_man_remark10,edit_whlslr_org_cd,edit_deal_div_cd,edit_endusr_cd,cnvs_whlslr_cd,cls_whlslr_nm,cnvs_depo_cd,cls_depo_nm,cnvs_slip_no,cnvs_sales_dt,cnvs_deal_div_cd,cls_deal_div_nm,cnvs_bill_ym,cnvs_inst_cd,cls_inst_nm,cnvs_prod_cd,cls_prod_nm,cnvs_sales_untprc,cnvs_sales_amt,cnvs_sales_qty,cls_chrg_fr_mdcn,cls_invc_prcng,cnvs_sign_cnv_sales_qty,cnvs_sign_cnv_sales_untprc,cnvs_sign_cnv_sales_amt,cnvs_lot_no_1,cnvs_lot_sales_qty_1,cnvs_lot_no_2,cnvs_lot_sales_qty_2,cnvs_lot_no_3,cnvs_lot_sales_qty_3,last_cnvs_dt,cnvs_cd,account_dt,customer_id,last_upd_dtt
src07.hst_result_data
org07.hst_result_data
HST_RESULT_DATA_ex.sql
truncate_src_table:src07.hst_result_data
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'HST_RESULT_DATA',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ IM_PLN_PROD_CD,IM_PLN_PROD_NM_KJ,IM_PLN_PROD_NM_KJ_S,IM_PLN_PROD_NM_EN,IM_PLN_PR
im_pln_prod_cd,im_pln_prod_nm_kj,im_pln_prod_nm_kj_s,im_pln_prod_nm_en,im_pln_prod_nm_en_s,im_pln_prod_nm_kn,im_pln_prod_nm_kn_s,im_pln_prod_nm_bi,pln_flg,prod_grp_cd,prod_assn_team_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.im_pln_prod
org07.im_pln_prod
IM_PLN_PROD_ex.sql
truncate_src_table:src07.im_pln_prod
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'IM_PLN_PROD',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ VERSION_KEY,PLN_YM,IM_PLN_PROD_CD,INST_CD,PLN_QTY,PLN_AMT,CREATE_ID,CREATE_DTT,L
version_key,pln_ym,im_pln_prod_cd,inst_cd,pln_qty,pln_amt,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.inst_im_pln
org07.inst_im_pln
INST_IM_PLN_ex.sql
truncate_src_table:src07.inst_im_pln
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'INST_IM_PLN',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ VERSION_KEY,VERSION,EFF_FLG,CREATE_ID,CREATE_DTT,LAST_UPD_ID,LAST_UPD_DTT,CUSTOM
version_key,version,eff_flg,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mr_yr_pln_inst_ky
org07.mr_yr_pln_inst_ky
MR_YR_PLN_INST_KY_ex.sql
truncate_src_table:src07.mr_yr_pln_inst_ky
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MR_YR_PLN_INST_KY',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ PROD_GRP_CD,PROD_GRP_NM_KJ,PROD_GRP_NM_KJ_S,PROD_GRP_NM_EN,PROD_GRP_NM_EN_S,PROD
prod_grp_cd,prod_grp_nm_kj,prod_grp_nm_kj_s,prod_grp_nm_en,prod_grp_nm_en_s,prod_grp_nm_kn,prod_grp_nm_kn_s,prod_assn_team_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_assn_prod_grp
org07.mst_assn_prod_grp
MST_ASSN_PROD_GRP_ex.sql
truncate_src_table:src07.mst_assn_prod_grp
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_ASSN_PROD_GRP',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ DRUG_RLT_PROD_GRP_CD,DRUG_RLT_PROD_GRP_NM_KJ,DRUG_RLT_PROD_GRP_NM_KJ_S,DRUG_RLT_
drug_rlt_prod_grp_cd,drug_rlt_prod_grp_nm_kj,drug_rlt_prod_grp_nm_kj_s,drug_rlt_prod_grp_nm_en,drug_rlt_prod_grp_nm_en_s,drug_rlt_prod_grp_nm_kn,drug_rlt_prod_grp_nm_kn_s,drug_rlt_prod_grp_nm_bi,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_atc_pharm_prod_grp
org07.mst_atc_pharm_prod_grp
MST_ATC_PHARM_PROD_GRP_ex.sql
truncate_src_table:src07.mst_atc_pharm_prod_grp
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_ATC_PHARM_PROD_GRP',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ CORP_CD,CORP_NM_KJ,CORP_NM_KJ_S,CORP_NM_KN,CORP_NM_KN_S,CORP_NM_EN,CORP_NM_EN_S,
corp_cd,corp_nm_kj,corp_nm_kj_s,corp_nm_kn,corp_nm_kn_s,corp_nm_en,corp_nm_en_s,corp_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,corp_grp_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_company
org07.mst_company
MST_COMPANY_ex.sql
truncate_src_table:src07.mst_company
1

View File

@ -9,7 +9,7 @@ CORP_GRP_CD,CORP_GRP_NM_KJ,CORP_GRP_NM_KJ_S,CORP_GRP_NM_KN,CORP_GRP_NM_KN_S,CORP
corp_grp_cd,corp_grp_nm_kj,corp_grp_nm_kj_s,corp_grp_nm_kn,corp_grp_nm_kn_s,corp_grp_nm_en,corp_grp_nm_en_s,corp_grp_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_company_grp
org07.mst_company_grp
MST_COMPANY_GRP_ex.sql
truncate_src_table:src07.mst_company_grp
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_COMPANY_GRP',
'data_import',
'done',
NULL,
NULL
);

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_COMPANY',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ DEAL_DIV_CD,DEAL_DIV_NM,DEAL_DIV_BI,SORT_CD,EFF_START_YM,EFF_END_YM,CREATE_ID,CR
deal_div_cd,deal_div_nm,deal_div_bi,sort_cd,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_deal_div
org07.mst_deal_div
MST_DEAL_DIV_ex.sql
truncate_src_table:src07.mst_deal_div
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_DEAL_DIV',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ CATEGORY_CD,CATEGORY_NM,GENERAL_CD,GENERAL_NM,SORT_CD,REMARK,EFF_START_YM,EFF_EN
category_cd,category_nm,general_cd,general_nm,sort_cd,remark,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_general
org07.mst_general
MST_GENERAL_ex.sql
truncate_src_table:src07.mst_general
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_GENERAL',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ INST_CD,PROD_ASSN_TEAM_CD,EMP_TYPE_CD,EMP_CD,SORT_CD,EFF_START_YM,EFF_END_YM,CRE
inst_cd,prod_assn_team_cd,emp_type_cd,emp_cd,sort_cd,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_inst_assn
org07.mst_inst_assn
MST_INST_ASSN_ex.sql
truncate_src_table:src07.mst_inst_assn
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_INST_ASSN',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ INST_CD,REC_CD,INST_NM_KJ,INST_NM_KJ_S,INST_NM_KN,INST_NM_KN_S,INST_NM_EN,INST_N
inst_cd,rec_cd,inst_nm_kj,inst_nm_kj_s,inst_nm_kn,inst_nm_kn_s,inst_nm_en,inst_nm_en_s,jis_pref_cd,jis_city_cd,postal_no,pref_nm_kj,pref_nm_kn,pref_nm_en,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,international_tel_no,international_fax_no,bed_qty,par_inst_cd,inst_type_cd,inst_type_nm_kj,inst_div_cd,inst_mjr_div_nm_kj,inst_div_nm_kj,del_reason_cd,del_reason_nm_kj,mngmnt_bdy_mjr_cd,mngmnt_bdy_mjr_nm_kj,mngmnt_bdy_mnr_cd,mngmnt_bdy_mnr_nm_kj,closed_flg,closed_ym,open_expected_flg,open_expected_ym,re_screening_cd,re_screening_nm_kj,rep_nm_kn,rep_nm_kj,sort_cd,data_src_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,auto_flg,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_inst_merck
org07.mst_inst_merck
MST_INST_MERCK_ex.sql
truncate_src_table:src07.mst_inst_merck
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_INST_MERCK',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ JIS_PREF_CD,JIS_CITY_CD,JIS_CITY_NM_KJ,JIS_CITY_NM_KN,JIS_CITY_NM_EN,JIS_CITY_NM
jis_pref_cd,jis_city_cd,jis_city_nm_kj,jis_city_nm_kn,jis_city_nm_en,jis_city_nm_bi,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_jis_city
org07.mst_jis_city
MST_JIS_CITY_ex.sql
truncate_src_table:src07.mst_jis_city
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_JIS_CITY',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ JIS_PREF_CD,JIS_PREF_NM_KJ,JIS_PREF_NM_KN,JIS_PREF_NM_EN,JIS_PREF_NM_BI,EFF_STAR
jis_pref_cd,jis_pref_nm_kj,jis_pref_nm_kn,jis_pref_nm_en,jis_pref_nm_bi,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_jis_pref
org07.mst_jis_pref
MST_JIS_PREF_ex.sql
truncate_src_table:src07.mst_jis_pref
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_JIS_PREF',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ PROD_PKG_CD,PROD_PKG_NM_KJ,PROD_PKG_NM_KJ_S,PROD_PKG_NM_EN,PROD_PKG_NM_EN_S,PROD
prod_pkg_cd,prod_pkg_nm_kj,prod_pkg_nm_kj_s,prod_pkg_nm_en,prod_pkg_nm_en_s,prod_pkg_nm_kn,prod_pkg_nm_kn_s,prod_pkg_nm_bi,prod_assn_team_cd,univ_product_cd,jan_cd,im_pln_prod_cd,drug_rlt_prod_grp_cd,standard,drug_shape,biomaterial_flg,cnvs_rt,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_prod_pkg
org07.mst_prod_pkg
MST_PROD_PKG_ex.sql
truncate_src_table:src07.mst_prod_pkg
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_PROD_PKG',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_SALES_PLACE_CD,WS_CD,WS_LVL4_CD,WS_SALES_PLACE_NM_KJ,WS_SALES_PLACE_NM_KJ_S,W
ws_sales_place_cd,ws_cd,ws_lvl4_cd,ws_sales_place_nm_kj,ws_sales_place_nm_kj_s,ws_sales_place_nm_kn,ws_sales_place_nm_kn_s,ws_sales_place_nm_en,ws_sales_place_nm_en_s,ws_sales_place_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_sales_place
org07.mst_sales_place
MST_SALES_PLACE_ex.sql
truncate_src_table:src07.mst_sales_place
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_SALES_PLACE',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WAREHOUSE_CD,CORP_CD,WAREHOUSE_NM_KJ,WAREHOUSE_NM_KJ_S,WAREHOUSE_NM_KN,WAREHOUSE
warehouse_cd,corp_cd,warehouse_nm_kj,warehouse_nm_kj_s,warehouse_nm_kn,warehouse_nm_kn_s,warehouse_nm_en,warehouse_nm_en_s,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_warehouse
org07.mst_warehouse
MST_WAREHOUSE_ex.sql
truncate_src_table:src07.mst_warehouse
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_WAREHOUSE',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_CD,WS_NM_KJ,WS_NM_KJ_S,WS_NM_KN,WS_NM_KN_S,WS_NM_EN,WS_NM_EN_S,WS_NM_BI,JIS_P
ws_cd,ws_nm_kj,ws_nm_kj_s,ws_nm_kn,ws_nm_kn_s,ws_nm_en,ws_nm_en_s,ws_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.mst_whlslr
org07.mst_whlslr
MST_WHLSLR_ex.sql
truncate_src_table:src07.mst_whlslr
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'MST_WHLSLR',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ORG_LVL1_CD,ORG_LVL1_NM_KJ,ORG_LVL1_NM_KJ_S,ORG_LVL1_NM_EN,ORG_LVL1_NM_EN_S,ORG_
org_lvl1_cd,org_lvl1_nm_kj,org_lvl1_nm_kj_s,org_lvl1_nm_en,org_lvl1_nm_en_s,org_lvl1_nm_kn,org_lvl1_nm_kn_s,org_lvl1_nm_bi,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.org_lvl1
org07.org_lvl1
ORG_LVL1_ex.sql
truncate_src_table:src07.org_lvl1
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'ORG_LVL1',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ORG_LVL2_CD,ORG_LVL2_NM_KJ,ORG_LVL2_NM_KJ_S,ORG_LVL2_NM_EN,ORG_LVL2_NM_EN_S,ORG_
org_lvl2_cd,org_lvl2_nm_kj,org_lvl2_nm_kj_s,org_lvl2_nm_en,org_lvl2_nm_en_s,org_lvl2_nm_kn,org_lvl2_nm_kn_s,org_lvl2_nm_bi,org_lvl1_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.org_lvl2
org07.org_lvl2
ORG_LVL2_ex.sql
truncate_src_table:src07.org_lvl2
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'ORG_LVL2',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ORG_LVL3_CD,ORG_LVL3_NM_KJ,ORG_LVL3_NM_KJ_S,ORG_LVL3_NM_EN,ORG_LVL3_NM_EN_S,ORG_
org_lvl3_cd,org_lvl3_nm_kj,org_lvl3_nm_kj_s,org_lvl3_nm_en,org_lvl3_nm_en_s,org_lvl3_nm_kn,org_lvl3_nm_kn_s,org_lvl3_nm_bi,org_lvl2_cd,sort_cd,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.org_lvl3
org07.org_lvl3
ORG_LVL3_ex.sql
truncate_src_table:src07.org_lvl3
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'ORG_LVL3',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ORG_LVL4_CD,ORG_LVL4_NM_KJ,ORG_LVL4_NM_KJ_S,ORG_LVL4_NM_EN,ORG_LVL4_NM_EN_S,ORG_
org_lvl4_cd,org_lvl4_nm_kj,org_lvl4_nm_kj_s,org_lvl4_nm_en,org_lvl4_nm_en_s,org_lvl4_nm_kn,org_lvl4_nm_kn_s,org_lvl4_nm_bi,org_lvl3_cd,sort_cd,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.org_lvl4
org07.org_lvl4
ORG_LVL4_ex.sql
truncate_src_table:src07.org_lvl4
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'ORG_LVL4',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ PROD_ASSN_TEAM_CD,PROD_ASSN_TEAM_NM_KJ,PROD_ASSN_TEAM_NM_KJ_S,PROD_ASSN_TEAM_NM_
prod_assn_team_cd,prod_assn_team_nm_kj,prod_assn_team_nm_kj_s,prod_assn_team_nm_en,prod_assn_team_nm_en_s,prod_assn_team_nm_kn,prod_assn_team_nm_kn_s,prod_assn_team_nm_bi,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.prod_assn_team
org07.prod_assn_team
PROD_ASSN_TEAM_ex.sql
truncate_src_table:src07.prod_assn_team
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'PROD_ASSN_TEAM',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ PROD_PKG_CD,EFF_START_DT,CHRG_FR_MDCN,INVC_PRCNG,PRICE_1,PRICE_2,PRICE_3,EFF_END
prod_pkg_cd,eff_start_dt,chrg_fr_mdcn,invc_prcng,price_1,price_2,price_3,eff_end_dt,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.prod_price
org07.prod_price
PROD_PRICE_ex.sql
truncate_src_table:src07.prod_price
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'PROD_PRICE',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ ACCOUNT_YM,SEQ_NO,SRC_CD,RESULT_CD,JD_NHI_CD,DATA_DIV_CD,LOAD_DT,REC_SEQ_NO,LOAD
account_ym,seq_no,src_cd,result_cd,jd_nhi_cd,data_div_cd,load_dt,rec_seq_no,load_seq_no,orig_whlslr_cd,orig_whlslr_sub_cd,orig_whlslr_org_cd,orig_endusr_cd,orig_sales_dt,orig_slip_no,orig_deal_div_cd,orig_bill_ym,orig_univ_product_cd,orig_sales_qty,orig_sales_untprc,orig_sales_amt,orig_pre_disc_untprc,orig_after_disc_untprc,orig_pre_sales_dt,orig_pre_slip_no,orig_prod_nm,orig_endusr_nm,orig_jan_cd,orig_arb_item,orig_lot_no_1,orig_lot_sales_qty_1,orig_lot_no_2,orig_lot_sales_qty_2,orig_lot_no_3,orig_lot_sales_qty_3,orig_jd44_filler1,orig_jd44_filler2,orig_lot_no,orig_expiration_date,orig_jan_gtin13,orig_gtin14,orig_sales_dt8,orig_pre_sales_dt8,orig_actual_endusr_cd,orig_endusr_common_cd,orig_actual_endusr_common_cd,orig_actual_endusr_nm,orig_actual_endusr_addr_nm,orig_actual_endusr_tel,orig_detail_seq_no,orig_nodelivered_kbn,orig_bio_org_kbn,orig_depo_cd,orig_depo_nm,orig_whlslr_org_nm,orig_biko,orig_ms_cd,orig_filler3,orig_jd45_addr_txt,orig_jd45_filler1,orig_jd45_filler2,orig_jd45_filler3,orig_nhi_filler1,orig_nhi_proc_dt,orig_nhi_addr1_txt,orig_nhi_addr2_txt,orig_nhi_tally_div_cd,orig_nhi_tally_qty,orig_nhi_par_univ_product_cd,orig_man_remark1,orig_man_remark2,orig_man_remark3,orig_man_remark4,orig_man_remark5,orig_man_remark6,orig_man_remark7,orig_man_remark8,orig_man_remark9,orig_man_remark10,edit_whlslr_org_cd,edit_deal_div_cd,edit_endusr_cd,cnvs_whlslr_cd,cls_whlslr_nm,cnvs_depo_cd,cls_depo_nm,cnvs_slip_no,cnvs_sales_dt,cnvs_deal_div_cd,cls_deal_div_nm,cnvs_bill_ym,cnvs_inst_cd,cls_inst_nm,cnvs_prod_cd,cls_prod_nm,cnvs_sales_untprc,cnvs_sales_amt,cnvs_sales_qty,cls_chrg_fr_mdcn,cls_invc_prcng,cnvs_sign_cnv_sales_qty,cnvs_sign_cnv_sales_untprc,cnvs_sign_cnv_sales_amt,cnvs_lot_no_1,cnvs_lot_sales_qty_1,cnvs_lot_no_2,cnvs_lot_sales_qty_2,cnvs_lot_no_3,cnvs_lot_sales_qty_3,last_cnvs_dt,cnvs_cd,account_dt,customer_id,last_upd_dtt
src07.trn_result_data
org07.trn_result_data
TRN_RESULT_DATA_ex.sqlTRN_RESULT_DATA_ex.sql
truncate_src_table:src07.trn_result_data
1

View File

@ -9,7 +9,7 @@ ACCOUNT_YM,SEQ_NO,SRC_CD,RESULT_CD,JD_NHI_CD,DATA_DIV_CD,LOAD_DT,REC_SEQ_NO,LOAD
account_ym,seq_no,src_cd,result_cd,jd_nhi_cd,data_div_cd,load_dt,rec_seq_no,load_seq_no,orig_whlslr_cd,orig_whlslr_sub_cd,orig_whlslr_org_cd,orig_endusr_cd,orig_sales_dt,orig_slip_no,orig_deal_div_cd,orig_bill_ym,orig_univ_product_cd,orig_sales_qty,orig_sales_untprc,orig_sales_amt,orig_pre_disc_untprc,orig_after_disc_untprc,orig_pre_sales_dt,orig_pre_slip_no,orig_prod_nm,orig_endusr_nm,orig_jan_cd,orig_arb_item,orig_lot_no_1,orig_lot_sales_qty_1,orig_lot_no_2,orig_lot_sales_qty_2,orig_lot_no_3,orig_lot_sales_qty_3,orig_jd44_filler1,orig_jd44_filler2,orig_jd45_addr_txt,orig_jd45_filler1,orig_jd45_filler2,orig_jd45_filler3,orig_nhi_filler1,orig_nhi_proc_dt,orig_nhi_addr1_txt,orig_nhi_addr2_txt,orig_nhi_tally_div_cd,orig_nhi_tally_qty,orig_nhi_par_univ_product_cd,orig_man_remark1,orig_man_remark2,orig_man_remark3,orig_man_remark4,orig_man_remark5,orig_man_remark6,orig_man_remark7,orig_man_remark8,orig_man_remark9,orig_man_remark10,edit_whlslr_org_cd,edit_deal_div_cd,edit_endusr_cd,cnvs_whlslr_cd,cls_whlslr_nm,cnvs_depo_cd,cls_depo_nm,cnvs_slip_no,cnvs_sales_dt,cnvs_deal_div_cd,cls_deal_div_nm,cnvs_bill_ym,cnvs_inst_cd,cls_inst_nm,cnvs_prod_cd,cls_prod_nm,cnvs_sales_untprc,cnvs_sales_amt,cnvs_sales_qty,cls_chrg_fr_mdcn,cls_invc_prcng,cnvs_sign_cnv_sales_qty,cnvs_sign_cnv_sales_untprc,cnvs_sign_cnv_sales_amt,cnvs_lot_no_1,cnvs_lot_sales_qty_1,cnvs_lot_no_2,cnvs_lot_sales_qty_2,cnvs_lot_no_3,cnvs_lot_sales_qty_3,last_cnvs_dt,cnvs_cd,account_dt,customer_id,last_upd_dtt
src07.trn_result_data_bio
org07.trn_result_data_bio
TRN_RESULT_DATA_BIO_ex.sql
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'TRN_RESULT_DATA_BIO',
'data_import',
'done',
NULL,
NULL
);

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'TRN_RESULT_DATA',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ INVENTORY_YM,CURRENT_MONTH_SEQ,RECIVE_YMD,CURRENT_DAY_SEQ,RECIVE_DATA_COLUMN,PRO
inventory_ym,current_month_seq,recive_ymd,current_day_seq,recive_data_column,process_status,process_status2,result_cd,orig_whlslr_cd,orig_whlslr_sub_cd,orig_warehouse_cd,orig_inv_dt,orig_univ_product_cd,orig_inv_qty,orig_inv_qty_sign,orig_jan_cd,orig_arb_item,orig_jan_gtin13,orig_gtin14,orig_warehouse_nm,orig_inv_dt8,orig_filler,edt_warehouse_cd,edt_inv_dt,data_div_cd,ws_latest_flag,ws_cd,ws_nm_kj_s_trx,ws_warehouse_cd,ws_warehouse_nm_kj_s_trx,prod_cd,prod_nm_en_s_trx,inv_dt,inv_qty,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.trn_recive_inventry
org07.trn_recive_inventry
TRN_Recive_Inventry_ex.sql
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'TRN_Recive_Inventry',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_LVL1_CD,WS_LVL1_NM_KJ,WS_LVL1_NM_KJ_S,WS_LVL1_NM_KN,WS_LVL1_NM_KN_S,WS_LVL1_N
ws_lvl1_cd,ws_lvl1_nm_kj,ws_lvl1_nm_kj_s,ws_lvl1_nm_kn,ws_lvl1_nm_kn_s,ws_lvl1_nm_en,ws_lvl1_nm_en_s,ws_lvl1_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,corp_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.whlslr_lvl1
org07.whlslr_lvl1
WHLSLR_LVL1_ex.sql
truncate_src_table:src07.whlslr_lvl1
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'WHLSLR_LVL1',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_LVL2_CD,WS_LVL2_NM_KJ,WS_LVL2_NM_KJ_S,WS_LVL2_NM_KN,WS_LVL2_NM_KN_S,WS_LVL2_N
ws_lvl2_cd,ws_lvl2_nm_kj,ws_lvl2_nm_kj_s,ws_lvl2_nm_kn,ws_lvl2_nm_kn_s,ws_lvl2_nm_en,ws_lvl2_nm_en_s,ws_lvl2_nm_bi,ws_lvl1_cd,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.whlslr_lvl2
org07.whlslr_lvl2
WHLSLR_LVL2_ex.sql
truncate_src_table:src07.whlslr_lvl2
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'WHLSLR_LVL2',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_LVL3_CD,WS_LVL3_NM_KJ,WS_LVL3_NM_KJ_S,WS_LVL3_NM_KN,WS_LVL3_NM_KN_S,WS_LVL3_N
ws_lvl3_cd,ws_lvl3_nm_kj,ws_lvl3_nm_kj_s,ws_lvl3_nm_kn,ws_lvl3_nm_kn_s,ws_lvl3_nm_en,ws_lvl3_nm_en_s,ws_lvl3_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,ws_lvl2_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.whlslr_lvl3
org07.whlslr_lvl3
WHLSLR_LVL3_ex.sql
truncate_src_table:src07.whlslr_lvl3
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'WHLSLR_LVL3',
'data_import',
'done',
NULL,
NULL
);

View File

@ -9,7 +9,7 @@ WS_LVL4_CD,WS_LVL4_NM_KJ,WS_LVL4_NM_KJ_S,WS_LVL4_NM_KN,WS_LVL4_NM_KN_S,WS_LVL4_N
ws_lvl4_cd,ws_lvl4_nm_kj,ws_lvl4_nm_kj_s,ws_lvl4_nm_kn,ws_lvl4_nm_kn_s,ws_lvl4_nm_en,ws_lvl4_nm_en_s,ws_lvl4_nm_bi,jis_pref_cd,jis_city_cd,postal_no,addr1_nm_kj,addr1_nm_kn,addr1_nm_en,addr2_nm_kj,addr2_nm_kn,addr2_nm_en,tel_no,fax_no,ws_lvl3_cd,sort_cd,arb_item_01,arb_item_02,arb_item_03,arb_item_04,arb_item_05,arb_item_06,arb_item_07,arb_item_08,arb_item_09,arb_item_10,arb_item_11,arb_item_12,arb_item_13,arb_item_14,arb_item_15,arb_item_16,arb_item_17,arb_item_18,arb_item_19,arb_item_20,eff_start_ym,eff_end_ym,create_id,create_dtt,last_upd_id,last_upd_dtt,customer_id
src07.whlslr_lvl4
org07.whlslr_lvl4
WHLSLR_LVL4_ex.sql
truncate_src_table:src07.whlslr_lvl4
1

View File

@ -0,0 +1,7 @@
CALL internal07.upsert_jskult_batch_status_manage(
'WHLSLR_LVL4',
'data_import',
'done',
NULL,
NULL
);