Merge branch 'develop' into feature-NEWDWH2021-1005_jskimport_temp

# Conflicts:
#	ecs/jskult-batch-daily/.env.example
This commit is contained in:
x.azuma.m@nds-tyo.co.jp 2023-07-05 16:40:09 +09:00
commit 7c9490c142
139 changed files with 8621 additions and 1352 deletions

File diff suppressed because it is too large Load Diff

View File

@ -15,3 +15,9 @@ JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt
JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt
JSKULT_DATA_BUCKET=**********************
JSKULT_DATA_FOLDER_RECV=**********************
# 連携データ抽出期間
SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0
# 洗替対象テーブル名
SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau
# 卸実績洗替で作成するデータの期間(年単位)
SALES_LAUNDERING_TARGET_YEAR_OFFSET=5

View File

@ -1,5 +1,9 @@
from src.batch.batch_functions import logging_sql
from src.batch.common.batch_context import BatchContext
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.time.elapsed_time import ElapsedTime
batch_context = BatchContext.get_instance()
logger = get_logger('生物由来卸販売ロット分解')
@ -7,19 +11,388 @@ logger = get_logger('生物由来卸販売ロット分解')
def exec():
    """生物由来卸販売ロット分解 — decompose biologics wholesale-sales rows into per-lot rows.

    Skips entirely on non-business days. Runs every step inside one
    transaction: commits on success, rolls back and wraps any failure in
    BatchOperationException, and always disconnects.

    Raises:
        BatchOperationException: when any step fails (original error chained).
    """
    logger.debug('生物由来卸販売ロット分解:起動')
    logger.debug('生物由来卸販売ロット分解処理開始')
    # Skip the whole batch when today is not a business day.
    if batch_context.is_not_business_day:
        logger.info('営業日ではないため、生物由来卸販売ロット分解処理をスキップします。')
        return
    db = Database.get_instance()
    try:
        db.connect()
        db.begin()
        # Delete still-unconfirmed rows for the current processing date.
        _delete_not_confirm_data_in_bio_sales_lot(db)
        # Explode each sales row into up to three per-lot rows.
        _insert_bio_sales_lot(db)
        # Drop rows whose lot number turned out empty.
        _delete_empty_lot_record(db)
        # Fill facility info from the MDB conversion view / Merck facility master.
        _set_inst_info_from_mdb_or_mst_inst(db)
        # Fill product info from the V product master.
        _set_prd_info_from_v_prd_mst(db)
        # Fill expiry dates from the manufacturing lot number master.
        _set_expr_dt_from_lot_num_mst(db)
        db.commit()
        logger.debug('生物由来卸販売ロット分解処理終了')
        return
    except Exception as e:
        db.rollback()
        raise BatchOperationException(e)
    finally:
        db.disconnect()
def _delete_not_confirm_data_in_bio_sales_lot(db: Database):
    """Delete lot-decomposition rows whose source row was updated on the current processing date.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the DELETE fails.
    """
    logger.debug('生物由来ロット分解データの未確定データ削除開始')
    try:
        elapsed_time = ElapsedTime()
        sql = """
        DELETE lot FROM src05.bio_sales_lot AS lot
        INNER JOIN src05.bio_sales AS bio
            ON bio.slip_mgt_num = lot.slip_mgt_num
            AND DATE(bio.dwh_upd_dt) = src05.get_syor_date()
        """
        res = db.execute(sql)
        logging_sql(logger, sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        logger.debug('生物由来ロット分解データの未確定データ削除に失敗')
        raise e
    logger.debug('生物由来ロット分解データの未確定データ削除に成功')
def _insert_bio_sales_lot(db: Database):
    """Explode src05.bio_sales rows into per-lot rows in src05.bio_sales_lot.

    Cross-joins against src05.bio_conv (conv_cd 1..3) so each source row
    yields up to three rows, one per rec_lot_numN / qtyN / err_flg1N column
    set. Product info, facility info and expiry date are inserted as NULL
    here and filled by the follow-up UPDATE steps. Only rows whose
    err_flg1..10 are all '0', whose rec_sts_kbn is not '99', and whose
    dwh_upd_dt falls on the current processing date are selected.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the INSERT fails.
    """
    logger.debug('生物由来ロット分解データの作成開始')
    try:
        elapsed_time = ElapsedTime()
        sql = """
        INSERT INTO src05.bio_sales_lot
        SELECT
            bio.slip_mgt_num AS slip_mgt_num,
            conv.conv_cd AS conv_cd,
            bio.rec_whs_cd AS rec_whs_cd,
            bio.rec_whs_sub_cd AS rec_whs_sub_cd,
            bio.rec_whs_org_cd AS rec_whs_org_cd,
            bio.rec_comm_cd AS rec_comm_cd,
            bio.rec_tran_kbn AS rec_tran_kbn,
            bio.rev_hsdnymd_srk AS rev_hsdnymd_srk,
            bio.rec_urag_num AS rec_urag_num,
            bio.rec_comm_name AS rec_comm_name,
            bio.rec_nonyu_fcl_name AS rec_nonyu_fcl_name,
            bio.rec_nonyu_fcl_addr AS rec_nonyu_fcl_addr,
            -- 3レコードに分解する
            CASE conv.conv_cd
                WHEN 1 THEN bio.rec_lot_num1
                WHEN 2 THEN bio.rec_lot_num2
                WHEN 3 THEN bio.rec_lot_num3
            END AS rec_lot_num,
            bio.rec_ymd AS rec_ymd,
            bio.v_tran_cd AS v_tran_cd,
            bio.tran_kbn_name AS tran_kbn_name,
            bio.whs_org_cd AS whs_org_cd,
            bio.v_whsorg_cd AS v_whsorg_cd,
            bio.whs_org_name AS whs_org_name,
            bio.v_whs_cd AS v_whs_cd,
            bio.whs_name AS whs_name,
            bio.nonyu_fcl_cd AS nonyu_fcl_cd,
            bio.v_inst_cd AS v_inst_cd,
            bio.v_inst_kn AS v_inst_kn,
            bio.v_inst_name AS v_inst_name,
            bio.v_inst_addr AS v_inst_addr,
            bio.comm_cd AS comm_cd,
            bio.product_name AS product_name,
            bio.whs_rep_comm_name AS whs_rep_comm_name,
            bio.whs_rep_nonyu_fcl_name AS whs_rep_nonyu_fcl_name,
            bio.whs_rep_nonyu_fcl_addr AS whs_rep_nonyu_fcl_addr,
            /* 製品名と製品コードは後ほどV製品マスタからセットする */
            -- 製品名
            NULL AS mkr_inf_1,
            -- 製品コード
            NULL AS mkr_cd,
            -- 数量
            -- Veeva取引区分の先頭が2の場合マイナス表示にする
            CASE conv.conv_cd
                WHEN 1 THEN
                    CASE
                        WHEN (LEFT(bio.v_tran_cd, 1) = 2 AND bio.qty1 >= 1) THEN -bio.qty1
                        ELSE bio.qty1
                    END
                WHEN 2 THEN
                    CASE
                        WHEN (LEFT(bio.v_tran_cd, 1) = 2 AND bio.qty2 >= 1) THEN -bio.qty2
                        ELSE bio.qty2
                    END
                WHEN 3 THEN
                    CASE
                        WHEN (LEFT(bio.v_tran_cd, 1) = 2 AND bio.qty3 >= 1) THEN -bio.qty3
                        ELSE bio.qty3
                    END
            END AS qty,
            bio.slip_org_kbn AS slip_org_kbn,
            bio.bef_slip_mgt_num AS bef_slip_mgt_num,
            CASE conv.conv_cd
                WHEN 1 THEN bio.err_flg11
                WHEN 2 THEN bio.err_flg12
                WHEN 3 THEN bio.err_flg13
            END AS lot_no_err_flg,
            CASE bio.err_flg20
                WHEN 'M' THEN '*'
                ELSE NULL
            END AS iko_flg,
            CASE bio.rec_sts_kbn
                WHEN '0' THEN bio.rec_sts_kbn
                WHEN '1' THEN
                    CASE conv.conv_cd
                        WHEN 1 THEN bio.err_flg11
                        WHEN 2 THEN bio.err_flg12
                        WHEN 3 THEN bio.err_flg13
                    END
            END AS rec_sts_kbn,
            CASE
                WHEN bio.bef_slip_mgt_num IS NOT NULL THEN bio.ins_dt
                ELSE NULL
            END AS ins_dt,
            CASE
                WHEN bio.bef_slip_mgt_num IS NOT NULL THEN bio.ins_usr
                ELSE NULL
            END AS ins_usr,
            bio.dwh_upd_dt AS dwh_upd_dt,
            /* 施設情報は後ほどセットする */
            -- 施設コード
            NULL AS inst_cd,
            -- 正式施設名漢字
            NULL AS inst_name_form,
            -- 施設住所
            NULL AS address,
            -- 施設電話番号
            NULL AS tel_num,
            CASE conv.conv_cd
                WHEN 1 THEN
                    CASE bio.err_flg11
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN 'ロットエラー'
                        WHEN '3' THEN 'エラー(解消済)'
                        WHEN '4' THEN 'ロット不明'
                        WHEN '5' THEN 'エラー(解消済)'
                        WHEN '6' THEN 'ロット不明'
                        WHEN '7' THEN '除外'
                        WHEN '8' THEN '除外'
                        WHEN 'Z' THEN '除外'
                    END
                WHEN 2 THEN
                    CASE bio.err_flg12
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN 'ロットエラー'
                        WHEN '3' THEN 'エラー(解消済)'
                        WHEN '4' THEN 'ロット不明'
                        WHEN '5' THEN 'エラー(解消済)'
                        WHEN '6' THEN 'ロット不明'
                        WHEN '7' THEN '除外'
                        WHEN '8' THEN '除外'
                        WHEN 'Z' THEN '除外'
                    END
                WHEN 3 THEN
                    CASE bio.err_flg13
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN 'ロットエラー'
                        WHEN '3' THEN 'エラー(解消済)'
                        WHEN '4' THEN 'ロット不明'
                        WHEN '5' THEN 'エラー(解消済)'
                        WHEN '6' THEN 'ロット不明'
                        WHEN '7' THEN '除外'
                        WHEN '8' THEN '除外'
                        WHEN 'Z' THEN '除外'
                    END
            END AS data_kbn,
            CASE bio.slip_org_kbn
                WHEN 'J' THEN 'JD-NET'
                WHEN 'N' THEN 'NHI'
                WHEN 'H' THEN '手入力'
            END AS data_kind,
            CASE conv.conv_cd
                WHEN 1 THEN
                    CASE bio.err_flg11
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN '日付エラー'
                        WHEN '3' THEN 'ロットエラー(解消済)'
                        WHEN '4' THEN 'ロットエラー(調査不能)'
                        WHEN '5' THEN '日付エラー(解消済)'
                        WHEN '6' THEN '日付エラー(調査不能)'
                        WHEN '7' THEN '除外(卸都合)'
                        WHEN '8' THEN '除外(再送信)'
                        WHEN 'Z' THEN '過去データ'
                    END
                WHEN 2 THEN
                    CASE bio.err_flg12
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN '日付エラー'
                        WHEN '3' THEN 'ロットエラー(解消済)'
                        WHEN '4' THEN 'ロットエラー(調査不能)'
                        WHEN '5' THEN '日付エラー(解消済)'
                        WHEN '6' THEN '日付エラー(調査不能)'
                        WHEN '7' THEN '除外(卸都合)'
                        WHEN '8' THEN '除外(再送信)'
                        WHEN 'Z' THEN '過去データ'
                    END
                WHEN 3 THEN
                    CASE bio.err_flg13
                        WHEN '0' THEN '正常'
                        WHEN '1' THEN 'ロットエラー'
                        WHEN '2' THEN '日付エラー'
                        WHEN '3' THEN 'ロットエラー(解消済)'
                        WHEN '4' THEN 'ロットエラー(調査不能)'
                        WHEN '5' THEN '日付エラー(解消済)'
                        WHEN '6' THEN '日付エラー(調査不能)'
                        WHEN '7' THEN '除外(卸都合)'
                        WHEN '8' THEN '除外(再送信)'
                        WHEN 'Z' THEN '過去データ'
                    END
            END AS err_dtl_kind,
            NULL AS expr_dt
        FROM
            src05.bio_sales bio
            -- 生物由来変換マスタ
            CROSS JOIN src05.bio_conv conv
        WHERE
            bio.err_flg1 = '0'
            AND bio.err_flg2 = '0'
            AND bio.err_flg3 = '0'
            AND bio.err_flg4 = '0'
            AND bio.err_flg5 = '0'
            AND bio.err_flg6 = '0'
            AND bio.err_flg7 = '0'
            AND bio.err_flg8 = '0'
            AND bio.err_flg9 = '0'
            AND bio.err_flg10 = '0'
            AND bio.rec_sts_kbn <> '99'
            AND DATE(bio.dwh_upd_dt) = src05.get_syor_date()
        """
        res = db.execute(sql)
        logging_sql(logger, sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        logger.debug('生物由来ロット分解データの作成に失敗')
        raise e
    logger.debug('生物由来ロット分解データの作成に成功')
def _delete_empty_lot_record(db: Database):
    """Remove lot-decomposition rows whose manufacturing lot number is blank or NULL.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the DELETE fails.
    """
    logger.debug('生物由来ロット分解データの製造番号が空のレコードを削除開始')
    try:
        timer = ElapsedTime()
        delete_sql = """
        DELETE FROM src05.bio_sales_lot lot
        WHERE
            -- 空白15桁のデータはロット情報が空とみなして削除する
            lot.rec_lot_num = REPEAT(' ', 15) OR lot.rec_lot_num IS NULL
        """
        result = db.execute(delete_sql)
        logging_sql(logger, delete_sql)
        logger.info(f'Query OK, {result.rowcount} rows affected ({timer.of})')
    except Exception as err:
        logger.debug('生物由来ロット分解データの製造番号が空のレコードを削除に失敗')
        raise err
    logger.debug('生物由来ロット分解データの製造番号が空のレコードを削除に成功')
def _set_inst_info_from_mdb_or_mst_inst(db: Database):
    """Fill facility columns on src05.bio_sales_lot from the MDB conversion view, falling back to the Merck facility master.

    For each lot row, when a matching view_mdb_cnv_mst row exists (matched by
    bio.v_inst_cd = mdb.hco_vid_v) its code/name/address/phone win; otherwise
    the values come from src05.mst_inst (inst_cd falls back to v_inst_cd itself).

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the UPDATE fails.
    """
    logger.debug('MDB変換マスタビュー生物由来ロット分解処理用、メルク施設マスタから施設情報を生物由来ロット分解データにセット開始')
    try:
        elapsed_time = ElapsedTime()
        sql = """
        UPDATE
            src05.bio_sales_lot bio
            LEFT OUTER JOIN internal05.view_mdb_cnv_mst mdb
                ON bio.v_inst_cd = mdb.hco_vid_v
            LEFT OUTER JOIN src05.mst_inst inst
                ON bio.v_inst_cd = inst.inst_cd
        SET
            -- 施設コード
            bio.inst_cd = (
                CASE
                    WHEN mdb.mdb_cd IS NOT NULL THEN mdb.mdb_cd
                    ELSE bio.v_inst_cd
                END
            ),
            -- 正式施設名漢字
            bio.inst_name_form = (
                CASE
                    WHEN mdb.mdb_cd IS NOT NULL THEN mdb.inst_name_form
                    ELSE inst.inst_name_form
                END
            ),
            -- 施設住所
            bio.address = (
                CASE
                    WHEN mdb.mdb_cd IS NOT NULL THEN mdb.address
                    ELSE inst.address
                END
            ),
            -- 施設電話番号
            bio.tel_num = (
                CASE
                    WHEN mdb.mdb_cd IS NOT NULL THEN mdb.tel_num
                    ELSE inst.tel_num
                END
            )
        """
        res = db.execute(sql)
        logging_sql(logger, sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        logger.debug('MDB変換マスタビュー生物由来ロット分解処理用、メルク施設マスタから施設情報を生物由来ロット分解データにセットに失敗')
        raise e
    logger.debug('MDB変換マスタビュー生物由来ロット分解処理用、メルク施設マスタから施設情報を生物由来ロット分解データにセットに成功')
def _set_prd_info_from_v_prd_mst(db: Database):
    """Copy product name/code onto src05.bio_sales_lot from the V product master.

    Rows match on product code with the sales date inside the master row's
    validity window and the master row not logically deleted.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the UPDATE fails.
    """
    logger.debug('V製品マスタから製品情報を生物由来ロット分解データにセット開始')
    try:
        timer = ElapsedTime()
        update_sql = """
        UPDATE
            src05.bio_sales_lot bio
            LEFT OUTER JOIN src05.phm_prd_mst_v prd
                ON bio.comm_cd = prd.prd_cd
                AND STR_TO_DATE(bio.rev_hsdnymd_srk,'%Y%m%d') BETWEEN prd.start_date AND prd.end_date
                AND prd.rec_sts_kbn <> '9'
        SET
            bio.mkr_inf_1 = prd.mkr_inf_1,
            bio.mkr_cd = prd.mkr_cd
        """
        result = db.execute(update_sql)
        logging_sql(logger, update_sql)
        logger.info(f'Query OK, {result.rowcount} rows affected ({timer.of})')
    except Exception as err:
        logger.debug('V製品マスタから製品情報を生物由来ロット分解データにセットに失敗')
        raise err
    logger.debug('V製品マスタから製品情報を生物由来ロット分解データにセットに成功')
def _set_expr_dt_from_lot_num_mst(db: Database):
    """Copy the expiry date onto src05.bio_sales_lot from the manufacturing lot number master.

    NOTE(review): the join matches bio.mkr_cd against lot.ser_num — confirm
    ser_num really carries the maker code in this master.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.

    Raises:
        Exception: re-raised after logging when the UPDATE fails.
    """
    logger.debug('製造ロット管理番号マスタから有効期限をセット開始')
    try:
        timer = ElapsedTime()
        update_sql = """
        UPDATE
            src05.bio_sales_lot bio
            LEFT OUTER JOIN src05.lot_num_mst lot
                ON bio.mkr_cd = lot.ser_num
                AND bio.rec_lot_num = lot.lot_num
        SET
            bio.expr_dt = lot.expr_dt
        """
        result = db.execute(update_sql)
        logging_sql(logger, update_sql)
        logger.info(f'Query OK, {result.rowcount} rows affected ({timer.of})')
    except Exception as err:
        logger.debug('製造ロット管理番号マスタから有効期限をセットに失敗')
        raise err
    logger.debug('製造ロット管理番号マスタから有効期限をセットに成功')

View File

@ -24,6 +24,7 @@ def exec():
logger.debug('DCF施設統合マスタ作成処理開始')
# COM施設からDCF施設統合マスタに登録
(is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(db)
db.commit()
# DCF施設統合マスタ追加のログを出力する
if is_add_dcf_inst_merge:
logger.info('[NOTICE]DCF施設統合マスタが追加されました。')
@ -131,7 +132,6 @@ def _insert_dcf_inst_merge_from_com_inst(db: Database) -> tuple[bool, list[dict]
elapsed_time = ElapsedTime()
res = db.execute(insert_sql, params)
logging_sql(logger, insert_sql)
db.commit()
logger.info(f'COM施設からDCF施設統合マスタに登録成功, {res.rowcount} 行更新 ({elapsed_time.of})')
except Exception as e:
logger.debug('COM施設からDCF施設統合マスタの登録に失敗')

View File

@ -0,0 +1,647 @@
from datetime import datetime, timedelta
from src.batch.batch_functions import logging_sql
from src.batch.common.batch_context import BatchContext
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.time.elapsed_time import ElapsedTime
batch_context = BatchContext.get_instance()
logger = get_logger('DCF施設統合マスタ日次更新')
def exec():
    """DCF施設統合マスタ日次更新 — daily refresh of the DCF facility-merge master.

    Resolves destination codes for enabled rows, reverts disabled rows, then
    propagates enabled merges into the employee-in-charge and prescriber
    masters. One transaction; rolls back and wraps failures in
    BatchOperationException; always disconnects.
    """
    db = Database.get_instance()
    try:
        db.connect()
        db.begin()
        logger.debug('DCF施設統合マスタ日次更新処理開始')
        # Set destination codes where muko_flg is 0 (enabled).
        enabled_rows = _set_enabled_dct_inst_merge(db)
        # Revert destination codes where muko_flg is 1 (disabled).
        _set_disabled_dct_inst_merge(db)
        # Only propagate when enabled merge rows exist.
        if enabled_rows:
            _add_emp_chg_inst(db, enabled_rows)
            _add_ult_ident_presc(db, enabled_rows)
        db.commit()
        logger.debug('DCF施設統合マスタ日次更新処理終了')
    except Exception as err:
        db.rollback()
        raise BatchOperationException(err)
    finally:
        db.disconnect()
def _set_enabled_dct_inst_merge(db: Database) -> list[dict]:
    """Set destination DCF codes for enabled (muko_flg=0) merge rows and return those rows."""
    # Fetch the rows whose muko_flg is 0 (enabled).
    enabled_rows = _select_dct_inst_merge(db, 0)
    # When at least one row was updated, re-point historical destination codes.
    if _update_dcf_inst_merge(db, 0) > 0:
        for record in enabled_rows:
            _update_dcf_inst_cd_new(db, record['dup_opp_cd'], record['dcf_inst_cd'], '')
    return enabled_rows
def _set_disabled_dct_inst_merge(db: Database):
    """Revert destination DCF codes for disabled (muko_flg=1) merge rows."""
    # Fetch the rows whose muko_flg is 1 (disabled).
    disabled_rows = _select_dct_inst_merge(db, 1)
    # When at least one row was reverted, restore historical destination codes.
    if _update_dcf_inst_merge(db, 1) > 0:
        for record in disabled_rows:
            _update_dcf_inst_cd_new(db, record['dcf_inst_cd'], record['dup_opp_cd'], '戻し')
def _select_ult_ident_presc_dcf_inst_cd(db: Database, dcf_inst_cd: str) -> list[dict]:
    """Fetch still-active ult_ident_presc key rows for the given DCF facility code.

    Args:
        db (Database): connected database wrapper.
        dcf_inst_cd (str): DCF facility code to match against presc_cd.

    Returns:
        list[dict]: rows with ta_cd, ult_ident_cd and ratio.
    """
    sql = """
    SELECT
        ta_cd,
        ult_ident_cd,
        ratio
    FROM
        src05.ult_ident_presc
    WHERE
        presc_cd = :dcf_inst_cd
        AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date
    """
    try:
        records = db.execute_select(sql, {'dcf_inst_cd': dcf_inst_cd})
        logging_sql(logger, sql)
        logger.info('納入先処方元マスタからDCF施設コードに対応したレコードの取得に成功')
        return records
    except Exception as err:
        logger.debug('納入先処方元マスタからDCF施設コードに対応したレコードの取得に失敗')
        raise err
def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]):
    """Re-register prescriber-master (ult_ident_presc) rows under each merge's destination code.

    For every enabled merge row and every active source row of its facility:
    skip when the counterpart code already has a matching row (opp_count > 0);
    otherwise insert a copy under dup_opp_cd starting at max(row start date,
    first day of the merge's effective month), deleting any colliding
    planned row first, and close out the old row by capping its end date at
    the last day of the month before the effective month.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        enabled_dst_inst_merge_records (list[dict]): enabled merge rows with
            dcf_inst_cd, dup_opp_cd and tekiyo_month ('YYYYMM').
    """
    logger.info('納入先処方元マスタの登録 開始')
    for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1):
        tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month'])
        ult_ident_presc_source_records = _select_ult_ident_presc_dcf_inst_cd(db, enabled_merge_record['dcf_inst_cd'])
        for ult_ident_presc_source_record in ult_ident_presc_source_records:
            ult_ident_presc_records = _select_ult_ident_presc(db,
                                                             enabled_merge_record['dcf_inst_cd'],
                                                             enabled_merge_record['dup_opp_cd'],
                                                             ult_ident_presc_source_record)
            for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1):
                logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始')
                # Skip rows whose prescriber code already equals the counterpart code.
                if ult_ident_presc_row['opp_count'] > 0:
                    continue
                start_date = _str_to_date_time(ult_ident_presc_row['start_date'])
                # Effective start = later of the row's start date and the merge month's first day.
                set_start_date = start_date \
                    if start_date > tekiyo_month_first_day else tekiyo_month_first_day
                set_start_date = _date_time_to_str(set_start_date)
                is_exists_duplicate_key = False
                # Remove a colliding planned row before inserting the copy.
                if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0:
                    _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row,
                                            '納入先処方元マスタの重複予定データの削除')
                    is_exists_duplicate_key = True
                else:
                    logger.info('納入先処方元マスタの重複予定データなし')
                _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row)
                # When no duplicate existed and end date >= start date, cap the old
                # row's end date at the last day of the month before the effective month.
                if not is_exists_duplicate_key and _str_to_date_time(ult_ident_presc_row['end_date']) >= start_date:
                    last_end_date = tekiyo_month_first_day - timedelta(days=1)
                    _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row)
    logger.info('納入先処方元マスタの登録 終了')
def _select_emp_chg_inst_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]:
    """Fetch active area codes (ta_cd) assigned to the given DCF facility.

    Args:
        db (Database): connected database wrapper.
        dcf_inst_cd (str): DCF facility code to match against inst_cd.

    Returns:
        list[dict]: rows each holding a ta_cd.
    """
    sql = """
    SELECT
        ta_cd
    FROM
        src05.emp_chg_inst
    WHERE
        inst_cd = :dcf_inst_cd
        AND enabled_flg = 'Y'
        AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date
    """
    try:
        records = db.execute_select(sql, {'dcf_inst_cd': dcf_inst_cd})
        logging_sql(logger, sql)
        logger.info('従業員担当施設マスタから領域コードの取得に成功')
        return records
    except Exception as err:
        logger.debug('従業員担当施設マスタから領域コードの取得に失敗')
        raise err
def _add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]):
    """Re-register employee-in-charge (emp_chg_inst) rows under each merge's destination code.

    For every enabled merge row and each of its facility's active area codes:
    skip rows the counterpart code already has (opp_count > 0); otherwise
    insert a copy under dup_opp_cd starting at max(row start date, first day
    of the merge's effective month); then either cap the old row's end date
    at the previous month's last day (when it started before the effective
    month) or logically delete it (enabled_flg 'N').

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        enabled_dst_inst_merge_records (list[dict]): enabled merge rows with
            dcf_inst_cd, dup_opp_cd and tekiyo_month ('YYYYMM').
    """
    logger.info('従業員担当施設マスタの登録 開始')
    for enabled_merge_record in enabled_dst_inst_merge_records:
        tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month'])
        emp_chg_inst_ta_cd_records = _select_emp_chg_inst_ta_cd(db, enabled_merge_record['dcf_inst_cd'])
        for emp_chg_inst_ta_cd_record in emp_chg_inst_ta_cd_records:
            emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'],
                                                        emp_chg_inst_ta_cd_record['ta_cd'])
            for emp_chg_inst_row in emp_chg_inst_records:
                # Skip rows already present under the counterpart code.
                if emp_chg_inst_row['opp_count'] > 0:
                    continue
                start_date = _str_to_date_time(emp_chg_inst_row['start_date'])
                # Effective start = later of the row's start date and the merge month's first day.
                set_start_date = start_date \
                    if start_date > tekiyo_month_first_day else tekiyo_month_first_day
                _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date),
                                     emp_chg_inst_row)
                # Start date < first day of the effective month:
                if start_date < tekiyo_month_first_day:
                    # cap the old row's end date at the previous month's last day.
                    last_end_date = tekiyo_month_first_day - timedelta(days=1)
                    _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date),
                                                  emp_chg_inst_row)
                    continue
                # Start date >= first day of the effective month: logically delete ('N').
                _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'],
                                              emp_chg_inst_row['start_date'])
    logger.info('従業員担当施設マスタの登録 終了')
def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: dict,
                            log_message: str):
    """Delete the ult_ident_presc row identified by its key columns plus start_date.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        start_date (str): effective start date ('YYYYMMDD') of the row to delete.
        ult_ident_presc_row (dict): key values (ta_cd, ult_ident_cd, ratio).
        log_message (str): prefix used in success/failure log lines.
    """
    try:
        timer = ElapsedTime()
        sql = """
        DELETE FROM
            src05.ult_ident_presc
        WHERE
            ta_cd = :ta_cd
            AND ult_ident_cd = :ult_ident_cd
            AND ratio = :ratio
            AND start_date = :start_date
        """
        bind = {
            'ta_cd': ult_ident_presc_row['ta_cd'],
            'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'],
            'ratio': ult_ident_presc_row['ratio'],
            'start_date': start_date,
        }
        result = db.execute(sql, bind)
        logging_sql(logger, sql)
        logger.info(f'{log_message} 成功, {result.rowcount} 行更新 ({timer.of})')
    except Exception as err:
        logger.debug(f'{log_message} 失敗')
        raise err
def _update_emp_chg_inst_disabled(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str):
    """Logically delete an emp_chg_inst row by flipping enabled_flg to 'N'.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        dcf_inst_cd (str): facility code of the row.
        ta_cd (str): area code of the row.
        start_date (str): effective start date of the row.
    """
    try:
        timer = ElapsedTime()
        sql = """
        UPDATE
            src05.emp_chg_inst
        SET
            enabled_flg = 'N',
            updater = CURRENT_USER(),
            update_date = SYSDATE()
        WHERE
            inst_cd = :dcf_inst_cd
            AND ta_cd = :ta_cd
            AND start_date = :start_date
        """
        bind = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date}
        result = db.execute(sql, bind)
        logging_sql(logger, sql)
        logger.info(f'従業員担当施設マスタのYorNフラグ更新に成功, {result.rowcount} 行更新 ({timer.of})')
    except Exception as err:
        logger.debug('従業員担当施設マスタのYorNフラグ更新に失敗')
        raise err
def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str,
                                  emp_chg_inst_row: dict):
    """Cap an emp_chg_inst row's end_date (close it out at the given date).

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        dcf_inst_cd (str): facility code of the row.
        last_end_date (str): new end date ('YYYYMMDD').
        emp_chg_inst_row (dict): key values (ta_cd, emp_cd, bu_cd, start_date).
    """
    try:
        timer = ElapsedTime()
        sql = """
        UPDATE
            src05.emp_chg_inst
        SET end_date = :end_date,
            updater = CURRENT_USER(),
            update_date = SYSDATE()
        WHERE
            inst_cd = :dcf_inst_cd
            AND ta_cd = :ta_cd
            AND emp_cd = :emp_cd
            AND bu_cd = :bu_cd
            AND start_date = :start_date
        """
        bind = {
            'end_date': last_end_date,
            'dcf_inst_cd': dcf_inst_cd,
            'ta_cd': emp_chg_inst_row['ta_cd'],
            'emp_cd': emp_chg_inst_row['emp_cd'],
            'bu_cd': emp_chg_inst_row['bu_cd'],
            'start_date': emp_chg_inst_row['start_date'],
        }
        result = db.execute(sql, bind)
        logging_sql(logger, sql)
        logger.info(f'従業員担当施設マスタの適用終了日更新 成功, {result.rowcount} 行更新 ({timer.of})')
    except Exception as err:
        logger.debug('従業員担当施設マスタの適用終了日更新 失敗')
        raise err
def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str,
                         emp_chg_inst_row: dict):
    """Insert a copy of an emp_chg_inst row under the counterpart facility code.

    The new row is enabled ('Y') and stamped with the current DB user/time.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        dup_opp_cd (str): counterpart facility code the row is copied to.
        set_start_date (str): effective start date ('YYYYMMDD') of the copy.
        emp_chg_inst_row (dict): source row (ta_cd, emp_cd, bu_cd, end_date,
            main_chg_flg).

    Raises:
        Exception: re-raised after logging when the INSERT fails.
    """
    try:
        elapsed_time = ElapsedTime()
        sql = """
        INSERT INTO
            src05.emp_chg_inst(
                inst_cd,
                ta_cd,
                emp_cd,
                bu_cd,
                start_date,
                end_date,
                main_chg_flg,
                enabled_flg,
                creater,
                create_date,
                updater,
                update_date
            )
        VALUES(
            :dup_opp_cd,
            :ta_cd,
            :emp_cd,
            :bu_cd,
            :start_date,
            :end_date,
            :main_chg_flg,
            'Y',
            CURRENT_USER(),
            SYSDATE(),
            CURRENT_USER(),
            SYSDATE()
        )
        """
        params = {
            'dup_opp_cd': dup_opp_cd,
            'ta_cd': emp_chg_inst_row['ta_cd'],
            'emp_cd': emp_chg_inst_row['emp_cd'],
            'bu_cd': emp_chg_inst_row['bu_cd'],
            'start_date': set_start_date,
            'end_date': emp_chg_inst_row['end_date'],
            # The original `None if x is None else x` was a no-op; pass through directly.
            'main_chg_flg': emp_chg_inst_row['main_chg_flg']
        }
        res = db.execute(sql, params)
        logging_sql(logger, sql)
        logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})')
    except Exception as e:
        logger.debug('従業員担当施設マスタの追加に失敗')
        raise e
def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]:
    """Fetch dcf_inst_merge rows for the current processing month by enabled/disabled state.

    muko_flg 0 (enabled) selects rows whose destination code is still NULL;
    muko_flg 1 (disabled) selects rows whose destination code is set. The
    IS [NOT] NULL fragment is spliced via str.format — safe here because it
    only ever injects the fixed string '' or 'NOT '.

    Args:
        db (Database): connected database wrapper.
        muko_flg (int): 0 for enabled rows, 1 for disabled rows.

    Returns:
        list[dict]: rows with dcf_inst_cd, dup_opp_cd and tekiyo_month.
    """
    try:
        sql = """
        SELECT
            dim.dcf_inst_cd,
            dim.dup_opp_cd,
            dim.tekiyo_month
        FROM
            src05.dcf_inst_merge AS dim
        INNER JOIN
            src05.hdke_tbl AS ht
            ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m')
        WHERE
            dim.muko_flg = :muko_flg
            AND dim.enabled_flg = 'Y'
            AND dim.dcf_inst_cd_new IS {not_null}NULL
        """.format(
            not_null='' if muko_flg == 0 else 'NOT '
        )
        params = {
            'muko_flg': muko_flg
        }
        dst_inst_merge_records = db.execute_select(sql, params)
        logging_sql(logger, sql)
        logger.info('DCF施設統合マスタの取得に成功')
    except Exception as e:
        logger.debug('DCF施設統合マスタの取得に失敗')
        raise e
    return dst_inst_merge_records
def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int:
    """Set or clear dcf_inst_cd_new on the current month's merge rows; return affected row count.

    muko_flg 0 (enabled): rows with a NULL destination get dup_opp_cd copied
    into dcf_inst_cd_new. muko_flg 1 (disabled): rows with a destination get
    it reset to NULL. Fragments are spliced via str.format with fixed
    strings only, so no injection risk.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        muko_flg (int): 0 to set destination codes, 1 to clear them.

    Returns:
        int: number of updated rows.

    Raises:
        Exception: re-raised after logging when the UPDATE fails.
    """
    # Bug fix: compute the log fragment before the try so the except handler
    # can never hit an unbound `log_message`.
    log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました'
    try:
        elapsed_time = ElapsedTime()
        sql = """
        UPDATE
            src05.dcf_inst_merge AS updim
        INNER JOIN(
            SELECT
                dim.dcf_inst_cd AS base_dcf_inst_cd,
                dim.dup_opp_cd AS base_dup_opp_cd,
                dim.tekiyo_month AS base_tekiyo_month,
                dim.muko_flg AS base_muko_flg,
                dim.enabled_flg AS base_enabled_flg
            FROM
                src05.dcf_inst_merge AS dim
            INNER JOIN
                src05.hdke_tbl AS ht
                ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m')
            WHERE
                dim.muko_flg = :muko_flg
                AND dim.enabled_flg ='Y'
                AND dim.dcf_inst_cd_new IS {not_null}NULL
        ) AS bf_dim
        SET
            updim.dcf_inst_cd_new = {column},
            updim.updater = CURRENT_USER(),
            updim.update_date = SYSDATE()
        WHERE
            updim.dcf_inst_cd = base_dcf_inst_cd
            AND updim.dup_opp_cd = base_dup_opp_cd
            AND updim.tekiyo_month = base_tekiyo_month
            AND updim.muko_flg = base_muko_flg
            AND updim.enabled_flg = base_enabled_flg
        """.format(
            not_null='' if muko_flg == 0 else 'NOT ',
            column='base_dup_opp_cd' if muko_flg == 0 else 'NULL'
        )
        params = {
            'muko_flg': muko_flg
        }
        res = db.execute(sql, params)
        logging_sql(logger, sql)
        logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})')
    except Exception as e:
        logger.debug(f'DCF施設統合マスタの有効データを{log_message} 失敗')
        raise e
    return res.rowcount
def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, log_message: str):
    """Re-point historical enabled merge rows from one destination DCF code to another.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        dcf_inst_cd_new_after (str): destination code to set.
        dcf_inst_cd_new_before (str): destination code currently held by the rows.
        log_message (str): extra fragment ('' or '戻し') for the log lines.
    """
    try:
        timer = ElapsedTime()
        sql = """
        UPDATE
            src05.dcf_inst_merge
        SET
            dcf_inst_cd_new = :dcf_inst_cd_new_after,
            updater = CURRENT_USER(),
            update_date = SYSDATE()
        WHERE
            dcf_inst_cd_new = :dcf_inst_cd_new_before
            AND enabled_flg = 'Y'
            AND muko_flg = 0
        """
        bind = {
            'dcf_inst_cd_new_after': dcf_inst_cd_new_after,
            'dcf_inst_cd_new_before': dcf_inst_cd_new_before,
        }
        result = db.execute(sql, bind)
        logging_sql(logger, sql)
        logger.info(f'移行先DCF施設コードの{log_message}更新に成功, {result.rowcount} 行更新 ({timer.of})')
    except Exception as err:
        logger.debug(f'移行先DCF施設コードの{log_message}更新に失敗')
        raise err
def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: dict):
    """Cap an ult_ident_presc row's end_date (close it out at the given date).

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        last_end_date (str): new end date ('YYYYMMDD').
        ult_ident_presc_row (dict): key values (ta_cd, ult_ident_cd, ratio, start_date).
    """
    try:
        timer = ElapsedTime()
        sql = """
        UPDATE
            src05.ult_ident_presc
        SET end_date = :end_date,
            updater = CURRENT_USER(),
            update_date = SYSDATE()
        WHERE
            ta_cd = :ta_cd
            AND ult_ident_cd = :ult_ident_cd
            AND ratio = :ratio
            AND start_date = :start_date
        """
        bind = {
            'end_date': last_end_date,
            'ta_cd': ult_ident_presc_row['ta_cd'],
            'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'],
            'ratio': ult_ident_presc_row['ratio'],
            'start_date': ult_ident_presc_row['start_date'],
        }
        result = db.execute(sql, bind)
        logging_sql(logger, sql)
        logger.info(f'終了日 > 開始月のため適用終了日を更新 成功, {result.rowcount} 行更新 ({timer.of})')
    except Exception as err:
        logger.debug('終了日 > 開始月のため適用終了日を更新 失敗')
        raise err
def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str,
                            ult_ident_presc_row: dict):
    """Insert a copy of an ult_ident_presc row with the counterpart code as presc_cd.

    NOTE(review): parameter `set_Start_Date` is not snake_case; kept as-is
    because the name is caller-visible — rename together with callers.

    Args:
        db (Database): connected database wrapper; the caller owns the transaction.
        set_Start_Date (str): effective start date ('YYYYMMDD') of the copy.
        dup_opp_cd (str): counterpart code inserted as presc_cd.
        ult_ident_presc_row (dict): source row (ta_cd, ult_ident_cd, ratio, end_date).

    Raises:
        Exception: re-raised after logging when the INSERT fails.
    """
    try:
        elapsed_time = ElapsedTime()
        sql = """
        INSERT INTO
            src05.ult_ident_presc(
                ta_cd,
                ult_ident_cd,
                ratio,
                start_date,
                presc_cd,
                end_date,
                creater,
                create_date,
                update_date,
                updater
            )
        VALUES(
            :ta_cd,
            :ult_ident_cd,
            :ratio,
            :start_date,
            :presc_cd,
            :end_date,
            CURRENT_USER(),
            SYSDATE(),
            SYSDATE(),
            CURRENT_USER()
        )
        """
        params = {
            'ta_cd': ult_ident_presc_row['ta_cd'],
            'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'],
            'ratio': ult_ident_presc_row['ratio'],
            'start_date': set_Start_Date,
            'presc_cd': dup_opp_cd,
            'end_date': ult_ident_presc_row['end_date']
        }
        res = db.execute(sql, params)
        logging_sql(logger, sql)
        logger.info(f'納入先処方元マスタに追加 成功, {res.rowcount} 行更新 ({elapsed_time.of})')
    except Exception as e:
        logger.debug('納入先処方元マスタに追加 失敗')
        raise e
def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]:
    """Fetch active emp_chg_inst rows for a facility/area, with a counterpart-existence count.

    opp_count is a correlated count of rows the counterpart code (dup_opp_cd)
    already has for the same area — callers skip a row when opp_count > 0.

    Args:
        db (Database): connected database wrapper.
        dcf_inst_cd (str): source facility code.
        dup_opp_cd (str): counterpart facility code checked for duplicates.
        ta_cd (str): area code.

    Returns:
        list[dict]: rows with the emp_chg_inst columns plus opp_count.
    """
    try:
        sql = """
        SELECT
            eci.inst_cd,
            eci.ta_cd,
            eci.emp_cd,
            eci.bu_cd,
            eci.start_date,
            eci.end_date,
            eci.main_chg_flg,
            eci.enabled_flg,
            (
                SELECT
                    COUNT(eciopp.inst_cd)
                FROM
                    src05.emp_chg_inst AS eciopp
                WHERE
                    eciopp.inst_cd = :dup_opp_cd
                    AND eciopp.ta_cd = :ta_cd
            ) AS opp_count
        FROM
            src05.emp_chg_inst AS eci
        WHERE
            eci.inst_cd = :dcf_inst_cd
            AND eci.ta_cd = :ta_cd
            AND eci.enabled_flg = 'Y'
            AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < eci.end_date
        """
        params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd}
        emp_chg_inst_records = db.execute_select(sql, params)
        logging_sql(logger, sql)
        logger.info('従業員担当施設マスタの取得 成功')
    except Exception as e:
        logger.debug('従業員担当施設マスタの取得 失敗')
        raise e
    return emp_chg_inst_records
def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str,
                            ult_ident_presc_row: dict) -> list[dict]:
    """Fetch active ult_ident_presc rows for a facility/key, with a counterpart-existence count.

    opp_count is a correlated count of matching rows the counterpart code
    (dup_opp_cd) already has for the same ta_cd/ult_ident_cd/ratio — callers
    skip a row when opp_count > 0.

    Args:
        db (Database): connected database wrapper.
        dcf_inst_cd (str): source facility code (matched against presc_cd).
        dup_opp_cd (str): counterpart code checked for duplicates.
        ult_ident_presc_row (dict): key values (ta_cd, ult_ident_cd, ratio).

    Returns:
        list[dict]: rows with key/date columns plus opp_count.
    """
    try:
        sql = """
        SELECT
            uip.ta_cd,
            uip.ult_ident_cd,
            uip.ratio,
            uip.start_date,
            uip.end_date,
            (
                SELECT
                    COUNT(uipopp.ta_cd)
                FROM
                    src05.ult_ident_presc AS uipopp
                WHERE
                    uipopp.presc_cd = :dup_opp_cd
                    AND uipopp.ta_cd = :ta_cd
                    AND uipopp.ult_ident_cd = :ult_ident_cd
                    AND uipopp.ratio = :ratio
            ) AS opp_count
        FROM
            src05.ult_ident_presc AS uip
        WHERE
            uip.presc_cd = :dcf_inst_cd
            AND uip.ta_cd = :ta_cd
            AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date
        """
        params = {
            'dcf_inst_cd': dcf_inst_cd,
            'dup_opp_cd': dup_opp_cd,
            'ta_cd': ult_ident_presc_row['ta_cd'],
            'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'],
            'ratio': ult_ident_presc_row['ratio']
        }
        ult_ident_presc_records = db.execute_select(sql, params)
        logging_sql(logger, sql)
        logger.info('納入先処方元マスタの取得 成功')
    except Exception as e:
        logger.debug('納入先処方元マスタの取得 失敗')
        raise e
    return ult_ident_presc_records
def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str,
                                     ult_ident_presc_row: dict) -> int:
    """Count ult_ident_presc rows colliding with the planned key + start date.

    Args:
        db (Database): connected database wrapper.
        set_start_date (str): planned start date ('YYYYMMDD').
        ult_ident_presc_row (dict): key values (ta_cd, ult_ident_cd, ratio).

    Returns:
        int: number of colliding rows.
    """
    sql = """
    SELECT
        COUNT(ta_cd) AS cnt
    FROM
        src05.ult_ident_presc
    WHERE
        ta_cd = :ta_cd
        AND ult_ident_cd = :ult_ident_cd
        AND ratio = :ratio
        AND start_date = :start_date
    """
    bind = {
        'ta_cd': ult_ident_presc_row['ta_cd'],
        'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'],
        'ratio': ult_ident_presc_row['ratio'],
        'start_date': set_start_date,
    }
    try:
        rows = db.execute_select(sql, bind)
        logging_sql(logger, sql)
        logger.info('納入先処方元マスタの重複予定データの存在チェック 成功')
        return rows[0]['cnt']
    except Exception as err:
        logger.debug('納入先処方元マスタの重複予定データの存在チェック 失敗')
        raise err
def _get_first_day_of_month(year_month: str) -> datetime:
# year_monthの初日の日付を日付型に変換し返却する
return datetime.strptime(year_month + '01', '%Y%m%d')
def _str_to_date_time(str_date_time: str) -> datetime:
# str_date_timeを日付型に変換して返却する
return datetime.strptime(str_date_time, '%Y%m%d')
def _date_time_to_str(date_time: datetime) -> str:
# date_timeをYmd型に変換して返却する
return date_time.strftime('%Y%m%d')

View File

@ -1,5 +1,8 @@
from src.batch.common.batch_context import BatchContext
from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering
from src.batch.laundering import (
create_inst_merge_for_laundering, emp_chg_inst_laundering,
ult_ident_presc_laundering, sales_results_laundering)
from src.batch.dcf_inst_merge import integrate_dcf_inst_merge
from src.logging.get_logger import get_logger
batch_context = BatchContext.get_instance()
@ -16,10 +19,14 @@ def exec():
return
# 洗替用マスタ作成
create_inst_merge_for_laundering.exec()
# DCF施設統合マスタ日次更新
integrate_dcf_inst_merge.exec()
# 施設担当者洗替
emp_chg_inst_laundering.exec()
# 納入先処方元マスタ洗替
ult_ident_presc_laundering.exec()
# 卸販売洗替
sales_results_laundering.exec()
# # 並列処理のテスト用コード
# import time

View File

@ -0,0 +1,167 @@
from src.batch.batch_functions import logging_sql
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.system_var import environment
logger = get_logger('卸販売洗替')
def exec():
    """卸販売洗替 — relaunder wholesale sales results via stored procedures.

    Runs in autocommit mode (each procedure call commits on its own).
    Failures are wrapped in BatchOperationException; always disconnects.
    """
    db = Database.get_instance(autocommit=True)
    try:
        db.connect()
        logger.debug('処理開始')
        # Purge laundered rows older than the configured year offset.
        _call_sales_lau_delete(db)
        # Rebuild/upsert the laundered sales table for the extract period.
        _call_sales_lau_upsert(db)
        # 1: relaunder wholesaler organization codes.
        _call_whs_org_laundering(db)
        # Relaunder HCO facility codes.
        _update_sales_lau_from_vop_hco_merge_v(db)
        # 4: relaunder Merck facility codes.
        _update_mst_inst_laundering(db)
        logger.debug('処理終了')
    except Exception as err:
        raise BatchOperationException(err)
    finally:
        db.disconnect()
def _call_sales_lau_delete(db: Database):
    """Call sales_lau_delete to purge laundered sales older than the configured year offset."""
    logger.info('sales_lau_delete(プロシージャ―) 開始')
    call_sql = f"""
    CALL src05.sales_lau_delete(
        '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}',
        {environment.SALES_LAUNDERING_TARGET_YEAR_OFFSET}
    )
    """
    db.execute(call_sql)
    logger.info('sales_lau_delete(プロシージャ―) 終了')
def _call_sales_lau_upsert(db: Database):
    """Call sales_lau_upsert to (re)build the laundered sales table for the extract period."""
    logger.info('sales_lau_upsert(プロシージャ―) 開始')
    call_sql = f"""
    CALL src05.sales_lau_upsert(
        '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}',
        (src05.get_syor_date() - interval {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD} day),
        src05.get_syor_date()
    )
    """
    db.execute(call_sql)
    logger.info('sales_lau_upsert(プロシージャ―) 終了')
def _call_whs_org_laundering(db: Database):
    """Call whs_org_laundering to relaunder wholesaler organization codes."""
    logger.info('whs_org_laundering(プロシージャ―) 開始')
    call_sql = f"""
    CALL src05.whs_org_laundering(
        '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}'
    )
    """
    db.execute(call_sql)
    logger.info('whs_org_laundering(プロシージャ―) 終了')
def _update_sales_lau_from_vop_hco_merge_v(db: Database):
    """Relaunder HCO facility codes, but only when the temp merge table has rows."""
    if _count_v_inst_merge_t(db) > 0:
        _call_v_inst_merge_laundering(db)
    else:
        logger.info('V施設統合マスタ(洗替処理一時テーブル)にデータは存在しません')
def _count_v_inst_merge_t(db: Database) -> int:
    """Return the row count of the V facility-merge temp table.

    Returns:
        int: number of rows in internal05.v_inst_merge_t.
    """
    count_sql = """
    SELECT
        COUNT(v_inst_cd) AS cnt
    FROM
        internal05.v_inst_merge_t
    """
    try:
        rows = db.execute_select(count_sql)
        logging_sql(logger, count_sql)
        logger.info('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功')
        return rows[0]['cnt']
    except Exception as err:
        logger.debug('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗')
        raise err
def _call_v_inst_merge_laundering(db: Database):
    """Call v_inst_merge_laundering to relaunder HCO facility codes."""
    logger.info('v_inst_merge_laundering(プロシージャ―) 開始')
    call_sql = f"""
    CALL src05.v_inst_merge_laundering(
        '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}'
    )
    """
    db.execute(call_sql)
    logger.info('v_inst_merge_laundering(プロシージャ―) 終了')
def _update_mst_inst_laundering(db: Database):
    """Relaunder Merck facility codes in two sub-steps."""
    # A: convert HCO codes to DCF codes via the MDB conversion table.
    _call_hco_to_mdb_laundering(db)
    # B: apply the DCF facility-merge master (code conversion + address etc.).
    _update_sales_lau_from_dcf_inst_merge(db)
def _call_hco_to_mdb_laundering(db: Database):
    """Call hco_to_mdb_laundering: convert medical-institution rows HCO → DCF via the MDB table."""
    logger.info('hco_to_mdb_laundering(プロシージャ―) 開始')
    call_sql = f"""
    CALL src05.hco_to_mdb_laundering(
        '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}'
    )
    """
    db.execute(call_sql)
    logger.info('hco_to_mdb_laundering(プロシージャ―) 終了')
def _update_sales_lau_from_dcf_inst_merge(db: Database):
    """Step B: apply DCF institution merges (code conversion and address fields) when merge rows exist.

    Args:
        db (Database): connected database wrapper.
    """
    if _count_inst_merge_t(db) > 0:
        _call_inst_merge_laundering(db)
        return
    # No merge rows — record the skip and fall through.
    logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)にデータは存在しません')
def _count_inst_merge_t(db: Database) -> int:
    """Return the number of rows in the Ultmarc institution merge temp table (internal05.inst_merge_t).

    Args:
        db (Database): connected database wrapper.

    Returns:
        int: row count (COUNT(dcf_dsf_inst_cd)).

    Raises:
        Exception: re-raised as-is when the SELECT fails.
    """
    sql = """
        SELECT
            COUNT(dcf_dsf_inst_cd) AS cnt
        FROM
            internal05.inst_merge_t
    """
    # Log the SQL before executing so the statement is traceable even when it fails.
    logging_sql(logger, sql)
    try:
        result = db.execute_select(sql)
        logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功')
    except Exception:
        # Failure was previously logged at DEBUG, which hides it in production logs; use ERROR.
        logger.error('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗')
        # Bare raise preserves the original traceback.
        raise
    return result[0]['cnt']
def _call_inst_merge_laundering(db: Database):
    """Step B (procedure call): convert codes and set address info via inst_merge_laundering.

    Args:
        db (Database): connected database wrapper used to run the CALL statement.
    """
    logger.info('inst_merge_laundering(プロシージャ―) 開始')
    sql = f"""
        CALL src05.inst_merge_laundering(
            '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}'
        )
    """
    db.execute(sql)
    logger.info('inst_merge_laundering(プロシージャ―) 終了')

View File

@ -13,15 +13,17 @@ logger = get_logger(__name__)
class Database:
"""データベース操作クラス"""
__connection: Connection = None
__engine: Engine = None
__transactional_engine: Engine = None
__autocommit_engine: Engine = None
__host: str = None
__port: str = None
__username: str = None
__password: str = None
__schema: str = None
__autocommit: bool = None
__connection_string: str = None
def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None:
"""このクラスの新たなインスタンスを初期化します
Args:
@ -30,12 +32,14 @@ class Database:
host (str): DBホスト名
port (int): DBポート
schema (str): DBスキーマ名
autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合トランザクションの有無に限らず即座にコミットされる). Defaults to False.
"""
self.__username = username
self.__password = password
self.__host = host
self.__port = int(port)
self.__schema = schema
self.__autocommit = autocommit
self.__connection_string = URL.create(
drivername='mysql+pymysql',
@ -47,16 +51,20 @@ class Database:
query={"charset": "utf8mb4", "local_infile": "1"},
)
self.__engine = create_engine(
self.__transactional_engine = create_engine(
self.__connection_string,
pool_timeout=5,
poolclass=QueuePool
)
self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT')
@classmethod
def get_instance(cls):
def get_instance(cls, autocommit=False):
"""インスタンスを取得します
Args:
autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合トランザクションの有無に限らず即座にコミットされる). Defaults to False.
Returns:
Database: DB操作クラスインスタンス
"""
@ -65,7 +73,8 @@ class Database:
password=environment.DB_PASSWORD,
host=environment.DB_HOST,
port=environment.DB_PORT,
schema=environment.DB_SCHEMA
schema=environment.DB_SCHEMA,
autocommit=autocommit
)
@retry(
@ -77,12 +86,15 @@ class Database:
stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT))
def connect(self):
"""
DBに接続します接続に失敗した場合リトライします
DBに接続します接続に失敗した場合リトライします\n
インスタンスのautocommitがTrueの場合自動コミットモードで接続する明示的なトランザクションも無視される
Raises:
DBException: 接続失敗
"""
try:
self.__connection = self.__engine.connect()
self.__connection = (
self.__autocommit_engine.connect() if self.__autocommit is True
else self.__transactional_engine.connect())
except Exception as e:
raise DBException(e)

View File

@ -26,3 +26,10 @@ DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_AT
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))
# Extraction window for linked data, in days (連携データ抽出期間).
SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD'])
# Target table name for relaundering (洗替対象テーブル名).
SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME']
# Data span created by wholesaler-results relaundering, in years (卸実績洗替で作成するデータの期間(年単位)).
# Cast to int for consistency with the other numeric settings and to fail fast on a
# non-numeric value — this is interpolated into a SQL interval expression.
SALES_LAUNDERING_TARGET_YEAR_OFFSET = int(os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'])

View File

@ -22,4 +22,6 @@ AWS_REGION=ap-northeast-1
AUTHORIZE_ENDPOINT=oauth2/authorize
TOKEN_ENDPOINT=oauth2/token
BIO_SEARCH_RESULT_MAX_COUNT=35000
ULTMARC_SEARCH_RESULT_MAX_COUNT=500
SESSION_EXPIRE_MINUTE=20
LOG_LEVEL=DEBUG

View File

@ -9,6 +9,7 @@ app = "uvicorn src.main:app --reload --no-server-header"
[packages]
fastapi = "*"
uvicorn = "*"
"uvicorn[standard]" = "*"
gunicorn = "*"
boto3 = "*"
jinja2 = "*"
@ -22,6 +23,7 @@ pandas = "*"
openpyxl = "*"
xlrd = "*"
sqlalchemy = "*"
mojimoji = "*"
[dev-packages]
autopep8 = "*"

View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "d78a6bf1a96aa14c45431185961cae6d54ca1da8ea0319e1976bad4c2bebd673"
"sha256": "ebbab78060c475a430a1ce8c817736651277c081230c5c0e94f0b0d02f5f46df"
},
"pipfile-spec": 6,
"requires": {
@ -18,35 +18,35 @@
"default": {
"anyio": {
"hashes": [
"sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421",
"sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"
"sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce",
"sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"
],
"markers": "python_full_version >= '3.6.2'",
"version": "==3.6.2"
"markers": "python_version >= '3.7'",
"version": "==3.7.0"
},
"boto3": {
"hashes": [
"sha256:278d896e9090a976f41ec68da5c572bc4e5b7cb1e515f1898fee8cb2fadfb50d",
"sha256:3ce2225a61832d69831d669d912424ea3863268ca1cfa2a82203bb90952acefa"
"sha256:2d4095e2029ce5ceccb25591f13e55aa5b8ba17794de09963654bd9ced45158f",
"sha256:dd15823e8c0554d98c18584d9a6a0342c67611c1114ef61495934c2e560f632c"
],
"index": "pypi",
"version": "==1.26.91"
"version": "==1.26.155"
},
"botocore": {
"hashes": [
"sha256:4ed6a488aee1b42367eace71f7d0993dda05b02eebd7dcdd78db5c9ce3d80da5",
"sha256:a8a800a2a945da807758cace539fc5b5ec1d5082ce363799d3a3870c2c4ed6fc"
"sha256:32d5da68212e10c060fd484f41df4f7048fc7731ccd16fd00e37b11b6e841142",
"sha256:7fbb7ebba5f645c9750fe557b1ea789d40017a028cdaa2c22fcbf06d4a4d3c1d"
],
"markers": "python_version >= '3.7'",
"version": "==1.29.91"
"version": "==1.29.155"
},
"certifi": {
"hashes": [
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
"sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7",
"sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"
],
"markers": "python_version >= '3.6'",
"version": "==2022.12.7"
"version": "==2023.5.7"
},
"cffi": {
"hashes": [
@ -195,7 +195,7 @@
"sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df",
"sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"
],
"markers": "python_version >= '3.7'",
"markers": "python_full_version >= '3.7.0'",
"version": "==3.1.0"
},
"click": {
@ -206,33 +206,36 @@
"markers": "python_version >= '3.7'",
"version": "==8.1.3"
},
"colorama": {
"hashes": [
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44",
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"
],
"version": "==0.4.6"
},
"cryptography": {
"hashes": [
"sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1",
"sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7",
"sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06",
"sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84",
"sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915",
"sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074",
"sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5",
"sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3",
"sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9",
"sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3",
"sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011",
"sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536",
"sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a",
"sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f",
"sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480",
"sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac",
"sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0",
"sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108",
"sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828",
"sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354",
"sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612",
"sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3",
"sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"
"sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db",
"sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a",
"sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039",
"sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c",
"sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3",
"sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485",
"sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c",
"sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca",
"sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5",
"sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5",
"sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3",
"sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb",
"sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43",
"sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31",
"sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc",
"sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b",
"sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006",
"sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a",
"sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"
],
"version": "==39.0.2"
"version": "==41.0.1"
},
"et-xmlfile": {
"hashes": [
@ -242,13 +245,21 @@
"markers": "python_version >= '3.6'",
"version": "==1.1.0"
},
"exceptiongroup": {
"hashes": [
"sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e",
"sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"
],
"markers": "python_version < '3.11'",
"version": "==1.1.1"
},
"fastapi": {
"hashes": [
"sha256:451387550c2d25a972193f22e408a82e75a8e7867c834a03076704fe20df3256",
"sha256:4a75936dbf9eb74be5eb0d41a793adefe9f3fc6ba66dbdabd160120fd3c2d9cd"
"sha256:95d757511c596409930bd20673358d4a4d709004edb85c5d24d6ffc48fabcbf2",
"sha256:b53248ee45f64f19bb7600953696e3edf94b0f7de94df1e5433fc5c6136fa986"
],
"index": "pypi",
"version": "==0.94.1"
"version": "==0.97.0"
},
"greenlet": {
"hashes": [
@ -332,6 +343,52 @@
"markers": "python_version >= '3.7'",
"version": "==0.14.0"
},
"httptools": {
"hashes": [
"sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9",
"sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b",
"sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2",
"sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d",
"sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09",
"sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60",
"sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a",
"sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b",
"sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42",
"sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f",
"sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142",
"sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde",
"sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0",
"sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b",
"sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986",
"sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5",
"sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6",
"sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca",
"sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b",
"sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49",
"sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324",
"sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c",
"sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63",
"sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51",
"sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372",
"sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887",
"sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d",
"sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281",
"sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901",
"sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd",
"sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449",
"sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25",
"sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2",
"sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e",
"sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1",
"sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421",
"sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7",
"sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86",
"sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc",
"sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f",
"sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"
],
"version": "==0.5.0"
},
"idna": {
"hashes": [
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
@ -358,93 +415,113 @@
},
"markupsafe": {
"hashes": [
"sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed",
"sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc",
"sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2",
"sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460",
"sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7",
"sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0",
"sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1",
"sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa",
"sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03",
"sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323",
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65",
"sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013",
"sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036",
"sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f",
"sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4",
"sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419",
"sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2",
"sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619",
"sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a",
"sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a",
"sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd",
"sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7",
"sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666",
"sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65",
"sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859",
"sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625",
"sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff",
"sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156",
"sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd",
"sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba",
"sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f",
"sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1",
"sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094",
"sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a",
"sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513",
"sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed",
"sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d",
"sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3",
"sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147",
"sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c",
"sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603",
"sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601",
"sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a",
"sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1",
"sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d",
"sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3",
"sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54",
"sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2",
"sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6",
"sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"
"sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e",
"sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e",
"sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431",
"sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686",
"sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559",
"sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc",
"sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c",
"sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0",
"sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4",
"sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9",
"sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575",
"sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba",
"sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d",
"sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3",
"sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00",
"sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155",
"sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac",
"sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52",
"sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f",
"sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8",
"sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b",
"sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24",
"sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea",
"sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198",
"sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0",
"sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee",
"sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be",
"sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2",
"sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707",
"sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6",
"sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58",
"sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779",
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636",
"sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c",
"sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad",
"sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee",
"sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc",
"sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2",
"sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48",
"sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7",
"sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e",
"sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b",
"sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa",
"sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5",
"sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e",
"sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb",
"sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9",
"sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57",
"sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc",
"sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"
],
"markers": "python_version >= '3.7'",
"version": "==2.1.2"
"version": "==2.1.3"
},
"mojimoji": {
"hashes": [
"sha256:01a3d73e8b3c641386b1824d5106b8ea5c351a641623c910d89d8f02520c1b0e",
"sha256:05b589361b6f35aef96df3a7f64c2a286a7e7aed9865cd7baf1a5876bb4cd16a",
"sha256:515f6fa5659d77a172f1fb46950d34674f33cf487e9aa131bf9422cbe127e5f4",
"sha256:59cc0af7d3be3b6847c8dcf1a4e0d09cc0646ff64c101308903d14871245403c",
"sha256:61c03620528650603b3c92f367d6db2a645c2638408add5ba03fed982af7cd1d",
"sha256:7a15864d8de07a487280c528fe3d2aeaad05bb2ce6d8bf201fc1480b0b5db337",
"sha256:8a1285040b7ef7ca5bafe095b62b99507cf19e10c6000e3279bc44d9430da3d9",
"sha256:9723531661911479bbfce08ec8a62c7b5958a307f33a025fe938d19550b13f9d",
"sha256:98f41bf146b731ed4554c60495c53fd96339a52e02b22a869d6eaf2433047505",
"sha256:a98fb09bd49245f9d9cd29603b912dc2f94e619e6d2f5b722553a5dbe113ce6c",
"sha256:ae5e6e8d4e70849f4217fa00ca7d770354f5a13c04778b6f07f3bceb0a636abd",
"sha256:b110101148b920eba60fa4627b904fa18801c5ca3667880494b7f0d25dd1a5fe",
"sha256:b2247233f4b3bee6ab601ac7fcbd84124ad91788a7418c1430f93a4bac340218",
"sha256:b379781a0cb95eeb79939608b4a013ad7b79ba03b7fe9b795eadc40bd96a06c9",
"sha256:d974a034528cb9fb95fcade6f3ffb73dc1dc336e17413486a5094724cbe9e34f",
"sha256:f180e2e8ff47e54cc36b0de147bceaf76fab88ef06775835a55b38ef6b82c161",
"sha256:f7f4da9e809fa68588e817f1ae351c4274be2366e542c51393657cae7bcf114f"
],
"index": "pypi",
"version": "==0.0.12"
},
"numpy": {
"hashes": [
"sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22",
"sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f",
"sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9",
"sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96",
"sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0",
"sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a",
"sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281",
"sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04",
"sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468",
"sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253",
"sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756",
"sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a",
"sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb",
"sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d",
"sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0",
"sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910",
"sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978",
"sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5",
"sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f",
"sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a",
"sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5",
"sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2",
"sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d",
"sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95",
"sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5",
"sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d",
"sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780",
"sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"
"sha256:0ac6edfb35d2a99aaf102b509c8e9319c499ebd4978df4971b94419a116d0790",
"sha256:26815c6c8498dc49d81faa76d61078c4f9f0859ce7817919021b9eba72b425e3",
"sha256:4aedd08f15d3045a4e9c648f1e04daca2ab1044256959f1f95aafeeb3d794c16",
"sha256:4c69fe5f05eea336b7a740e114dec995e2f927003c30702d896892403df6dbf0",
"sha256:5177310ac2e63d6603f659fadc1e7bab33dd5a8db4e0596df34214eeab0fee3b",
"sha256:5aa48bebfb41f93043a796128854b84407d4df730d3fb6e5dc36402f5cd594c0",
"sha256:5b1b90860bf7d8a8c313b372d4f27343a54f415b20fb69dd601b7efe1029c91e",
"sha256:6c284907e37f5e04d2412950960894b143a648dea3f79290757eb878b91acbd1",
"sha256:6d183b5c58513f74225c376643234c369468e02947b47942eacbb23c1671f25d",
"sha256:7412125b4f18aeddca2ecd7219ea2d2708f697943e6f624be41aa5f8a9852cc4",
"sha256:7cd981ccc0afe49b9883f14761bb57c964df71124dcd155b0cba2b591f0d64b9",
"sha256:85cdae87d8c136fd4da4dad1e48064d700f63e923d5af6c8c782ac0df8044542",
"sha256:8aa130c3042052d656751df5e81f6d61edff3e289b5994edcf77f54118a8d9f4",
"sha256:95367ccd88c07af21b379be1725b5322362bb83679d36691f124a16357390153",
"sha256:9c7211d7920b97aeca7b3773a6783492b5b93baba39e7c36054f6e749fc7490c",
"sha256:9e3f2b96e3b63c978bc29daaa3700c028fe3f049ea3031b58aa33fe2a5809d24",
"sha256:b76aa836a952059d70a2788a2d98cb2a533ccd46222558b6970348939e55fc24",
"sha256:b792164e539d99d93e4e5e09ae10f8cbe5466de7d759fc155e075237e0c274e4",
"sha256:c0dc071017bc00abb7d7201bac06fa80333c6314477b3d10b52b58fa6a6e38f6",
"sha256:cc3fda2b36482891db1060f00f881c77f9423eead4c3579629940a3e12095fe8",
"sha256:d6b267f349a99d3908b56645eebf340cb58f01bd1e773b4eea1a905b3f0e4208",
"sha256:d76a84998c51b8b68b40448ddd02bd1081bb33abcdc28beee6cd284fe11036c6",
"sha256:e559c6afbca484072a98a51b6fa466aae785cfe89b69e8b856c3191bc8872a82",
"sha256:ecc68f11404930e9c7ecfc937aa423e1e50158317bf67ca91736a9864eae0232",
"sha256:f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19"
],
"markers": "python_version < '3.10'",
"version": "==1.24.2"
"version": "==1.25.0"
},
"openpyxl": {
"hashes": [
@ -456,34 +533,34 @@
},
"pandas": {
"hashes": [
"sha256:008aa9843e92753d1345353e643c51017d8a9e303041db3165b683fc16a4d380",
"sha256:1f060ae468cb24e1ab42c6344b097375b24a902d3cefb5524f93ef0cd0db5f4b",
"sha256:2379d66055592480aab24cda5b1543539302e0f85e9a33538e9e4fd309b3063e",
"sha256:26a507e14dc9a5ef29239b85d0ef5f01a7e308b88781b451a415d9d15e2d1a61",
"sha256:314bc00a0575151d3ec3124af23bf2ef7533b0e160fb138007a4ef1b3c6a0e63",
"sha256:3935c394e1b10d5c311bd9378018a468283adfe8469dc8084e21d55ca06be979",
"sha256:47f116fcb3aa533ab6661ca391136a643e25d1387dae989ed3e5b9248b98e2e9",
"sha256:4e99adf0a3b4e040fad8823567b52eacfd48db50d11024244a60197430ec74b8",
"sha256:67a5251a821b5af1c5aefe5a610a7758fae04693434fb98b2ebad10349cd727a",
"sha256:7bb2d670c1f7de9bcef0986ae9f832fbd99acc43db1d5fe22f2f06bda8a67d43",
"sha256:7fc7c85fcf27726633751d064f4d115dbccb202b0b6ea2909b6d89ca071115e3",
"sha256:8010e4c988c2c2ed1f5763a6e579448a13a7c87b810400124bb872121c9ca3f9",
"sha256:867fd5c3325c302e8feaaa7ec2d99c224be38551d8a9e1ae5d15be7e04424172",
"sha256:8cb4789c8b1f361d7b07a25002e871546b108519af9c176f8a5ca66316c09d90",
"sha256:8ce8603f8cf07044458914b81bb7445b6cc31d381657e0fac21b3eee40f404d0",
"sha256:adc1e91f282426d37830837f108747f0628e7635b1e83b2401b4f7e2a0068a82",
"sha256:b72ba4e9553645c0bfd688a4e89efe9694fb2936adb5c6295d31626233cb674a",
"sha256:c3c3be69e186d12a94004b0c76bb390e26b48e4b444f3adc86d2cf6506c71d99",
"sha256:cf960fc1f2545114b9ed1a0f025d6de63c891df31640e454e333e3b38504d36b",
"sha256:dc45eb7f23c92e0aa5278bb210fb30136e6e0b760636cf18874cdf2d6448df0f",
"sha256:e5ebb19a66d8c4a4563e6cb628a23ee6898dc50e5dfe8b73c692cd7ea81def0a",
"sha256:e817d97597be5c21b1a66cbecadd0d0242482b72f6f5b60129fce5cec329e274",
"sha256:e829b927b156f85432390580d8799dfee59db0be3954235cf5f5df8a42eaaacd",
"sha256:ebc301fb34185275d9ad57838f533d5413a02b434174d1be89785141f785b226",
"sha256:f082e075aeac904db0e69d8b8acc1d610362e3d823ace3af029622b24b105900"
"sha256:02755de164da6827764ceb3bbc5f64b35cb12394b1024fdf88704d0fa06e0e2f",
"sha256:0a1e0576611641acde15c2322228d138258f236d14b749ad9af498ab69089e2d",
"sha256:1eb09a242184092f424b2edd06eb2b99d06dc07eeddff9929e8667d4ed44e181",
"sha256:30a89d0fec4263ccbf96f68592fd668939481854d2ff9da709d32a047689393b",
"sha256:50e451932b3011b61d2961b4185382c92cc8c6ee4658dcd4f320687bb2d000ee",
"sha256:51a93d422fbb1bd04b67639ba4b5368dffc26923f3ea32a275d2cc450f1d1c86",
"sha256:598e9020d85a8cdbaa1815eb325a91cfff2bb2b23c1442549b8a3668e36f0f77",
"sha256:66d00300f188fa5de73f92d5725ced162488f6dc6ad4cecfe4144ca29debe3b8",
"sha256:69167693cb8f9b3fc060956a5d0a0a8dbfed5f980d9fd2c306fb5b9c855c814c",
"sha256:6d6d10c2142d11d40d6e6c0a190b1f89f525bcf85564707e31b0a39e3b398e08",
"sha256:713f2f70abcdade1ddd68fc91577cb090b3544b07ceba78a12f799355a13ee44",
"sha256:7376e13d28eb16752c398ca1d36ccfe52bf7e887067af9a0474de6331dd948d2",
"sha256:77550c8909ebc23e56a89f91b40ad01b50c42cfbfab49b3393694a50549295ea",
"sha256:7b21cb72958fc49ad757685db1919021d99650d7aaba676576c9e88d3889d456",
"sha256:9ebb9f1c22ddb828e7fd017ea265a59d80461d5a79154b49a4207bd17514d122",
"sha256:a18e5c72b989ff0f7197707ceddc99828320d0ca22ab50dd1b9e37db45b010c0",
"sha256:a6b5f14cd24a2ed06e14255ff40fe2ea0cfaef79a8dd68069b7ace74bd6acbba",
"sha256:b42b120458636a981077cfcfa8568c031b3e8709701315e2bfa866324a83efa8",
"sha256:c4af689352c4fe3d75b2834933ee9d0ccdbf5d7a8a7264f0ce9524e877820c08",
"sha256:c7319b6e68de14e6209460f72a8d1ef13c09fb3d3ef6c37c1e65b35d50b5c145",
"sha256:cf3f0c361a4270185baa89ec7ab92ecaa355fe783791457077473f974f654df5",
"sha256:dd46bde7309088481b1cf9c58e3f0e204b9ff9e3244f441accd220dd3365ce7c",
"sha256:dd5476b6c3fe410ee95926873f377b856dbc4e81a9c605a0dc05aaccc6a7c6c6",
"sha256:e69140bc2d29a8556f55445c15f5794490852af3de0f609a24003ef174528b79",
"sha256:f908a77cbeef9bbd646bd4b81214cbef9ac3dda4181d5092a4aa9797d1bc7774"
],
"index": "pypi",
"version": "==2.0.0rc0"
"version": "==2.0.2"
},
"pycparser": {
"hashes": [
@ -494,72 +571,72 @@
},
"pydantic": {
"hashes": [
"sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62",
"sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35",
"sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832",
"sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a",
"sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06",
"sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4",
"sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6",
"sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7",
"sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f",
"sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70",
"sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186",
"sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd",
"sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d",
"sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c",
"sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f",
"sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65",
"sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc",
"sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2",
"sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d",
"sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a",
"sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7",
"sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb",
"sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb",
"sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77",
"sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7",
"sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160",
"sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0",
"sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4",
"sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d",
"sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084",
"sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b",
"sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd",
"sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d",
"sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2",
"sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31",
"sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"
"sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d",
"sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a",
"sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc",
"sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3",
"sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a",
"sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7",
"sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf",
"sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f",
"sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91",
"sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece",
"sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29",
"sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60",
"sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a",
"sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305",
"sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766",
"sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f",
"sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8",
"sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276",
"sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c",
"sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60",
"sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896",
"sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be",
"sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb",
"sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298",
"sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4",
"sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572",
"sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d",
"sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82",
"sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0",
"sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4",
"sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca",
"sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1",
"sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f",
"sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f",
"sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6",
"sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"
],
"markers": "python_version >= '3.7'",
"version": "==1.10.6"
"version": "==1.10.9"
},
"pyjwt": {
"extras": [
"crypto"
],
"hashes": [
"sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd",
"sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"
"sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1",
"sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"
],
"index": "pypi",
"version": "==2.6.0"
"version": "==2.7.0"
},
"pymysql": {
"hashes": [
"sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641",
"sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"
"sha256:766b72e4370aba94e6266a4dbd62c51fbc6a894c38de25a41a8a01f0461a2387",
"sha256:aade29b861e81a3c68a9e90d43f3db257940c0208983a0128b82f1a4cef639aa"
],
"index": "pypi",
"version": "==1.0.2"
"version": "==1.1.0rc2"
},
"pynamodb": {
"hashes": [
"sha256:3c4d10867d59e6d7a2b54ee4ae213f1021d6f50ff93145e3909784bfc2b7560e",
"sha256:e09c39880560e10251778185b3d0c7a97ee8f42ab363a940c674e9330b61bf9d"
"sha256:82f77bb0c21a12756e6781df735ca841f543337847d8522a4ab8db6df7bbfc9f",
"sha256:a44fb486fc3e66a25b58d921e07f016f62416e323b381b96c1b725105868dacf"
],
"index": "pypi",
"version": "==5.4.1"
"version": "==5.5.0"
},
"python-dateutil": {
"hashes": [
@ -569,6 +646,13 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.2"
},
"python-dotenv": {
"hashes": [
"sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba",
"sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"
],
"version": "==1.0.0"
},
"python-multipart": {
"hashes": [
"sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132",
@ -579,34 +663,79 @@
},
"pytz": {
"hashes": [
"sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0",
"sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"
"sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588",
"sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"
],
"version": "==2022.7.1"
"version": "==2023.3"
},
"pyyaml": {
"hashes": [
"sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf",
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b",
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57",
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b",
"sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4",
"sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07",
"sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba",
"sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9",
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287",
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513",
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0",
"sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782",
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0",
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92",
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f",
"sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2",
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc",
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1",
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c",
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86",
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4",
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c",
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34",
"sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b",
"sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d",
"sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c",
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb",
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7",
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737",
"sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3",
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d",
"sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358",
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53",
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78",
"sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803",
"sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a",
"sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f",
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174",
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"
],
"version": "==6.0"
},
"requests": {
"hashes": [
"sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa",
"sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f",
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
],
"index": "pypi",
"version": "==2.28.2"
"version": "==2.31.0"
},
"s3transfer": {
"hashes": [
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
"sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346",
"sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"
],
"markers": "python_version >= '3.7'",
"version": "==0.6.0"
"version": "==0.6.1"
},
"setuptools": {
"hashes": [
"sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077",
"sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"
"sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f",
"sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"
],
"markers": "python_version >= '3.7'",
"version": "==67.6.0"
"version": "==67.8.0"
},
"six": {
"hashes": [
@ -626,82 +755,192 @@
},
"sqlalchemy": {
"hashes": [
"sha256:1df00f280fcf7628379c6838d47ac6abd2319848cb02984af313de9243994db8",
"sha256:1fd154847f2c77128e16757e3fd2028151aa8208dd3b9a5978918ea786a15312",
"sha256:20f36bff3b6c9fa94e40114fda4dc5048d40fd665390f5547b456a28e8059ee8",
"sha256:224c817e880359d344a462fc4dd94a233804f371aa290b024b6b976a2f5ade36",
"sha256:2ad44f45526411bebbf427cf858955a35f3a6bfd7db8f4314b12da4c0d1a4fd2",
"sha256:2c4c64f321080c83a3f0eed11cc9b73fe2a574f6b8339c402861274165c24cf6",
"sha256:3625a52fae744cff6f9beb6ed0775468b9eb7e6e8f6730676dfc49aa77d98b4e",
"sha256:3be54b3825512b3de5698ae04bf4aad6ea60442ac0f6b91ee4b8fa4db5c2dccd",
"sha256:4100c80070a66b042f1010b29b29a88d1d151c27a5e522c95ec07518b361a7a3",
"sha256:47e96be3e8c9c0f2c71ec87599be4bb8409d61841b66964a36b2447bec510b3b",
"sha256:483712fce53e2f7ec95ed7d106cd463f9fc122c28a7df4aaf2bc873d0d2a901f",
"sha256:48824b989a0e4340cd099dd4539702ddb1a5ce449f8a7355124e40a4935a95fa",
"sha256:4d653962da384a1d99795dbd8aac4a7516071b2f2984ed2aa25545fae670b808",
"sha256:5b067b2eaf3d97a49f3f6217981efa7b45d5726c2142f103712b020dd250fd98",
"sha256:5c35175b74cbcfe9af077bd13e87cfab13239e075c0e1e920095082f9377f0ed",
"sha256:61abff42e44e5daf17372cb8baa90e970dc647fc5f747e2caa9f9768acf17be8",
"sha256:6987f658389ad8bb6257db91551e7fde3e904974eef6f323856260907ef311d7",
"sha256:709f1ecb5dcea59f36fa0f485e09e41ff313b2d62c83a6f99b36870b0d6e42fa",
"sha256:7635cd38e3ea8522729b14451157104fce2117c44e7ba6a14684ed153d71b567",
"sha256:778db814cc21eff200c8bd42b4ffe976fa3378d10fb84d2c164d3c6a30bb38ee",
"sha256:81d4fc8f5c966677a3a2f39eb8e496442269d8c7d285b28145f7745fcc089d63",
"sha256:82691d3539023c3cee5ae055c47bf873728cd6b33bfaa7b916bea5a99b92f700",
"sha256:8ef7c56c74f4420b2c4a148d2531ba7f99b946cbf438a2bbcb2435fb4938a08d",
"sha256:9310666251385e4374c6f0bae6d69e62bc422021298ceb8669bf6ff56957ff37",
"sha256:ac6274dd530b684cca8cbb774e348afac6846f15d1694a56954413be6e2e8dcd",
"sha256:b7be0e6a4061d28b66ca4b4eb24558dd8c6386d3bcd2d6d7ef247be27cf1281b",
"sha256:bea2c1341abe9bc6f30071b8ada1a3c44f24ec0fe1b9418e9c1112ed32057c9e",
"sha256:bfcadfb8f0a9d26a76a5e2488cedd2e7cf8e70fe76d58aeb1c85eb83b33cbc5c",
"sha256:bfce790746d059af6d0bc68b578ba20d50a63c71a3db16edce7aa8eccdd73796",
"sha256:bfde1d7cf8b9aa6bbd0d53946cd508d76db7689afd442e2289642cdc8908b7b7",
"sha256:c343f0b546495f5d7a239c70bf50a99a48d7321c165b82afafa8483b9ebebf6e",
"sha256:c5d754665edea1ecdc79e3023659cb5594372e10776f3b3734d75c2c3ce95013",
"sha256:c76caced0c8e9129810895f71954c72f478e30bea7d0bba7130bade396be5048",
"sha256:ca147d9cde38b481085408e1d4277ee834cb88bcc31bc01933bc6513340071bc",
"sha256:d7bd001a40997f0c9a9ac10a57663a9397959966a5a365bb24a4d1a17aa60175",
"sha256:db91fe985f2264ab49b3450ab7e2a59c34f7eaf3bf283d6b9e2f9ee02b29e533",
"sha256:e0e270a4f5b42c67362d9c6af648cb86f6a00b20767553cfd734c914e1e2a5e0",
"sha256:ed714b864349704a7a719ec7199eec3f9cd15c190ecf6e10c34b5a0c549c5c18",
"sha256:edc16c8e24605d0a7925afaf99dbcbdc3f98a2cdda4622f1ea34482cb3b91940",
"sha256:f47709c98544384d390aed34046f0573df5725d22861c0cd0a5c151bc22eedff",
"sha256:ff10ad2d74a9a79c2984a2c709943e5362a1c898d8f3414815ea57515ae80c84"
"sha256:0db6734cb5644c55d0262a813b764c6e2cda1e66e939a488b3d6298cdc7344c2",
"sha256:0e4645b260cfe375a0603aa117f0a47680864cf37833129da870919e88b08d8f",
"sha256:131f0c894c6572cb1bdcf97c92d999d3128c4ff1ca13061296057072f61afe13",
"sha256:1e2caba78e7d1f5003e88817b7a1754d4e58f4a8f956dc423bf8e304c568ab09",
"sha256:2de1477af7f48c633b8ecb88245aedd811dca88e88aee9e9d787b388abe74c44",
"sha256:2f3b6c31b915159b96b68372212fa77f69230b0a32acab40cf539d2823954f5a",
"sha256:3ef876615ff4b53e2033022195830ec4941a6e21068611f8d77de60203b90a98",
"sha256:43e69c8c1cea0188b7094e22fb93ae1a1890aac748628b7e925024a206f75368",
"sha256:53081c6fce0d49bb36d05f12dc87e008c9b0df58a163b792c5fc4ac638925f98",
"sha256:5a934eff1a2882137be3384826f997db8441d43b61fda3094923e69fffe474be",
"sha256:5e8522b49e0e640287308b68f71cc338446bbe1c226c8f81743baa91b0246e92",
"sha256:61f2035dea56ff1a429077e481496f813378beb02b823d2e3e7eb05bc1a7a8ca",
"sha256:63ea36c08792a7a8a08958bc806ecff6b491386feeaf14607c3d9d2d9325e67f",
"sha256:6e85e315725807c127ad8ba3d628fdb861cf9ebfb0e10c39a97c01e257cdd71b",
"sha256:7641f6ed2682de84d77c4894cf2e43700f3cf7a729361d7f9cac98febf3d8614",
"sha256:7be04dbe3470fe8dd332fdb48c979887c381ef6c635eddf2dec43d2766111be4",
"sha256:81d867c1be5abd49f7e547c108391f371a9d980ba7ec34666c50d683f782b754",
"sha256:8544c6e62eacb77d5106e2055ef10f2407fc0dbd547e879f8745b2032eefd2bc",
"sha256:8d3cbdb2f07fb0e4b897dc1df39166735e194fb946f28f26f4c9f9801c8b24f7",
"sha256:8d6ef848e5afcd1bda3e9a843751f845c0ca888b61e669237680e913d84ec206",
"sha256:8e2569dac4e3cb85365b91ab569d06a221e0e17e65ce59949d00c3958946282b",
"sha256:90d320fde566b864adbc19abb40ecb80f4e25d6f084639969bb972d5cca16858",
"sha256:91eb8f89fcce8f709f8a4d65d265bc48a80264ee14c7c9e955f3222f19b4b39c",
"sha256:a08a791c75d6154d46914d1e23bd81d9455f2950ec1de81f2723848c593d2c8b",
"sha256:a2e9f50a906d0b81292576a9fb458f8cace904c81a67088f4a2ca9ff2856f55d",
"sha256:a5a2856e12cf5f54301ddf043bcbf0552561d61555e1bcf348b63f42b8e1eec2",
"sha256:b2801f85c5c0293aa710f8aa5262c707a83c1c203962ae5a22b4d9095e71aa9d",
"sha256:b72f4e4def50414164a1d899f2ce4e782a029fad0ed5585981d1611e8ae29a74",
"sha256:bdaf89dd82f4a0e1b8b5ffc9cdc0c9551be6175f7eee5af6a838e92ba2e57100",
"sha256:c5e333b81fe10d14efebd4e9429b7bb865ed9463ca8bef07a7136dfa1fd4a37b",
"sha256:ce1fc3f64fd42d5f763d6b83651471f32920338a1ba107a3186211474861af57",
"sha256:d0c96592f54edd571e00ba6b1ed5df8263328ca1da9e78088c0ebc93c2e6562c",
"sha256:dc97238fa44be86971270943a0c21c19ce18b8d1596919048e57912e8abc02cc",
"sha256:e19546924f0cf2ec930d1faf318b7365e5827276410a513340f31a2b423e96a4",
"sha256:f2938edc512dd1fa48653e14c1655ab46144d4450f0e6b33da7acd8ba77fbfd7",
"sha256:f387b496a4c9474d8580195bb2660264a3f295a04d3a9d00f4fa15e9e597427e",
"sha256:f409f35a0330ab0cb18ece736b86d8b8233c64f4461fcb10993f67afc0ac7e5a",
"sha256:f662cf69484c59f8a3435902c40dfc34d86050bdb15e23d437074ce9f153306b",
"sha256:fbcc51fdbc89fafe4f4fe66f59372a8be88ded04de34ef438ab04f980beb12d4",
"sha256:fc1dae11bd5167f9eb53b3ccad24a79813004612141e76de21cf4c028dc30b34",
"sha256:ff6496ad5e9dc8baeb93a151cc2f599d01e5f8928a2aaf0b09a06428fdbaf553"
],
"index": "pypi",
"version": "==2.0.6"
"version": "==2.0.16"
},
"starlette": {
"hashes": [
"sha256:41da799057ea8620e4667a3e69a5b1923ebd32b1819c8fa75634bbe8d8bea9bd",
"sha256:e87fce5d7cbdde34b76f0ac69013fd9d190d581d80681493016666e6f96c6d5e"
"sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75",
"sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"
],
"markers": "python_version >= '3.7'",
"version": "==0.26.1"
"version": "==0.27.0"
},
"typing-extensions": {
"hashes": [
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
"sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26",
"sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"
],
"markers": "python_version >= '3.7'",
"version": "==4.5.0"
"version": "==4.6.3"
},
"tzdata": {
"hashes": [
"sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a",
"sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"
],
"markers": "python_version >= '2'",
"version": "==2023.3"
},
"urllib3": {
"hashes": [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"
"sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f",
"sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.15"
"version": "==1.26.16"
},
"uvicorn": {
"hashes": [
"sha256:8635a388062222082f4b06225b867b74a7e4ef942124453d4d1d1a5cb3750932",
"sha256:e69e955cb621ae7b75f5590a814a4fcbfb14cb8f44a36dfe3c5c75ab8aee3ad5"
"sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8",
"sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"
],
"index": "pypi",
"version": "==0.21.0"
"version": "==0.22.0"
},
"watchfiles": {
"hashes": [
"sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911",
"sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda",
"sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154",
"sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af",
"sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d",
"sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c",
"sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48",
"sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c",
"sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545",
"sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e",
"sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120",
"sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7",
"sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8",
"sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc",
"sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056",
"sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193",
"sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3",
"sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf",
"sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79",
"sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1",
"sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b",
"sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"
],
"version": "==0.19.0"
},
"websockets": {
"hashes": [
"sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd",
"sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f",
"sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998",
"sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82",
"sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788",
"sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa",
"sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f",
"sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4",
"sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7",
"sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f",
"sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd",
"sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69",
"sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb",
"sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b",
"sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016",
"sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac",
"sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4",
"sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb",
"sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99",
"sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e",
"sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54",
"sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf",
"sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007",
"sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3",
"sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6",
"sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86",
"sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1",
"sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61",
"sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11",
"sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8",
"sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f",
"sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931",
"sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526",
"sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016",
"sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae",
"sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd",
"sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b",
"sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311",
"sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af",
"sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152",
"sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288",
"sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de",
"sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97",
"sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d",
"sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d",
"sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca",
"sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0",
"sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9",
"sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b",
"sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e",
"sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128",
"sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d",
"sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c",
"sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5",
"sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6",
"sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b",
"sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b",
"sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280",
"sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c",
"sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c",
"sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f",
"sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20",
"sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8",
"sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb",
"sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602",
"sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf",
"sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0",
"sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74",
"sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0",
"sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"
],
"version": "==11.0.3"
},
"xlrd": {
"hashes": [

View File

@ -23,7 +23,7 @@
- Merck_NewDWH開発2021のWiki、[Python環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照
- 「Pipenvの導入」までを行っておくこと
- 構築完了後、プロジェクト配下で以下のコマンドを実行し、Pythonの仮想環境を作成する
- `pipenv install --python <pyenvでインストールしたpythonバージョン>`
- `pipenv install --python <pyenvでインストールしたpythonバージョン> --dev`
- この手順で出力される仮想環境のパスは、後述するVSCodeの設定手順で使用するため、控えておく
- MySQLの環境構築

View File

@ -7,9 +7,9 @@ from src.system_var import environment
class CognitoClient(AWSAPIClient):
def __init__(self) -> None:
self.__client = boto3.client('cognito-idp')
def login_by_user_password_flow(self, username: str, password: str, secret_hash: str):
auth_response = self.__client.admin_initiate_auth(
UserPoolId=environment.COGNITO_USER_POOL_ID,
ClientId=environment.COGNITO_CLIENT_ID,
@ -21,5 +21,5 @@ class CognitoClient(AWSAPIClient):
},
)
authentication_result = auth_response['AuthenticationResult']
return authentication_result['IdToken'], authentication_result['RefreshToken'],

View File

@ -7,16 +7,16 @@ from src.aws.aws_api_client import AWSAPIClient
class S3Client(AWSAPIClient):
__s3_client = boto3.client('s3')
def upload_file(self, local_file_path: str, bucket_name: str, file_key: str):
self.__s3_client.upload_file(
local_file_path,
Bucket=bucket_name,
Key=file_key
)
local_file_path,
Bucket=bucket_name,
Key=file_key
)
def generate_presigned_url(self, bucket_name: str, file_key: str, download_filename: str=''):
# presigned_urlを生成
def generate_presigned_url(self, bucket_name: str, file_key: str, download_filename: str = ''):
# presigned_urlを生成
presigned_url = self.__s3_client.generate_presigned_url(
'get_object',
Params={
@ -28,5 +28,5 @@ class S3Client(AWSAPIClient):
# 有効期限20分
ExpiresIn=1200
)
return presigned_url

View File

@ -1,10 +1,10 @@
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.exceptions import HTTPException
from starlette import status
from src.depends.services import get_service
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.view.bio_view_model import BioViewModel
@ -17,20 +17,25 @@ from src.templates import templates
router = APIRouter()
router.route_class = AuthenticatedRoute
logger = get_logger('生物由来参照')
#########################
# Views #
#########################
@router.get('/BioSearchList')
def bio_view(
request: Request,
batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService)),
bio_service: BioViewService=Depends(get_service(BioViewService))
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
bio_service: BioViewService = Depends(get_service(BioViewService))
):
session: UserSession = request.session
# バッチ処理中の場合、機能を利用させない
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BATCH_PROCESSING)
logger.debug(f'UserId: {session.user_id}')
# 検索項目の取得
bio = bio_service.prepare_bio_view(session)
# セッション書き換え
@ -50,14 +55,14 @@ def bio_view(
)
return templates_response
@router.post('/BioSearchList')
def search_bio(
request: Request,
bio_form: Optional[BioModel] = Depends(BioModel.as_form),
bio_service: BioViewService=Depends(get_service(BioViewService)),
batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService))
bio_service: BioViewService = Depends(get_service(BioViewService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
# error_log(date("Y/m/d H:i:s") . " [INFO] UserId:" . $UserId . "\r\n", 3, "$execLog");
session: UserSession = request.session
# バッチ処理中の場合、機能を利用させない
if batch_status_service.is_batch_processing():

View File

@ -2,96 +2,78 @@
from datetime import datetime
from typing import Union
import pandas as pd
from fastapi import APIRouter, Depends, HTTPException
from fastapi.exceptions import HTTPException
from fastapi.responses import JSONResponse
from starlette import status
from src.depends.auth import verify_session
from src.depends.services import get_service
from src.error.exceptions import DBException
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.request.bio_download import BioDownloadModel
from src.services.batch_status_service import BatchStatusService
from src.services.bio_view_service import BioViewService
from src.services.session_service import set_session
from src.system_var import constants
from src.system_var import constants, environment
logger = get_logger('生物由来参照')
router = APIRouter()
#########################
# APIs #
#########################
@router.post('/download')
async def download_bio_data(
search_param: BioModel=Depends(BioModel.as_body),
download_param: BioDownloadModel=Depends(BioDownloadModel.as_body),
search_param: BioModel = Depends(BioModel.as_body),
download_param: BioDownloadModel = Depends(BioDownloadModel.as_body),
bio_service: BioViewService = Depends(get_service(BioViewService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
session: Union[UserSession, None]=Depends(verify_session)
):
session: Union[UserSession, None] = Depends(verify_session)
):
# 通常のビューとはルーティングの扱いを変えるために、個別のルーターで登録する
# error_log(date("Y/m/d H:i:s") . " [INFO] getBioData start" . "\r\n", 3, "$execLog");
# 改修後のパラメータを打ち出すようにする
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] param:szConditions=" . htmlspecialchars($_POST["szConditions"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] param:pageNum=" . htmlspecialchars($_POST["pageNum"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] szUser=" . htmlspecialchars($_POST["szUser"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] szfilename=" . htmlspecialchars($_POST["szfilename"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] extension=" . htmlspecialchars($_POST["extension"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] sql=" . htmlspecialchars($_POST["sql"], ENT_QUOTES) . "\r\n", 3, "$execLog");
# いらない error_log(date("Y/m/d H:i:s") . " [INFO] arrayPrepare=" . $_POST["arrayPrepare"] . "\r\n", 3, "$execLog");
logger.info('生物由来データダウンロード開始')
logger.info(f'ユーザーID: {download_param.user_id}')
logger.info(f'拡張子: {download_param.ext}')
# ファイル名に使用するタイムスタンプを初期化しておく
now = datetime.now()
current_timestamp = datetime.now()
# 出力ファイル名
download_file_name = f'Result_{download_param.user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.{download_param.ext}'
if session is None:
return {'status': 'session_expired'}
# バッチ処理中の場合、機能を利用させない
if batch_status_service.is_batch_processing():
return {'status': 'batch_processing'}
try:
# 生物由来データを検索
search_result_df = bio_service.search_download_bio_data(search_param)
except DBException as e:
# error_log(date("Y/m/d H:i:s") . " [ERROR] " . "\r\n", 3, "$execLog");
print('DB Error', e.args)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail={'error': 'db_error', 'message': e.args}
)
# 生物由来データを検索
# 検索に使用したクエリも取得
search_result_df, query = _search_bio_data(bio_service, search_param, download_param.user_id)
# アクセスログを記録
bio_service.write_access_log(query, search_param, download_param.user_id, current_timestamp, download_file_name)
if search_result_df.size < 1:
# 検索結果が0件の場合、download_urlを返さない
print('Bio data not found')
logger.info('検索結果が0件です')
return {'status': 'ok', 'download_url': None}
# ファイルに打ち出すカラムを抽出
extract_df = search_result_df[constants.BIO_EXTRACT_COLUMNS]
# TODO: SQLクエリを修正するため、この処理は不要になる
extract_df = _extract_output_df(search_result_df)
# 値を変換
# データ種別の正式名を設定
extract_df.loc[:, 'slip_org_kbn'] = extract_df['slip_org_kbn'].apply(lambda key: constants.SLIP_ORG_KBN_FULL_NAME.get(key))
# データ区分の区分の日本語名を設定
extract_df.loc[:, 'data_kbn'] = extract_df['data_kbn'].apply(lambda key: constants.DATA_KBN_JP_NAME.get(key))
# ロット番号エラーフラグの日本語名を設定
extract_df.loc[:, 'lot_no_err_flg'] = extract_df['lot_no_err_flg'].apply(lambda key: constants.LOT_NO_ERR_FLG_JP_NAME.get(key))
# 訂正前伝票管理番号がセットされているときのみ修正日時、修正者、エラー詳細種別をセット
extract_df.loc[:, 'ins_dt'] = extract_df['bef_slip_mgt_no'].apply(lambda bef_slip_mgt_no:extract_df['ins_dt'] if bef_slip_mgt_no is not None else '')
extract_df.loc[:, 'ins_usr'] = extract_df['bef_slip_mgt_no'].apply(lambda bef_slip_mgt_no:extract_df['ins_usr'] if bef_slip_mgt_no is not None else '')
# 種別によって出力を変える
local_file_path = ''
if download_param.kind == 'xlsx':
# error_log(date("Y/m/d H:i:s") . " [INFO] 今回はExcelファイルに出力する" . "\r\n", 3, "$execLog");
local_file_path = bio_service.write_excel_file(extract_df, download_param.user_id, timestamp=now)
elif download_param.kind == 'csv':
# error_log(date("Y/m/d H:i:s") . " [INFO] 今回はCSVファイルに出力する" . "\r\n", 3, "$execLog");
local_file_path = bio_service.write_csv_file(extract_df, download_param.user_id, header=constants.BIO_CSV_HEADER, timestamp=now)
# ファイルを書き出し(Excel or CSV)
local_file_path = _write_bio_data_to_file(bio_service, download_param, extract_df, download_file_name)
# ローカルファイルからS3にアップロードし、ダウンロード用URLを取得する
try:
bio_service.upload_bio_data_file(local_file_path)
download_file_url = bio_service.generate_download_file_url(local_file_path, download_param.user_id, download_param.kind)
download_file_url = bio_service.generate_download_file_url(
local_file_path, download_param.user_id, download_param.ext)
except Exception as e:
print('S3 access error', e.args)
logger.exception(f'S3 アクセスエラー{e}')
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail={'error': 'aws_error', 'message': e.args}
@ -114,8 +96,62 @@ async def download_bio_data(
json_response.set_cookie(
key='session',
value=session.session_key,
max_age=20*60,
max_age=environment.SESSION_EXPIRE_MINUTE * 60, # cookieの有効期限は秒数指定なので、60秒をかける
secure=True,
httponly=True
)
return json_response
def _search_bio_data(bio_service: BioViewService, search_param: BioModel, user_id: str) -> tuple[pd.DataFrame, str]:
    """Search biological-origin data for download.

    Args:
        bio_service: Service that executes the download search query.
        search_param: Search conditions submitted from the screen.
        user_id: Requesting user's ID (kept for interface compatibility;
            not used inside this helper).

    Returns:
        Tuple of (search result DataFrame, SQL query string that was executed).
        Note: the original annotation claimed ``pd.DataFrame`` but the function
        has always returned this two-element tuple.

    Raises:
        HTTPException: 500 with a ``db_error`` detail when the DB access fails.
    """
    try:
        # The service hands back both the result and the query it ran so the
        # caller can record the query in the access log.
        search_result_df, query = bio_service.search_download_bio_data(search_param)
    except DBException as e:
        # Chain the original DB error so the root cause stays in the traceback.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={'error': 'db_error', 'message': e.args}
        ) from e
    return search_result_df, query
def _extract_output_df(search_result_df: pd.DataFrame) -> pd.DataFrame:
    """Project the search result onto the download columns and convert code
    values to their display names.

    Args:
        search_result_df: Raw search result rows.

    Returns:
        A new DataFrame restricted to ``constants.BIO_EXTRACT_COLUMNS`` with
        code columns replaced by display names and the correction audit
        columns blanked where no pre-correction slip number exists.
    """
    # Copy so the conversions below never write into (a view of) the caller's
    # frame and never trigger SettingWithCopyWarning.
    extract_df = search_result_df[constants.BIO_EXTRACT_COLUMNS].copy()
    # Data kind code -> full display name.
    extract_df['slip_org_kbn'] = extract_df['slip_org_kbn'].map(constants.SLIP_ORG_KBN_FULL_NAME.get)
    # Data category code -> Japanese name.
    extract_df['data_kbn'] = extract_df['data_kbn'].map(constants.DATA_KBN_JP_NAME.get)
    # Lot-number error flag -> Japanese name.
    extract_df['lot_num_err_flg'] = extract_df['lot_num_err_flg'].map(constants.LOT_NO_ERR_FLG_JP_NAME.get)
    # Keep the correction timestamp / corrector only on rows that actually
    # have a pre-correction slip number; blank them everywhere else.
    # (BUG FIX: the previous .apply() lambdas returned the whole
    # extract_df['ins_dt'] / ['ins_usr'] Series, so every non-blank cell was
    # filled with a Series object instead of the row's own value.)
    no_correction = extract_df['bef_slip_mgt_num'].isnull()
    extract_df.loc[no_correction, ['ins_dt', 'ins_usr']] = ''
    return extract_df
def _write_bio_data_to_file(
    bio_service: BioViewService,
    download_param: BioDownloadModel,
    df: pd.DataFrame,
    download_file_name: str
) -> str:
    """Write the download data to a local file in the requested format.

    Returns:
        The local file path that was written, or an empty string when the
        requested extension is neither ``xlsx`` nor ``csv``.
    """
    ext = download_param.ext
    if ext == 'xlsx':
        logger.info('今回はExcelファイルに出力する')
        return bio_service.write_excel_file(
            df, download_param.user_id, download_file_name=download_file_name)
    if ext == 'csv':
        logger.info('今回はCSVファイルに出力する')
        return bio_service.write_csv_file(
            df, download_param.user_id, header=constants.BIO_CSV_HEADER, download_file_name=download_file_name)
    # Unknown extension: nothing is written.
    return ''

View File

@ -5,6 +5,8 @@ router = APIRouter()
#########################
# Views #
#########################
@router.get('/')
def healthcheck():
    """Liveness probe: always reports the service as up."""
    payload = {'status': 'OK'}
    return payload

View File

@ -1,4 +1,3 @@
import os.path as path
import secrets
import urllib.parse as parse
from typing import Union
@ -10,6 +9,7 @@ from starlette import status
from src.depends.auth import code_security
from src.depends.services import get_service
from src.error.exceptions import JWTTokenVerifyException, NotAuthorizeException
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.login import LoginModel
from src.model.view.mainte_login_view_model import MainteLoginViewModel
@ -22,9 +22,13 @@ from src.templates import templates
router = APIRouter()
router.route_class = AfterSetCookieSessionRoute
logger = get_logger('ログイン')
#########################
# Views #
#########################
@router.get('/userlogin')
def login_user_redirect_view():
auth_query_string = parse.urlencode(
@ -39,6 +43,7 @@ def login_user_redirect_view():
return RedirectResponse(url=authorize_endpoint_url, status_code=status.HTTP_303_SEE_OTHER)
@router.get('/maintlogin')
def login_maintenance_view(request: Request):
mainte_login = MainteLoginViewModel()
@ -53,30 +58,40 @@ def login_maintenance_view(request: Request):
#########################
# APIs #
#########################
@router.post('/maintlogin')
def login(
response: Response,
request: LoginModel = Depends(LoginModel.as_form),
login_service: LoginService = Depends(get_service(LoginService))
):
response: Response,
request: LoginModel = Depends(LoginModel.as_form),
login_service: LoginService = Depends(get_service(LoginService))
):
try:
jwt_token = login_service.login(request.username, request.password)
except NotAuthorizeException as e:
print(e)
logger.info(f'ログイン失敗:{e}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
except JWTTokenVerifyException as e:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
logger.info(f'ログイン失敗:{e}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
verified_token = jwt_token.verify_token()
# 普通の認証だと、`cognito:username`に入る。
user_id = verified_token.user_id
user_record = login_service.logged_in_user(user_id)
# ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする
if user_record is None:
logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
# ユーザーが有効ではない場合、ログアウトにリダイレクトする
if not user_record.is_enable_user():
logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
# メンテユーザーではない場合、ログアウトにリダイレクトする
if user_record is None or not user_record.is_maintenance_user():
logger.info(f'メンテナンスユーザーではない: {user_id}, メンテナンスユーザーフラグ: {user_record.mntuser_flg}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
logger.info(f'メンテナンスユーザー認証成功: {user_id}')
# CSRFトークンを生成
csrf_token = secrets.token_urlsafe(32)
# DynamoDBにトークンIDを設定する
@ -92,9 +107,9 @@ def login(
user_flg=user_record.mntuser_flg
)
session_key = set_session(session_model)
response = RedirectResponse(
url='/menu',
url='/menu/',
status_code=status.HTTP_303_SEE_OTHER,
headers={'session_key': session_key}
)
@ -103,9 +118,9 @@ def login(
@router.get('/authorize')
def sso_authorize(
code:Union[str, None]=Depends(code_security),
login_service: LoginService=Depends(get_service(LoginService))
) -> Response:
code: Union[str, None] = Depends(code_security),
login_service: LoginService = Depends(get_service(LoginService))
) -> Response:
if not code:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_NOT_LOGIN)
@ -115,18 +130,26 @@ def sso_authorize(
# トークン検証
verified_token = jwt_token.verify_token()
except JWTTokenVerifyException as e:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
logger.info(f'SSOログイン失敗{e}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
# トークンからユーザーIDを取得
user_id = verified_token.user_id
user_record = login_service.logged_in_user(user_id)
# ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする
if user_record is None:
logger.info(f'存在しないユーザー: {user_id}, ユーザーID: {user_id}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
# ユーザーが有効ではない場合、ログアウトにリダイレクトする
if not user_record.is_enable_user():
logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
# Merckユーザーではない場合、ログアウトにリダイレクトする
if user_record is None or not user_record.is_groupware_user():
logger.info(f'メンテナンスユーザーではない: {user_id}, メンテナンスユーザーフラグ: {user_record.mntuser_flg}')
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
logger.info(f'顧客ユーザー認証成功: {user_id}')
# CSRFトークンを生成
csrf_token = secrets.token_urlsafe(32)
# DynamoDBにトークンIDを設定する
@ -143,7 +166,7 @@ def sso_authorize(
)
session_key = set_session(session_model)
response = RedirectResponse(
url='/menu',
url='/menu/',
status_code=status.HTTP_303_SEE_OTHER,
headers={'session_key': session_key}
)

View File

@ -14,17 +14,24 @@ router = APIRouter()
#########################
# Views #
#########################
@router.get('/', response_class=HTMLResponse)
def logout_view(
request: Request,
reason: Optional[str] = None,
session: Union[UserSession, None]=Depends(verify_session)
):
request: Request,
reason: Optional[str] = None,
session: Union[UserSession, None] = Depends(verify_session)
):
# どういうルートでログインしたかを判断するため、refererを取得
referer = request.headers.get('referer', '')
redirect_to = '/login/userlogin'
link_text = 'MeDaCA機能メニューへ'
if session is not None and session.user_flg == '1':
# セッションが切れておらず、メンテユーザである、またはメンテログイン画面から遷移した場合、メンテログイン画面に戻す
if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'):
redirect_to = '/login/maintlogin'
link_text = 'Login画面に戻る'
logout = LogoutViewModel()
logout.redirect_to = redirect_to
logout.reason = constants.LOGOUT_REASON_MESSAGE_MAP.get(reason, '')

View File

@ -0,0 +1,185 @@
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import HTMLResponse
from starlette import status
from src.depends.services import get_service
from src.model.internal.session import UserSession
from src.model.view.inst_emp_csv_download_view_model import \
InstEmpCsvDownloadViewModel
from src.model.view.inst_emp_csv_upload_view_model import \
InstEmpCsvUploadViewModel
from src.model.view.master_mainte_menu_view_model import \
MasterMainteMenuViewModel
from src.model.view.table_override_view_model import TableOverrideViewModel
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.session_service import set_session
from src.system_var import constants
from src.templates import templates
router = APIRouter()
router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
@router.get('/masterMainteMenu', response_class=HTMLResponse)
def menu_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render the master-maintenance menu screen.

    Responds 403 when the user lacks master-maintenance rights, or while a
    batch run or a DB dump is in progress.
    """
    session: UserSession = request.session
    # Master-maintenance access right is required.
    if session.master_mainte_flg != '1':
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
    # Locked out while the batch is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
    # Locked out while a DB dump is running.
    if batch_status_service.is_dump_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
    # View model for the template.
    menu_model = MasterMainteMenuViewModel()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    return templates.TemplateResponse(
        'masterMainteMenu.html',
        {'request': request, 'menu': menu_model},
        headers={'session_key': session.session_key}
    )
@router.get('/instEmpCsvUL', response_class=HTMLResponse)
def inst_emp_csv_upload_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render the institution/employee CSV upload screen.

    Responds 403 when the user lacks master-maintenance rights, or while a
    batch run or a DB dump is in progress.
    """
    session: UserSession = request.session
    # Master-maintenance access right is required.
    if session.master_mainte_flg != '1':
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
    # Locked out while the batch is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
    # Locked out while a DB dump is running.
    if batch_status_service.is_dump_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
    # View model for the template.
    upload_model = InstEmpCsvUploadViewModel()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    return templates.TemplateResponse(
        'instEmpCsvUL.html',
        {'request': request, 'view': upload_model},
        headers={'session_key': session.session_key}
    )
@router.get('/instEmpCsvDL', response_class=HTMLResponse)
def inst_emp_csv_download_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render the institution/employee CSV download screen.

    Responds 403 when the user lacks master-maintenance rights, or while a
    batch run or a DB dump is in progress.
    """
    session: UserSession = request.session
    # Master-maintenance access right is required.
    if session.master_mainte_flg != '1':
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
    # Locked out while the batch is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
    # Locked out while a DB dump is running.
    if batch_status_service.is_dump_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
    # View model for the template.
    download_model = InstEmpCsvDownloadViewModel()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    return templates.TemplateResponse(
        'instEmpCsvDL.html',
        {'request': request, 'view': download_model},
        headers={'session_key': session.session_key}
    )
@router.get('/tableOverride', response_class=HTMLResponse)
def table_override_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render the table-override screen.

    Responds 403 when the user lacks master-maintenance rights, or while a
    batch run or a DB dump is in progress.
    """
    session: UserSession = request.session
    # Master-maintenance access right is required.
    if session.master_mainte_flg != '1':
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
    # Locked out while the batch is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
    # Locked out while a DB dump is running.
    if batch_status_service.is_dump_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
    # View model for the template.
    override_model = TableOverrideViewModel()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    return templates.TemplateResponse(
        'tableOverride.html',
        {'request': request, 'view': override_model},
        headers={'session_key': session.session_key}
    )

View File

@ -2,6 +2,7 @@ from fastapi import APIRouter, Depends, Request
from fastapi.responses import HTMLResponse
from src.depends.services import get_service
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.view.menu_view_model import MenuViewModel
from src.model.view.user_view_model import UserViewModel
@ -10,22 +11,28 @@ from src.services.batch_status_service import BatchStatusService
from src.services.session_service import set_session
from src.templates import templates
logger = get_logger('MeDaCA機能メニュー')
router = APIRouter()
router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
@router.get('/', response_class=HTMLResponse)
def menu_view(
request: Request,
batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService))
):
request: Request,
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
logger.info(f'UserID: {session.user_id}')
# 日付マスターからバッチ情報を取得する
hdke_tbl_record = batch_status_service.hdke_table_record
batch_status = hdke_tbl_record.bch_actf
dump_status = hdke_tbl_record.dump_sts_kbn
user = UserViewModel(
doc_flg=session.doc_flg,
inst_flg=session.inst_flg,
@ -34,6 +41,7 @@ def menu_view(
)
menu = MenuViewModel(
batch_status=batch_status,
dump_status=dump_status,
user_model=user
)
# セッション書き換え

View File

@ -0,0 +1,11 @@
from fastapi import APIRouter
from fastapi.responses import RedirectResponse
from starlette import status
router = APIRouter()
@router.get('/')
def redirect_to_user_login():
    """Send visitors hitting the site root to the customer login screen."""
    # Root access always lands on the user (customer) login page.
    login_url = '/login/userlogin'
    return RedirectResponse(url=login_url, status_code=status.HTTP_303_SEE_OTHER)

View File

@ -0,0 +1,44 @@
from typing import Annotated
from fastapi import APIRouter, File, Form, Request, UploadFile
from src.templates import templates
router = APIRouter()
@router.get('/')
def get_view(request: Request):
    """Serve the sample file-upload form page."""
    context = {'request': request}
    return templates.TemplateResponse('sample_send_file.html', context)
@router.post('/')
# file.read() returns a coroutine, so the handler must be async.
async def post_view(
    # The file arrives via a form (the form tag needs enctype="multipart/form-data").
    file: Annotated[UploadFile, File()],
    message: str = Form()
):
    """Sample endpoint: read an uploaded file and echo its metadata."""
    # Reading yields a coroutine, so it must be awaited.
    raw_bytes = await file.read()
    # Close the upload handle when done.
    await file.close()
    # The upload arrives as bytes; decode it to text.
    decoded = raw_bytes.decode()
    print(decoded)
    try:
        return {
            # Original file name.
            "file_name": file.filename,
            # Size in bytes.
            "file_size": file.size,
            # Content-Type header of the upload.
            "file_content_type": file.content_type
        }
    except Exception:
        return {'code': 'fail'}

View File

@ -0,0 +1,347 @@
from typing import Optional
from fastapi import APIRouter, Depends, Request
from src.depends.services import get_service
from src.model.internal.session import UserSession
from src.model.request.ultmarc_doctor import (UltmarcDoctorInfoModel,
UltmarcDoctorSearchModel)
from src.model.request.ultmarc_inst import (UltmarcInstInfoModel,
UltmarcInstSearchModel)
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.session_service import set_session
from src.services.ultmarc_view_service import UltmarcViewService
from src.templates import templates
router = APIRouter()
router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
#########################
# アルトマーク施設検索 #
#########################
@router.get('/instSearch')
def ultmarc_inst_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService))
):
    """Render the Ultmarc institution search screen with its search choices."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Search choices (prefecture / institution kind).
    view_model = ultmarc_service.prepare_ultmarc_inst_search_view()
    view_model.is_batch_processing = batch_running
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'instSearch.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
@router.post('/instSearch')
def search_inst(
    request: Request,
    ultmarc_inst_form: Optional[UltmarcInstSearchModel] = Depends(UltmarcInstSearchModel.as_form),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Run an Ultmarc institution search and render the result list."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Institutions matching the submitted conditions.
    matched_insts = ultmarc_service.search_inst_data(ultmarc_inst_form)
    # Search choices (prefecture / institution kind) plus the results.
    view_model = ultmarc_service.prepare_ultmarc_inst_search_view()
    view_model.is_batch_processing = batch_running
    view_model.inst_data = matched_insts
    # Hand the form back unescaped so the screen redisplays the raw input.
    view_model.form_data = ultmarc_inst_form.unescape()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'instSearch.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
#########################
# アルトマーク施設詳細 #
#########################
@router.get('/instInfo')
def ultmarc_inst_info_view(
    request: Request,
    id: str,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService))
):
    """Render the detail screen for a single Ultmarc institution."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Detail view model for the requested institution.
    view_model = ultmarc_service.prepare_ultmarc_inst_info_view(id)
    view_model.is_batch_processing = batch_running
    view_model.inst_id = id
    # Direct access shows a single record: one page in total, on page 0.
    view_model.post_cnt = 1
    view_model.page_num = 0
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'instInfo.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
@router.post('/instInfo')
def ultmarc_inst_info_search(
    request: Request,
    ultmarc_inst_form: Optional[UltmarcInstInfoModel] = Depends(UltmarcInstInfoModel.as_form),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render institution detail while paging through a comma-separated ID list."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # The form carries every ID of the result set as a comma-separated string.
    id_list = ultmarc_inst_form.inst_id.split(',')
    # Detail view model for the ID on the requested page.
    view_model = ultmarc_service.prepare_ultmarc_inst_info_view(id_list[ultmarc_inst_form.page_num])
    view_model.is_batch_processing = batch_running
    view_model.inst_id = ultmarc_inst_form.inst_id
    # Total number of pages (= number of IDs) and the page being shown.
    view_model.post_cnt = len(id_list)
    view_model.page_num = ultmarc_inst_form.page_num
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'instInfo.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
#########################
# アルトマーク医師検索 #
#########################
@router.get('/docSearch')
def ultmarc_doctor_view(
    request: Request,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService))
):
    """Render the Ultmarc doctor search screen with its search choices."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Search choices (prefecture).
    view_model = ultmarc_service.prepare_ultmarc_doctor_search_view()
    view_model.is_batch_processing = batch_running
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'docSearch.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
@router.post('/docSearch')
def search_doc(
    request: Request,
    ultmarc_doctor_form: Optional[UltmarcDoctorSearchModel] = Depends(UltmarcDoctorSearchModel.as_form),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Run an Ultmarc doctor search and render the result list."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Doctors matching the submitted conditions.
    matched_doctors = ultmarc_service.search_doctor_data(ultmarc_doctor_form)
    # Search choices plus the results.
    view_model = ultmarc_service.prepare_ultmarc_doctor_search_view()
    view_model.is_batch_processing = batch_running
    view_model.doctor_data = matched_doctors
    # Hand the form back unescaped so the screen redisplays the raw input.
    view_model.form_data = ultmarc_doctor_form.unescape()
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'docSearch.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
#########################
# アルトマーク医師詳細 #
#########################
@router.get('/docInfo')
def ultmarc_doctor_info_view(
    request: Request,
    id: str,
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService))
):
    """Render the detail screen for a single Ultmarc doctor."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # Detail view model for the requested doctor.
    view_model = ultmarc_service.prepare_ultmarc_doctor_info_view(id)
    view_model.is_batch_processing = batch_running
    view_model.doc_id = id
    # Direct access shows a single record: one page in total, on page 0.
    view_model.post_cnt = 1
    view_model.page_num = 0
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'docInfo.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )
@router.post('/docInfo')
def ultmarc_doctor_info_search(
    request: Request,
    ultmarc_doctor_form: Optional[UltmarcDoctorInfoModel] = Depends(UltmarcDoctorInfoModel.as_form),
    ultmarc_service: UltmarcViewService = Depends(get_service(UltmarcViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render doctor detail while paging through a comma-separated ID list."""
    session: UserSession = request.session
    # Expose the batch-running status to the template via the view model.
    batch_running = batch_status_service.is_batch_processing()
    # The form carries every ID of the result set as a comma-separated string.
    id_list = ultmarc_doctor_form.doc_id.split(',')
    # Detail view model for the ID on the requested page.
    view_model = ultmarc_service.prepare_ultmarc_doctor_info_view(id_list[ultmarc_doctor_form.page_num])
    view_model.is_batch_processing = batch_running
    view_model.doc_id = ultmarc_doctor_form.doc_id
    # Total number of pages (= number of IDs) and the page being shown.
    view_model.post_cnt = len(id_list)
    view_model.page_num = ultmarc_doctor_form.page_num
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'docInfo.html',
        {'request': request, 'ultmarc': view_model},
        headers={'session_key': session_key}
    )

View File

@ -18,4 +18,4 @@ def create_stop_app_handler(app: FastAPI) -> Callable:
def stop_app() -> None:
close_db(app)
return stop_app
return stop_app

View File

@ -77,7 +77,10 @@ class Database:
Raises:
DBException: 接続失敗
"""
self.__connection = self.__engine.connect()
try:
self.__connection = self.__engine.connect()
except Exception as e:
raise DBException(e)
def execute_select(self, select_query: str, parameters=None) -> list[dict]:
"""SELECTクエリを実行します。

View File

@ -16,13 +16,14 @@ class SQLCondition:
self.column = column
self.operator = operator
self.param = param
self.literal=literal
self.literal = literal
def apply(self):
# literalがFalseならプレースホルダー。Trueだったならは固定値。
param = f':{self.param}' if self.literal is False else self.param
return f' {self.column} {self.operator} {param}'
# 定数
EQ = '='
NE = '<>'
@ -32,4 +33,4 @@ GE = '>='
LE = '<='
LIKE = 'LIKE'
IS = 'IS'
IS_NOT = 'IS NOT'
IS_NOT = 'IS NOT'

View File

@ -5,11 +5,13 @@ from fastapi import Depends
from fastapi.security import APIKeyCookie, APIKeyQuery
from src.error.exceptions import JWTTokenVerifyException
from src.logging.get_logger import get_logger
from src.model.internal.jwt_token import JWTToken
from src.model.internal.session import UserSession
from src.services.session_service import get_session
from src.system_var import environment
logger = get_logger('認証チェック')
cookie_security = APIKeyCookie(name='session', auto_error=False)
code_security = APIKeyQuery(name='code', auto_error=False)
@ -17,32 +19,34 @@ code_security = APIKeyQuery(name='code', auto_error=False)
def get_current_session(session_key=Depends(cookie_security)):
if session_key is None:
return None
session = get_session(session_key)
# sessionが存在しない場合はNoneが返る
return session
def check_session_expired(session:Union[UserSession, None]=Depends(get_current_session)):
def check_session_expired(session: Union[UserSession, None] = Depends(get_current_session)):
"""セッションの最後にアクセスした時間が、セッション有効期限切れであるかどうかをチェックする"""
if session is None:
return None
last_access_time = session.last_access_time
session_expired_period = datetime.datetime.fromtimestamp(last_access_time) + datetime.timedelta(minutes=environment.SESSION_EXPIRE_MINUTE)
session_expired_period = datetime.datetime.fromtimestamp(
last_access_time) + datetime.timedelta(minutes=environment.SESSION_EXPIRE_MINUTE)
if session_expired_period < datetime.datetime.now():
return None
return session
def verify_session(session:Union[UserSession, None]=Depends(check_session_expired)):
def verify_session(session: Union[UserSession, None] = Depends(check_session_expired)):
if session is None:
return None
jwt_token = JWTToken(session.id_token, session.refresh_token)
try:
jwt_token.verify_token()
except JWTTokenVerifyException as e:
print(e)
logger.info(e)
return None
return session

View File

@ -14,4 +14,4 @@ def get_database(request: Request) -> Database:
def get_repository(Repo_type: Type[BaseRepository]) -> Callable:
def get_repo(db: Database = Depends(get_database)) -> Type[BaseRepository]:
return Repo_type(db)
return get_repo
return get_repo

View File

@ -1,7 +1,6 @@
from typing import Callable, Type
from fastapi import Depends
from starlette.requests import Request
from src.db.database import Database
from src.depends.database import get_database
@ -9,8 +8,8 @@ from src.services.base_service import BaseService
def get_service(Service_type: Type[BaseService]) -> Callable:
def get_service(db: Database=Depends(get_database)) -> Type[BaseService]:
def get_service(db: Database = Depends(get_database)) -> Type[BaseService]:
repositories = {key: repository(db) for key, repository in Service_type.REPOSITORIES.items()}
clients = {key: client() for key, client in Service_type.CLIENTS.items()}
return Service_type(repositories=repositories, clients=clients)
return get_service
return get_service

View File

@ -12,4 +12,4 @@ def http_exception_handler(request: Request, exc: HTTPException):
raise exc
error_detail = exc.detail if hasattr(exc, 'detail') else ''
reason = parse.quote(error_detail)
return RedirectResponse(f'/logout?reason={reason}', status_code=status.HTTP_303_SEE_OTHER)
return RedirectResponse(f'/logout/?reason={reason}', status_code=status.HTTP_303_SEE_OTHER)

View File

@ -7,18 +7,22 @@ class MeDaCaException(Exception):
"""Webアプリの共通例外"""
pass
class NotAuthorizeException(MeDaCaException):
"""認証失敗の例外"""
pass
class JWTTokenVerifyException(MeDaCaException):
"""トークン検証失敗の例外"""
pass
class DBException(MeDaCaException):
"""DB関連の例外"""
pass
class UnexpectedException(MeDaCaException):
"""予期しない例外"""

View File

@ -0,0 +1,37 @@
import logging
from src.system_var.environment import LOG_LEVEL
# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する
for name in ["boto3", "botocore", "s3transfer", "urllib3"]:
logging.getLogger(name).setLevel(logging.WARNING)
def get_logger(log_name: str) -> logging.Logger:
    """Return a uniquely named, configured logger.

    The level is taken from the LOG_LEVEL setting, falling back to INFO when
    the value is not a recognized level name. A stream handler with a
    tab-separated format is attached only when the logger (or an ancestor)
    has none yet, so repeated calls do not duplicate output.

    Args:
        log_name (str): Logger name.

    Returns:
        logging.Logger: The configured logger instance.
    """
    result = logging.getLogger(log_name)

    # getLevelName maps a valid level name to its int value; anything else
    # comes back as a string, in which case we default to INFO.
    resolved = logging.getLevelName(LOG_LEVEL)
    result.setLevel(resolved if isinstance(resolved, int) else logging.INFO)

    if not result.hasHandlers():
        result.addHandler(logging.StreamHandler())

    tab_formatter = logging.Formatter(
        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
        '%Y-%m-%d %H:%M:%S'
    )
    for attached in result.handlers:
        attached.setFormatter(tab_formatter)
    return result

View File

@ -5,7 +5,9 @@ from fastapi.staticfiles import StaticFiles
from starlette import status
import src.static as static
from src.controller import bio, bio_download, healthcheck, login, logout, menu
from src.controller import (bio, bio_download, healthcheck, login, logout,
master_mainte, menu, root, ultmarc)
from src.controller.sample_send_file import router as sample_router
from src.core import tasks
from src.error.exception_handler import http_exception_handler
from src.error.exceptions import UnexpectedException
@ -14,6 +16,8 @@ app = FastAPI()
# 静的ファイルをマウント
app.mount('/static', StaticFiles(directory=path.dirname(static.__file__)), name='static')
# ルートパス。顧客ログイン画面にリダイレクトさせる
app.include_router(root.router)
# ログイン関連のルーター
app.include_router(login.router, prefix='/login')
# ログアウト関連のルーター
@ -22,12 +26,19 @@ app.include_router(logout.router, prefix='/logout')
app.include_router(menu.router, prefix='/menu')
# 生物由来関連のルーター
app.include_router(bio.router, prefix='/bio')
# アルトマークデータ照会のルーター
app.include_router(ultmarc.router, prefix='/ultmarc')
# 生物由来のダウンロード用APIルーター。
# クライアントから非同期呼出しされるため、共通ルーターとは異なる扱いとする。
app.include_router(bio_download.router, prefix='/bio')
# マスタメンテ
app.include_router(master_mainte.router, prefix='/masterMainte')
# ヘルスチェック用のルーター
app.include_router(healthcheck.router, prefix='/healthcheck')
# サンプル実装、ファイル送信ルーター
app.include_router(sample_router, prefix='/sample')
# エラー発生時にログアウト画面に遷移させるハンドラー
app.add_exception_handler(status.HTTP_401_UNAUTHORIZED, http_exception_handler)
app.add_exception_handler(status.HTTP_403_FORBIDDEN, http_exception_handler)

View File

@ -15,34 +15,34 @@ class BioSalesViewModel(BaseDBModel):
rec_tran_kbn: Optional[str]
rev_hsdnymd_wrk: Optional[str]
rev_hsdnymd_srk: Optional[str]
rec_urag_no: Optional[str]
rec_comm_nm: Optional[str]
rec_nnskfcl_nm: Optional[str]
rec_nnsk_fcl_addr: Optional[str]
rec_urag_num: Optional[str]
rec_comm_name: Optional[str]
rec_nonyu_fcl_name: Optional[str]
rec_nonyu_fcl_addr: Optional[str]
rec_lot_num: Optional[str]
rec_amt: Optional[str]
rec_qty: Optional[str]
rec_ymd: Optional[str]
sale_data_cat: Optional[str]
slip_file_nm: Optional[str]
slip_mgt_no: Optional[str]
slip_file_name: Optional[str]
slip_mgt_num: Optional[str]
row_num: Optional[int]
hsdn_ymd: Optional[str]
exec_dt: Optional[str]
v_tran_cd: Optional[int]
tran_kbn_nm: Optional[str]
tran_kbn_name: Optional[str]
whs_org_cd: Optional[str]
v_whsorg_cd: Optional[str]
whs_org_nm: Optional[str]
whs_org_name: Optional[str]
whs_org_kn: Optional[str]
v_whs_cd: Optional[int]
whs_nm: Optional[str]
nnsk_cd: Optional[str]
whs_name: Optional[str]
nonyu_fcl_cd: Optional[str]
v_inst_cd: Optional[str]
v_inst_kn: Optional[str]
v_inst_nm: Optional[str]
v_inst_addr: Optional[str]
comm_cd: Optional[str]
comm_nm: Optional[str]
product_name: Optional[str]
whs_rep_comm_nm: Optional[str]
whs_rep_nnskfcl_nm: Optional[str]
whs_rep_nnsk_fcl_addr: Optional[str]
@ -53,7 +53,7 @@ class BioSalesViewModel(BaseDBModel):
fcl_exis_kbn: Optional[str]
amt: Optional[int]
slip_org_kbn: Optional[str]
bef_slip_mgt_no: Optional[str]
bef_slip_mgt_num: Optional[str]
lot_no_err_flg: Optional[str]
iko_flg: Optional[str]
kjyo_ym: Optional[str]

View File

@ -1,4 +1,3 @@
from datetime import datetime
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
@ -6,3 +5,4 @@ from src.model.db.base_db_model import BaseDBModel
class HdkeTblModel(BaseDBModel):
    """DB row model for the HDKE table (batch/dump status flags)."""

    # Batch active flag — NOTE(review): value semantics not visible here; confirm against the table definition
    bch_actf: Optional[str]
    # Dump status category
    dump_sts_kbn: Optional[str]

View File

@ -0,0 +1,8 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class InstDivMasterModel(BaseDBModel):
    """DB row model for the institution-division master."""

    # Institution division code
    inst_div_cd: Optional[str]
    # Institution division name
    inst_div_name: Optional[str]

View File

@ -4,4 +4,4 @@ from src.model.db.base_db_model import BaseDBModel
class PharmacyProductMasterModel(BaseDBModel):
mkr_cd_nm: Optional[str]
mkr_cd_name: Optional[str]

View File

@ -0,0 +1,8 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class PrefcMasterModel(BaseDBModel):
    """DB row model for the prefecture master."""

    # Prefecture code
    prefc_cd: Optional[str]
    # Prefecture name
    prefc_name: Optional[str]

View File

@ -0,0 +1,19 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
# NOTE(review): @sanitize presumably escapes string field values — confirm in src.util.sanitize
@sanitize
class UltmarcDoctorDBModel(BaseDBModel):
    """One row of the Ultmarc doctor search-result query."""

    # Doctor code
    dcf_pcf_dr_cd: Optional[str]
    # Doctor name (kanji)
    dr_name: Optional[str]
    # Formal institution name (kanji)
    form_inst_name_kanji: Optional[str]
    # Institution code
    dcf_dsf_inst_cd: Optional[str]
    # Belonging section name
    blng_sec_name: Optional[str]
    # Treatment course (specialty) name
    trt_course_name: Optional[str]
    # Formal post/title name
    form_post_name: Optional[str]
    # Alma mater
    alma: Optional[str]
    # Graduation year
    grad_y: Optional[str]
    # Prefecture name
    prefc_name: Optional[str]
    # Belonging section code
    blng_sec_cd: Optional[str]

View File

@ -0,0 +1,18 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
# NOTE(review): @sanitize presumably escapes string field values — confirm in src.util.sanitize
@sanitize
class UltmarcDoctorInfoDBModel(BaseDBModel):
    """Doctor-detail row shown on the Ultmarc doctor information page."""

    # Doctor code
    dcf_pcf_dr_cd: Optional[str]
    # Doctor name (kanji)
    dr_name: Optional[str]
    # Doctor name (kana)
    dr_name_kana: Optional[str]
    # Sex
    sex: Optional[str]
    # Birth date — stored as a string; format not visible here
    birthday: Optional[str]
    # Alma mater
    alma: Optional[str]
    # Hometown
    hometown: Optional[str]
    # Graduation year
    grad_y: Optional[str]
    # Year of medical licensure
    drday_y: Optional[str]
    # Establishment year
    estab_y: Optional[str]

View File

@ -0,0 +1,13 @@
from datetime import date
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class UltmarcDoctorWrkplaceDBModel(BaseDBModel):
    """Current workplace row for a doctor (Ultmarc)."""

    # Institution code
    dcf_dsf_inst_cd: Optional[str]
    # Institution name (kanji)
    inst_name_kanji: Optional[str]
    # Belonging section name
    blng_sec_name: Optional[str]
    # University post name
    univ_post_name: Optional[str]
    # Post/title name
    post_name: Optional[str]
    # Application start date
    aply_start_ymd: Optional[date]

View File

@ -0,0 +1,9 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
@sanitize
class UltmarcDoctorWrkplaceCountDBModel(BaseDBModel):
    """Single-value row holding a workplace record count."""

    # Row count returned by the COUNT query
    count: Optional[int]

View File

@ -0,0 +1,14 @@
from datetime import date
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class UltmarcDoctorWrkplaceHisDBModel(BaseDBModel):
    """Historical workplace row for a doctor (Ultmarc), with start and end dates."""

    # Institution code
    dcf_dsf_inst_cd: Optional[str]
    # Institution name (kanji)
    inst_name_kanji: Optional[str]
    # Belonging section name
    blng_sec_name: Optional[str]
    # University post name
    univ_post_name: Optional[str]
    # Post/title name
    post_name: Optional[str]
    # Application start date
    aply_start_ymd: Optional[date]
    # Application end date
    aply_end_ymd: Optional[date]

View File

@ -0,0 +1,18 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
# NOTE(review): @sanitize presumably escapes string field values — confirm in src.util.sanitize
@sanitize
class UltmarcInstDBModel(BaseDBModel):
    """One row of the Ultmarc institution search-result query."""

    # Institution code
    dcf_dsf_inst_cd: Optional[str]
    # Abolition date
    abolish_ymd: Optional[str]
    # Scheduled-deletion reason code
    delete_sche_reason_cd: Optional[str]
    # Formal institution name (kanji)
    form_inst_name_kanji: Optional[str]
    # Institution address
    inst_addr: Optional[str]
    # Postal code
    postal_number: Optional[str]
    # Institution phone number
    inst_phone_number: Optional[str]
    # Institution division name
    inst_div_name: Optional[str]
    # Hospital-assortment classification name
    hp_assrt_name: Optional[str]
    # Prefecture name
    prefc_name: Optional[str]

View File

@ -0,0 +1,56 @@
from datetime import datetime
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class UltmarcInstInfoDBModel(BaseDBModel):
    """Full institution-detail row shown on the Ultmarc institution information page.

    Field groups follow the Ultmarc institution record layout: identity and
    lifecycle flags, names/contact, inspection items, permitted-bed counts,
    representative, and resolved display names.
    """

    # --- identity / lifecycle ---
    dcf_dsf_inst_cd: Optional[str]
    unconf_flg: Optional[str]
    dup_opp_cd: Optional[str]
    close_start_ym: Optional[str]
    close_flg: Optional[str]
    delete_sche_reason_cd: Optional[str]
    abolish_ymd: Optional[str]
    estab_sche_ym: Optional[str]
    estab_sche_flg: Optional[str]
    # --- names and contact information ---
    form_inst_name_kana: Optional[str]
    form_inst_name_kanji: Optional[str]
    inst_name_kana: Optional[str]
    inst_name_kanji: Optional[str]
    manage_cd: Optional[str]
    postal_number: Optional[str]
    inst_phone_number: Optional[str]
    addr_unknown_reason_cd: Optional[str]
    phone_number_non_flg: Optional[str]
    inst_addr_kana: Optional[str]
    inst_addr: Optional[str]
    re_exam_cd: Optional[str]
    rltd_univ_prnt_cd: Optional[str]
    # --- inspection items (flags per inspection category) ---
    insp_item_micrb: Optional[str]
    insp_item_serum: Optional[str]
    insp_item_blood: Optional[str]
    insp_item_patho: Optional[str]
    insp_item_paras: Optional[str]
    insp_item_biochem: Optional[str]
    insp_item_ri: Optional[str]
    # --- permitted bed counts by ward category ---
    prmit_bed_num_gen: Optional[str]
    prmit_bed_num_rcup: Optional[str]
    prmit_bed_num_mental: Optional[str]
    prmit_bed_num_infection: Optional[str]
    prmit_bed_num_tuber: Optional[str]
    prmit_bed_num_other: Optional[str]
    prmit_bed_num_sum: Optional[str]
    ward_abolish_flg: Optional[str]
    bed_num: Optional[str]
    prmit_bed_maint_ymd: Optional[str]
    # --- representative ---
    inst_repre_cd: Optional[str]
    inst_repre_kana: Optional[str]
    inst_repre: Optional[str]
    # --- system / resolved display names ---
    sys_update_date: Optional[datetime]
    delete_sche_reason: Optional[str]
    inst_div_name: Optional[str]
    manage_name: Optional[str]
    hp_assrt_name: Optional[str]
    parent_name: Optional[str]
    dcf_prnt_inst_cd: Optional[str]

View File

@ -0,0 +1,9 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
@sanitize
class UltmarcInstTrtCourseDBModel(BaseDBModel):
    """Treatment-course row attached to an institution (abbreviated name)."""

    # Treatment course name, abbreviated form
    trt_course_name_abb: Optional[str]

View File

@ -0,0 +1,10 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
@sanitize
class UltmarcSosietyDBModel(BaseDBModel):
    """Medical-society membership row for a doctor."""

    # Society code
    sosiety_cd: Optional[str]
    # Society name
    sosiety_name: Optional[str]

View File

@ -0,0 +1,10 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
@sanitize
class UltmarcSpecialistLicenseDBModel(BaseDBModel):
    """Specialist-license row for a doctor."""

    # Specialist code
    specialist_cd: Optional[str]
    # Specialist license name
    specialist_license_name: Optional[str]

View File

@ -0,0 +1,9 @@
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
@sanitize
class UltmarcTrtCourseDBModel(BaseDBModel):
    """Treatment-course (specialty) row for a doctor."""

    # Treatment course name
    trt_course_name: Optional[str]

View File

@ -28,9 +28,9 @@ class UserMasterModel(BaseDBModel):
def is_enable_user(self):
return self.enabled_flg == 'Y'
def is_maintenance_user(self):
return self.mntuser_flg == '1'
def is_groupware_user(self):
return self.mntuser_flg == '0'
return self.mntuser_flg == '0' or self.mntuser_flg is None

View File

@ -6,5 +6,5 @@ from src.model.db.base_db_model import BaseDBModel
class WholesalerMasterModel(BaseDBModel):
rec_whs_cd: Optional[str]
rec_whs_sub_cd: Optional[str]
nm: Optional[str]
whs_nm: Optional[str]
name: Optional[str]
whs_name: Optional[str]

View File

@ -15,11 +15,11 @@ class JWTToken:
refresh_token: str
verified_jwt: Optional[dict]
def __init__(self, id_token: str, refresh_token: str, verified_jwt: dict=None) -> None:
def __init__(self, id_token: str, refresh_token: str, verified_jwt: dict = None) -> None:
self.id_token = id_token
self.refresh_token = refresh_token
self.verified_jwt = verified_jwt
@property
def verified_token(self):
if self.verified_jwt is None:
@ -29,7 +29,7 @@ class JWTToken:
@property
def user_id(self):
verified_token = self.verified_token
user_id: str = None
identities: dict = verified_token.get('identities')
if identities is not None:
@ -63,7 +63,7 @@ class JWTToken:
'code': code,
'redirect_uri': environment.COGNITO_REDIRECT_URI
}
message = bytes(f'{environment.COGNITO_CLIENT_ID}:{environment.COGNITO_CLIENT_SECRET}', 'utf8')
auth_header_value = base64.b64encode(message).decode()
request_headers = {
@ -76,7 +76,7 @@ class JWTToken:
raise JWTTokenVerifyException(res.text)
token_response = json.loads(res.text)
return cls(id_token=token_response['id_token'], refresh_token=token_response['refresh_token'])
@classmethod
@ -99,7 +99,7 @@ class JWTToken:
'refresh_token': refresh_token,
'redirect_uri': environment.COGNITO_REDIRECT_URI
}
message = bytes(f'{environment.COGNITO_CLIENT_ID}:{environment.COGNITO_CLIENT_SECRET}', 'utf8')
auth_header_value = base64.b64encode(message).decode()
request_headers = {
@ -117,13 +117,13 @@ class JWTToken:
def verify_token(self):
if self.id_token is None:
raise Exception('アクセストークンがない')
issuer = f'https://cognito-idp.{environment.AWS_REGION}.amazonaws.com/{environment.COGNITO_USER_POOL_ID}'
jwks_url = f'{issuer}/.well-known/jwks.json'
jwks_client = jwt.PyJWKClient(jwks_url)
signing_key = jwks_client.get_signing_key_from_jwt(self.id_token)
try:
verified_jwt = jwt.decode(
self.id_token,

View File

@ -14,11 +14,11 @@ class UserSession(DynamoDBTableModel):
session_key = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute()
id_token = UnicodeAttribute()
doc_flg = UnicodeAttribute()
inst_flg = UnicodeAttribute()
bio_flg = UnicodeAttribute()
master_mainte_flg = UnicodeAttribute()
user_flg = UnicodeAttribute()
doc_flg = UnicodeAttribute(null=True)
inst_flg = UnicodeAttribute(null=True)
bio_flg = UnicodeAttribute(null=True)
master_mainte_flg = UnicodeAttribute(null=True)
user_flg = UnicodeAttribute(null=True)
refresh_token = UnicodeAttribute()
csrf_token = UnicodeAttribute()
last_access_time = NumberAttribute()
@ -27,7 +27,7 @@ class UserSession(DynamoDBTableModel):
@classmethod
def new_last_access_time(cls):
return datetime.datetime.now().timestamp()
@classmethod
def new_record_expiration_time(cls, expire=environment.SESSION_EXPIRE_MINUTE):
last_access_time = datetime.datetime.fromtimestamp(cls.new_last_access_time())
@ -35,8 +35,8 @@ class UserSession(DynamoDBTableModel):
@classmethod
def new(
cls, user_id, id_token, refresh_token, csrf_token, doc_flg, inst_flg, bio_flg, master_mainte_flg, user_flg
):
cls, user_id, id_token, refresh_token, csrf_token, doc_flg, inst_flg, bio_flg, master_mainte_flg, user_flg
):
return cls(
session_key=str(uuid.uuid4()),
user_id=user_id,
@ -47,7 +47,7 @@ class UserSession(DynamoDBTableModel):
inst_flg=inst_flg,
bio_flg=bio_flg,
master_mainte_flg=master_mainte_flg,
user_flg=user_flg,
user_flg=user_flg,
last_access_time=cls.new_last_access_time(),
record_expiration_time=cls.new_record_expiration_time()
)

View File

@ -9,18 +9,18 @@ from src.util.string_util import is_not_empty
@sanitize
class BioModel(BaseModel):
wholesaler_code: Optional[str]
wholesaler_sub_code: Optional[str]
wholesaler_name: Optional[str]
org_kbn: Optional[str]
rec_whs_cd: Optional[str]
rec_whs_sub_cd: Optional[str]
whs_name: Optional[str]
slip_org_kbn: Optional[str]
rec_ymd_from: Optional[str]
rec_ymd_to: Optional[str]
rec_lot_num: Optional[str]
data_kbn: Optional[str]
maker_cd: Optional[str]
mkr_cd: Optional[str]
rev_hsdnymd_srk_from: Optional[str]
rev_hsdnymd_srk_to: Optional[str]
ikoFlg: Optional[str]
iko_flg: Optional[str]
@classmethod
def as_form(
@ -50,7 +50,7 @@ class BioModel(BaseModel):
ctrl_rev_hsdnymd_srk_to,
ikoFlg
)
@classmethod
def as_body(
cls,
@ -79,7 +79,6 @@ class BioModel(BaseModel):
ctrl_rev_hsdnymd_srk_to,
ikoFlg
)
def __convert_request_param(
cls,
@ -122,16 +121,16 @@ class BioModel(BaseModel):
rev_hsdnymd_srk_to = ctrl_rev_hsdnymd_srk_to.replace('/', '')
return cls(
wholesaler_code=wholesaler_code,
wholesaler_sub_code=wholesaler_sub_code,
wholesaler_name=wholesaler_name,
org_kbn=ctrl_org_kbn,
rec_whs_cd=wholesaler_code,
rec_whs_sub_cd=wholesaler_sub_code,
whs_name=wholesaler_name,
slip_org_kbn=ctrl_org_kbn,
rec_ymd_from=rec_ymd_from,
rec_ymd_to=rec_ymd_to,
rec_lot_num=ctrl_rec_lot_num,
data_kbn=ctrl_data_kbn,
maker_cd=ctrl_maker_cd,
mkr_cd=ctrl_maker_cd,
rev_hsdnymd_srk_from=rev_hsdnymd_srk_from,
rev_hsdnymd_srk_to=rev_hsdnymd_srk_to,
ikoFlg=ikoFlg
)
iko_flg=ikoFlg
)

View File

@ -1,20 +1,18 @@
from typing import Optional
from fastapi import Body
from pydantic import BaseModel
class BioDownloadModel(BaseModel):
user_id: str
kind: str
ext: str
@classmethod
def as_body(
cls,
user_id: str = Body(),
kind: str = Body()
ext: str = Body()
):
return cls(
user_id=user_id,
kind=kind
ext=ext
)

View File

@ -5,7 +5,7 @@ from pydantic import BaseModel
class LoginModel(BaseModel):
username: str
password: str
@classmethod
def as_form(
cls,

View File

@ -0,0 +1,20 @@
import html
from pydantic import BaseModel
from src.util.sanitize import sanitize
@sanitize
class RequestBaseModel(BaseModel):
    """
    Base model for receiving web request payloads.

    String members are escaped on receipt (via @sanitize); call unescape()
    to restore the raw values.
    """

    def unescape(self):
        """Undo the HTML escaping of all string members, in place.

        Returns:
            RequestBaseModel: self, for call chaining.
        """
        for key, value in self.dict().items():
            # isinstance instead of `type(v) is str`: also covers str
            # subclasses, and makes the separate None check unnecessary.
            if isinstance(value, str):
                setattr(self, key, html.unescape(value))
        return self

View File

@ -0,0 +1,69 @@
from typing import Optional
from fastapi import Form
from src.model.request.request_base_model import RequestBaseModel
class UltmarcDoctorSearchModel(RequestBaseModel):
    """Form payload for the Ultmarc doctor search screen.

    Built via as_form() from the posted ``ctrl_*`` form fields; all search
    criteria are optional.
    """

    # Doctor code
    dcf_pcf_dr_cd: Optional[str]
    # Doctor name (kanji)
    dr_name: Optional[str]
    # Doctor name (kana)
    dr_name_kana: Optional[str]
    # Workplace institution code
    dcf_dsf_inst_cd: Optional[str]
    # Workplace formal name (kanji)
    form_inst_name_kanji: Optional[str]
    # Workplace formal name (kana)
    form_inst_name_kana: Optional[str]
    # Workplace prefecture code
    prefc_cd: Optional[str]
    # Belonging section name
    blng_sec_name: Optional[str]
    # Treatment course (specialty) name
    trt_course_name: Optional[str]
    # Alma mater
    alma: Optional[str]
    # Graduation year
    grad_y: Optional[str]
    # Pagination page to restore when returning from the detail screen
    pagination_page_number: Optional[int]

    @classmethod
    def as_form(
        cls,
        ctrl_dcf_pcf_dr_cd: str = Form(None),
        ctrl_dr_name: str = Form(None),
        ctrl_dr_name_kana: str = Form(None),
        ctrl_dcf_dsf_inst_cd: str = Form(None),
        ctrl_form_inst_name_kanji: str = Form(None),
        ctrl_form_inst_name_kana: str = Form(None),
        ctrl_prefc_cd: str = Form(None),
        ctrl_blng_sec_name: str = Form(None),
        ctrl_trt_course_name: str = Form(None),
        ctrl_alma: str = Form(None),
        ctrl_grad_y: str = Form(None),
        pagination_page_number: str = Form(None)
    ):
        """Map the HTML form field names (ctrl_*) onto the model fields.

        pagination_page_number is posted as a string; coercion to int is
        left to the model's Optional[int] annotation.
        """
        return cls(
            dcf_pcf_dr_cd=ctrl_dcf_pcf_dr_cd,
            dr_name=ctrl_dr_name,
            dr_name_kana=ctrl_dr_name_kana,
            dcf_dsf_inst_cd=ctrl_dcf_dsf_inst_cd,
            form_inst_name_kanji=ctrl_form_inst_name_kanji,
            form_inst_name_kana=ctrl_form_inst_name_kana,
            prefc_cd=ctrl_prefc_cd,
            blng_sec_name=ctrl_blng_sec_name,
            trt_course_name=ctrl_trt_course_name,
            alma=ctrl_alma,
            grad_y=ctrl_grad_y,
            pagination_page_number=pagination_page_number
        )
class UltmarcDoctorInfoModel(RequestBaseModel):
    """Form payload for the doctor-detail page: posted ID list and page index."""

    # Comma-separated list of doctor codes
    doc_id: Optional[str]
    # 0-based index of the doctor currently displayed
    page_num: Optional[int]

    @classmethod
    def as_form(
        cls,
        doc_id: str = Form(None),
        page_num: str = Form(None)
    ):
        """Build the model from the posted form fields.

        page_num arrives as a string; it is converted to int, keeping None
        when the field was not posted (an unconditional int(page_num)
        raised TypeError on a missing field).
        """
        return cls(
            doc_id=doc_id,
            page_num=int(page_num) if page_num is not None else None
        )

View File

@ -0,0 +1,70 @@
import html
from typing import Optional
from fastapi import Form
from src.model.request.request_base_model import RequestBaseModel
# @sanitize
class UltmarcInstSearchModel(RequestBaseModel):
    """Form payload for the Ultmarc institution search screen.

    Built via as_form() from the posted ``ctrl_*`` form fields. Escaping and
    unescape() are inherited from RequestBaseModel — the byte-identical
    unescape() override that was duplicated here has been removed.
    """

    # Institution code
    dcf_dsf_inst_cd: Optional[str]
    # Institution division code
    inst_div_cd: Optional[str]
    # Formal institution name (kanji)
    form_inst_name_kanji: Optional[str]
    # Formal institution name (kana)
    form_inst_name_kana: Optional[str]
    # Postal code
    postal_number: Optional[str]
    # Institution phone number
    inst_phone_number: Optional[str]
    # Prefecture code
    prefc_cd: Optional[str]
    # Include-deleted flag
    delFlg: Optional[str]
    # Institution address
    inst_addr: Optional[str]
    # Pagination page to restore when returning from the detail screen
    pagination_page_number: Optional[int]

    @classmethod
    def as_form(
        cls,
        ctrl_dcf_dsf_inst_cd: str = Form(None),
        ctrl_inst_div_cd: str = Form(None),
        ctrl_form_inst_name_kanji: str = Form(None),
        ctrl_form_inst_name_kana: str = Form(None),
        ctrl_postal_number: str = Form(None),
        ctrl_inst_phone_number: str = Form(None),
        ctrl_prefc_cd: str = Form(None),
        delFlg_ctrl: str = Form(None),
        ctrl_inst_addr: str = Form(None),
        pagination_page_number: str = Form(None)
    ):
        """Map the HTML form field names onto the model fields."""
        return cls(
            dcf_dsf_inst_cd=ctrl_dcf_dsf_inst_cd,
            inst_div_cd=ctrl_inst_div_cd,
            form_inst_name_kanji=ctrl_form_inst_name_kanji,
            form_inst_name_kana=ctrl_form_inst_name_kana,
            postal_number=ctrl_postal_number,
            inst_phone_number=ctrl_inst_phone_number,
            prefc_cd=ctrl_prefc_cd,
            delFlg=delFlg_ctrl,
            inst_addr=ctrl_inst_addr,
            pagination_page_number=pagination_page_number
        )
class UltmarcInstInfoModel(RequestBaseModel):
    """Form payload for the institution-detail page: posted ID list and page index."""

    # Comma-separated list of institution codes
    inst_id: Optional[str]
    # 0-based index of the institution currently displayed
    page_num: Optional[int]

    @classmethod
    def as_form(
        cls,
        inst_id: str = Form(None),
        page_num: str = Form(None)
    ):
        """Build the model from the posted form fields.

        page_num arrives as a string; it is converted to int, keeping None
        when the field was not posted (an unconditional int(page_num)
        raised TypeError on a missing field).
        """
        return cls(
            inst_id=inst_id,
            page_num=int(page_num) if page_num is not None else None
        )

View File

@ -7,13 +7,13 @@ from src.util.sanitize import sanitize
class BisDisplayModel(BioSalesViewModel):
def __init__(self, param: BioSalesViewModel) -> None:
super().__init__(**param.dict())
# 区分・フラグの正式名称を設定
self.slip_org_kbn = constants.SLIP_ORG_KBN_FULL_NAME.get(self.slip_org_kbn)
self.data_kbn = constants.DATA_KBN_JP_NAME.get(self.data_kbn)
self.lot_no_err_flg = constants.LOT_NO_ERR_FLG_JP_NAME.get(self.lot_no_err_flg)
# 訂正前伝票管理番号がセットされているときのみ修正日時、修正者、エラー詳細種別をセット
if (self.bef_slip_mgt_no is None):
if (self.bef_slip_mgt_num is None):
self.ins_dt = ""
self.ins_usr = ""

View File

@ -20,10 +20,10 @@ class BioViewModel(BaseModel):
phm_models: list[PharmacyProductMasterModel]
bio_data: Optional[list[BisDisplayModel]] = []
form_data: Optional[BioModel]
def display_wholesaler_names(self):
display_names = [
f'{whs_model.rec_whs_cd}-{whs_model.rec_whs_sub_cd}:{whs_model.nm}'
f'{whs_model.rec_whs_cd}-{whs_model.rec_whs_sub_cd}:{whs_model.name}'
for whs_model in self.whs_models
]
return display_names
@ -41,7 +41,7 @@ class BioViewModel(BaseModel):
def display_data_kbn(self):
return OrderedDict(
{
'' : '',
'': '',
'0': '正常',
'1': 'ロットエラー',
'3': 'ロット不明',
@ -59,14 +59,16 @@ class BioViewModel(BaseModel):
if not self.is_form_submitted():
return ''
form_wholesaler_full_name = f'{self.form_data.wholesaler_code}-{self.form_data.wholesaler_sub_code}:{self.form_data.wholesaler_name}'
form_wholesaler_full_name = \
f'{self.form_data.rec_whs_cd}-{self.form_data.rec_whs_sub_cd}:{self.form_data.whs_name}'
return self._selected_value(form_wholesaler_full_name, selected_wholesaler)
def is_selected_org_kbn(self, selected_org_kbn):
if not self.is_form_submitted():
return ''
return self._selected_value(self.form_data.org_kbn, selected_org_kbn)
return self._selected_value(self.form_data.slip_org_kbn, selected_org_kbn)
def is_input_rec_ymd_from(self):
if not self.is_form_submitted():
return ''
@ -84,18 +86,18 @@ class BioViewModel(BaseModel):
return ''
return self.form_data.rec_lot_num or ''
def is_selected_data_kbn(self, selected_data_kbn):
if not self.is_form_submitted():
return ''
return self._selected_value(self.form_data.data_kbn, selected_data_kbn)
def is_selected_maker_cd(self, selected_maker_cd):
if not self.is_form_submitted():
return ''
return self._selected_value(self.form_data.maker_cd, selected_maker_cd)
return self._selected_value(self.form_data.mkr_cd, selected_maker_cd)
def is_input_rev_hsdnymd_srk_from(self):
if not self.is_form_submitted():
@ -108,13 +110,13 @@ class BioViewModel(BaseModel):
return ''
return self._format_date_string(self.form_data.rev_hsdnymd_srk_to)
def is_checked_iko_flg(self):
if not self.is_form_submitted():
return ''
return 'checked' if self.form_data.ikoFlg else ''
return 'checked' if self.form_data.iko_flg else ''
def disabled_button(self):
return 'disabled' if self.is_data_empty() or self.is_data_overflow_max_length() else ''
@ -123,9 +125,9 @@ class BioViewModel(BaseModel):
def is_data_empty(self):
return len(self.bio_data) == 0
def is_data_overflow_max_length(self):
return len(self.bio_data) >= environment.BIO_SEARCH_RESULT_MAX_COUNT
return len(self.bio_data) > environment.BIO_SEARCH_RESULT_MAX_COUNT
def _format_date_string(self, date_string):
if date_string is None:

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class InstEmpCsvDownloadViewModel(BaseModel):
    """View model for the facility-staff CSV download page (title only)."""

    subtitle: str = '施設担当者データCSVダウンロード'

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class InstEmpCsvUploadViewModel(BaseModel):
    """View model for the facility-staff CSV upload page (title only)."""

    subtitle: str = '施設担当者データCSVアップロード'

View File

@ -7,4 +7,4 @@ class LogoutViewModel(BaseModel):
subtitle: str = 'MeDaCA Logout'
redirect_to: Optional[str]
reason: Optional[str]
link_text:Optional[str]
link_text: Optional[str]

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class MasterMainteMenuViewModel(BaseModel):
    """View model for the master-maintenance menu page (title only)."""

    subtitle: str = 'MeDaCA マスターメンテメニュー'

View File

@ -3,15 +3,20 @@ from typing import Optional
from pydantic import BaseModel
from src.model.view.user_view_model import UserViewModel
from src.system_var import constants
class MenuViewModel(BaseModel):
subtitle: str = 'MeDaCA 機能メニュー'
batch_status: Optional[str]
dump_status: Optional[str]
user_model: UserViewModel
def is_batch_processing(self):
return self.batch_status == '1'
return self.batch_status == constants.BATCH_STATUS_PROCESSING
def is_backup_processing(self):
return self.dump_status != constants.DUMP_STATUS_UNPROCESSED
def is_available_ult_doctor_menu(self):
return self.user_model.has_ult_doctor_permission()

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class TableOverrideViewModel(BaseModel):
    """View model for the table overwrite-copy page (title only)."""

    subtitle: str = 'テーブル上書きコピー'

View File

@ -0,0 +1,92 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
from src.model.db.ultmarc_doctor_info import UltmarcDoctorInfoDBModel
from src.model.db.ultmarc_doctor_wrkplace import UltmarcDoctorWrkplaceDBModel
from src.model.db.ultmarc_doctor_wrkplace_his import \
UltmarcDoctorWrkplaceHisDBModel
from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel
from src.model.db.ultmarc_specialist_license import \
UltmarcSpecialistLicenseDBModel
from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel
from src.system_var import environment
class UltmarcDoctorInfoViewModel(BaseModel):
    """View model for the Ultmarc doctor-detail page.

    Aggregates the doctor's core record with related rows (treatment courses,
    societies, specialist licenses, current and historical workplaces) plus
    the paging state posted from the search-result list.
    """

    subtitle: str = '医師情報'
    # True while a batch run is in progress (disables actions in the view)
    is_batch_processing: Optional[bool]
    # Core doctor record
    doctor_info_data: Optional[UltmarcDoctorInfoDBModel]
    # Related rows
    trt_coursed_data: Optional[list[UltmarcTrtCourseDBModel]]
    sosiety_data: Optional[list[UltmarcSosietyDBModel]]
    specialist_license_data: Optional[list[UltmarcSpecialistLicenseDBModel]]
    doctor_wrkplace_data: Optional[list[UltmarcDoctorWrkplaceDBModel]]
    doctor_wrkplace_his_data: Optional[list[UltmarcDoctorWrkplaceHisDBModel]]
    # Comma-separated doctor-ID list posted from the list page
    doc_id: Optional[str]
    # Total number of pages (= posted IDs)
    post_cnt: Optional[int]
    # Current page index (0-based)
    page_num: Optional[int]

    # Current page for display (1-based)
    def is_page_num_view(self):
        return self.page_num + 1

    # "Previous" button: disabled on the first page
    def is_disabled_prev(self):
        return 'disabled' if self.page_num == 0 else ''

    # "Next" button: disabled on the last page
    def is_disabled_next(self):
        if self.page_num == self.post_cnt - 1:
            return 'disabled'
        return ''

    # Birth date, formatted YYYY/MM/DD from the stored YYYYMMDD string
    def is_input_birthday_format(self):
        return self._format_date_string(self.doctor_info_data.birthday)

    # Workplace start date, formatted for display
    def is_input_aply_start_ymd_format(self, date_string):
        if date_string:
            return self._format_date(date_string)
        else:
            return ''

    # Historical workplace start date, formatted for display
    def is_input_his_aply_start_ymd_format(self, date_string):
        if date_string:
            return self._format_date(date_string)
        else:
            return ''

    # Historical workplace end date, formatted for display
    def is_input_his_aply_end_ymd_format(self, date_string):
        if date_string:
            return self._format_date(date_string)
        else:
            return ''

    def is_input_trt_course_data_size(self):
        return len(self.trt_coursed_data)

    # NOTE(review): the four methods below reference self.form_data /
    # self.doctor_data, which are NOT fields of this model — calling them
    # would raise AttributeError. They appear copy-pasted from the doctor
    # search-list view model; confirm whether they are dead code.
    def disabled_button(self):
        return 'disabled' if self.is_data_empty() or self.is_data_overflow_max_length() else ''

    def is_form_submitted(self):
        return self.form_data is not None

    def is_data_empty(self):
        return len(self.doctor_data) == 0

    def is_data_overflow_max_length(self):
        return len(self.doctor_data) > environment.ULTMARC_SEARCH_RESULT_MAX_COUNT

    # Format a YYYYMMDD string as YYYY/MM/DD ('' for None)
    def _format_date_string(self, date_string):
        if date_string is None:
            return ''
        date_str = datetime.strptime(date_string, '%Y%m%d')
        return date_str.strftime('%Y/%m/%d')

    # Format a date/datetime object as YYYY/MM/DD ('' for None)
    def _format_date(self, date_time):
        if date_time is None:
            return ''
        return date_time.strftime('%Y/%m/%d')

View File

@ -0,0 +1,116 @@
import json
from typing import Optional
from pydantic import BaseModel
from src.model.db.prefc_master import PrefcMasterModel
from src.model.db.ultmarc_doctor import UltmarcDoctorDBModel
from src.model.request.ultmarc_doctor import UltmarcDoctorSearchModel
from src.system_var import environment
class UltmarcDoctorViewModel(BaseModel):
    """View model for the Ultmarc doctor search-list page.

    Carries the prefecture options, the search results, and the previously
    submitted form so the template can re-populate every criterion field.
    """

    subtitle: str = '医師検索一覧'
    is_batch_processing: Optional[bool]
    prefc_models: list[PrefcMasterModel]
    doctor_data: Optional[list[UltmarcDoctorDBModel]] = []
    form_data: Optional[UltmarcDoctorSearchModel]

    def ultmarc_data_json_str(self):
        """Serialize the search results to a JSON array string (dates → ISO)."""
        def _jsonable(value):
            # date/datetime carry isoformat(); everything else passes through
            if hasattr(value, 'isoformat'):
                return value.isoformat()
            return value
        rows = [record.dict() for record in self.doctor_data]
        return json.dumps(rows, ensure_ascii=False, default=_jsonable)

    def init_pagination_page_number(self):
        """Pagination page: first page on a fresh search, remembered page on return from detail."""
        if self.is_form_submitted():
            return self.form_data.pagination_page_number or 1
        return 1

    def _form_field(self, attr_name):
        # '' before any search has been submitted; otherwise the remembered
        # criterion value ('' when it was left blank).
        if not self.is_form_submitted():
            return ''
        return getattr(self.form_data, attr_name) or ''

    # Doctor code
    def is_input_dcf_pcf_dr_cd(self):
        return self._form_field('dcf_pcf_dr_cd')

    # Doctor name (kanji)
    def is_input_dr_name(self):
        return self._form_field('dr_name')

    # Doctor name (kana)
    def is_input_dr_name_kana(self):
        return self._form_field('dr_name_kana')

    # Workplace code
    def is_input_dcf_dsf_inst_cd(self):
        return self._form_field('dcf_dsf_inst_cd')

    # Workplace name (kanji)
    def is_input_form_inst_name_kanji(self):
        return self._form_field('form_inst_name_kanji')

    # Workplace name (kana)
    def is_input_form_inst_name_kana(self):
        return self._form_field('form_inst_name_kana')

    # Workplace prefecture (select option state)
    def is_selected_prefc_cd(self, selected_prefc_cd):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.prefc_cd, selected_prefc_cd)

    def is_input_form_prefc_cd(self):
        return self._form_field('prefc_cd')

    # Belonging section (kanji)
    def is_input_blng_sec_name(self):
        return self._form_field('blng_sec_name')

    # Treatment course (kanji)
    def is_input_trt_course_name(self):
        return self._form_field('trt_course_name')

    # Alma mater (kanji)
    def is_input_alma(self):
        return self._form_field('alma')

    # Graduation year
    def is_input_grad_y(self):
        return self._form_field('grad_y')

    def disabled_button(self):
        blocked = self.is_data_empty() or self.is_data_overflow_max_length()
        return 'disabled' if blocked else ''

    def is_form_submitted(self):
        return self.form_data is not None

    def is_data_empty(self):
        return len(self.doctor_data) == 0

    def is_data_overflow_max_length(self):
        return len(self.doctor_data) > environment.ULTMARC_SEARCH_RESULT_MAX_COUNT

    def _selected_value(self, form_value: str, current_value: str):
        if form_value == current_value:
            return 'selected'
        return ''

View File

@ -0,0 +1,91 @@
from typing import Optional
from pydantic import BaseModel
from src.model.db.ultmarc_inst_info import UltmarcInstInfoDBModel
from src.model.db.ultmarc_inst_trt_course import UltmarcInstTrtCourseDBModel
from src.system_var import environment
class UltmarcInstInfoViewModel(BaseModel):
    """View model for the ULTMARC institution detail screen (施設情報).

    The ``is_checked_*`` helpers translate flag columns of the detail row
    into the HTML 'checked' attribute for read-only checkboxes.
    """
    subtitle: str = '施設情報'
    # Presumably True while a batch job locks the screen — confirm caller.
    is_batch_processing: Optional[bool]
    # Detail row of the institution being displayed.
    inst_info_data: Optional[UltmarcInstInfoDBModel]
    # Treatment-course rows of the institution.
    inst_trt_coursed_data: Optional[list[UltmarcInstTrtCourseDBModel]]
    # Number of doctors working at this institution (enables the button).
    doctor_wrkplace_count: Optional[int]
    inst_id: Optional[str]
    # post_cnt: total page count; page_num: current page index (0-based).
    post_cnt: Optional[int]
    page_num: Optional[int]

    # Unconfirmed (未確認)
    def is_checked_unconf_flg(self):
        return 'checked' if self.inst_info_data.unconf_flg == '1' else ''

    # Closed / suspended (休院店)
    def is_checked_close_flg(self):
        return 'checked' if self.inst_info_data.close_flg == '1' else ''

    # Scheduled to open (開業)
    def is_checked_estab_sche_flg(self):
        return 'checked' if self.inst_info_data.estab_sche_flg == '1' else ''

    # Address unknown (住所不明) — any non-empty reason code counts.
    def is_checked_addr_unknown_reason_cd(self):
        return 'checked' if self.inst_info_data.addr_unknown_reason_cd else ''

    # No phone number — original comment said 開業 (copy-paste); the flag
    # read here is phone_number_non_flg.
    def is_checked_phone_number_non_flg(self):
        return 'checked' if self.inst_info_data.phone_number_non_flg == '1' else ''

    # Re-examination classification (再審査区分) — any non-empty code counts.
    def is_checked_re_exam_cd(self):
        return 'checked' if self.inst_info_data.re_exam_cd else ''

    # Ward closed (病棟閉鎖)
    def is_checked_ward_abolish_flg(self):
        return 'checked' if self.inst_info_data.ward_abolish_flg == '1' else ''

    # Ward partially closed (一部病棟閉鎖)
    def is_checked_ward_abolish_flg_part(self):
        return 'checked' if self.inst_info_data.ward_abolish_flg == '2' else ''

    # Last-modified date (修正年月日): first 10 chars of the stringified
    # timestamp, i.e. the 'YYYY-MM-DD' part.
    def is_input_sys_update_date(self):
        sys_update_date = str(self.inst_info_data.sys_update_date)
        return sys_update_date[:10]

    # Working-doctors button: disabled when no doctor works here.
    def is_disabled_doctor_wrkplace(self):
        return 'disabled' if self.doctor_wrkplace_count == 0 else ''

    # Current page for display (1-based).
    def is_page_num_view(self):
        return self.page_num + 1

    # Previous-page button: disabled on the first page.
    def is_disabled_prev(self):
        return 'disabled' if self.page_num == 0 else ''

    # Next-page button: disabled on the last page.
    def is_disabled_next(self):
        if self.page_num == self.post_cnt - 1:
            return 'disabled'
        return ''

    # Number of treatment-course rows (0 when the list is absent).
    def is_input_inst_trt_course_data_size(self):
        if self.inst_trt_coursed_data is None:
            return 0
        return len(self.inst_trt_coursed_data)

    # NOTE(review): the four methods below reference self.form_data and
    # self.doctor_data, which are NOT fields of this model — they look
    # copy-pasted from the doctor list view model and would raise
    # AttributeError if ever called.  Confirm whether any template uses
    # them; if not, they should be removed.
    def disabled_button(self):
        return 'disabled' if self.is_data_empty() or self.is_data_overflow_max_length() else ''

    def is_form_submitted(self):
        return self.form_data is not None

    def is_data_empty(self):
        return len(self.doctor_data) == 0

    def is_data_overflow_max_length(self):
        return len(self.doctor_data) > environment.ULTMARC_SEARCH_RESULT_MAX_COUNT

View File

@ -0,0 +1,119 @@
import json
from typing import Optional
from pydantic import BaseModel
from src.model.db.inst_div_master import InstDivMasterModel
from src.model.db.prefc_master import PrefcMasterModel
from src.model.db.ultmarc_inst import UltmarcInstDBModel
from src.model.request.ultmarc_inst import UltmarcInstSearchModel
from src.system_var import environment
class UltmarcInstViewModel(BaseModel):
    """View model for the ULTMARC institution search/list screen (施設検索一覧).

    Holds the select-box master rows, the submitted form state and the
    search results.  ``is_input_*`` helpers echo submitted values back
    into the template; each returns '' before the first submit.
    """
    subtitle: str = '施設検索一覧'
    # Presumably True while a batch job locks the screen — confirm caller.
    is_batch_processing: Optional[bool]
    # Prefecture master rows for the prefecture select box.
    prefc_models: list[PrefcMasterModel]
    # Institution-division master rows for the division select box.
    inst_div_models: list[InstDivMasterModel]
    # Search results; pydantic copies the [] default per instance.
    inst_data: Optional[list[UltmarcInstDBModel]] = []
    # The submitted search form; None until the form is first posted.
    form_data: Optional[UltmarcInstSearchModel]

    def ultmarc_data_json_str(self):
        """Serialize the result rows to JSON; dates via isoformat()."""
        def date_handler(obj):
            return obj.isoformat() if hasattr(obj, 'isoformat') else obj
        return json.dumps([model.dict() for model in self.inst_data], ensure_ascii=False, default=date_handler)

    # Pagination page number:
    # a fresh search shows the first page; returning from the detail
    # screen restores the page the user was on.
    def init_pagination_page_number(self):
        if not self.is_form_submitted():
            return 1
        return self.form_data.pagination_page_number or 1

    # ULT institution code (ULT施設コード)
    def is_input_dcf_dsf_inst_cd(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.dcf_dsf_inst_cd or ''

    # Institution division (施設区分): 'selected' for the matching option.
    def is_selected_inst_div_cd(self, selected_inst_div_cd):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.inst_div_cd, selected_inst_div_cd)

    def is_input_form_inst_div_cd(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.inst_div_cd or ''

    # Institution name, kanji (ULT施設名(漢字))
    def is_input_form_inst_name_kanji(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.form_inst_name_kanji or ''

    # Institution name, kana (ULT施設名(かな・カナ))
    def is_input_form_inst_name_kana(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.form_inst_name_kana or ''

    # Postal code (郵便番号)
    def is_input_postal_number(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.postal_number or ''

    # Phone number (電話番号)
    def is_input_inst_phone_number(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.inst_phone_number or ''

    # "Show deleted institutions" checkbox (削除施設表示) — checked by
    # default on the first, non-submitted view.
    def is_checked_delFlg(self):
        if not self.is_form_submitted():
            return 'checked'
        return self._checked_value(self.form_data.delFlg)

    def is_input_delFlg(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.delFlg or ''

    # Institution address (ULT施設住所)
    def is_input_inst_addr(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.inst_addr or ''

    # Prefecture (勤務先都道府県): 'selected' for the matching option.
    def is_selected_prefc_cd(self, selected_prefc_cd):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.prefc_cd, selected_prefc_cd)

    def is_input_form_prefc_cd(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.prefc_cd or ''

    # 'disabled' attribute when there are no usable results.
    def disabled_button(self):
        return 'disabled' if self.is_data_empty() or self.is_data_overflow_max_length() else ''

    def is_form_submitted(self):
        """True once the search form has been posted."""
        return self.form_data is not None

    def is_data_empty(self):
        """True when the search returned no rows."""
        return len(self.inst_data) == 0

    def is_data_overflow_max_length(self):
        # NOTE(review): uses '>=' here while the doctor list view uses
        # '>' — at exactly MAX_COUNT results this screen reports overflow
        # and the doctor screen does not.  Confirm which is intended.
        return len(self.inst_data) >= environment.ULTMARC_SEARCH_RESULT_MAX_COUNT

    def _selected_value(self, form_value: str, current_value: str):
        """'selected' when the option value matches the submitted value."""
        return 'selected' if form_value == current_value else ''

    def _checked_value(self, form_value: str):
        """'checked' when the submitted checkbox value is truthy."""
        return 'checked' if form_value else ''

View File

@ -4,12 +4,12 @@ from pydantic import BaseModel
class UserViewModel(BaseModel):
bio_flg: str # AUTH_FLG1
doc_flg: str # AUTH_FLG2
inst_flg: str # AUTH_FLG3
master_mainte_flg: str # AUTH_FLG4
bio_flg: Optional[str] # AUTH_FLG1
doc_flg: Optional[str] # AUTH_FLG2
inst_flg: Optional[str] # AUTH_FLG3
master_mainte_flg: Optional[str] # AUTH_FLG4
user_flg: Optional[str] # MNTUSER_FLG
def has_ult_doctor_permission(self):
return self.doc_flg == '1'
@ -21,6 +21,3 @@ class UserViewModel(BaseModel):
def has_master_maintenance_permission(self):
return self.master_mainte_flg == '1'
def is_maintenance_user(self):
return self.user_flg == '1'

View File

@ -8,8 +8,9 @@ from src.model.db.base_db_model import BaseDBModel
class BaseRepository(metaclass=ABCMeta):
_database: Database
def __init__(self, db: Database) -> None:
self._database = db
@ -29,10 +30,9 @@ class BaseRepository(metaclass=ABCMeta):
"""DBの取得結果をデータフレームにして返す"""
pass
def _to_data_frame(self, query, parameter: BaseDBModel):
"""DBの取得結果をデータフレームに変換する"""
params = params=parameter.dict()
params = params = parameter.dict()
sql_query = pd.read_sql(
text(query),

View File

@ -1,10 +1,13 @@
from src.db import sql_condition as condition
from src.db.sql_condition import SQLCondition
from src.logging.get_logger import get_logger
from src.model.db.bio_sales_view import BioSalesViewModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
from src.util.string_util import is_not_empty
logger = get_logger('生物由来参照')
class BioSalesViewRepository(BaseRepository):
FETCH_SQL = """\
@ -12,19 +15,19 @@ class BioSalesViewRepository(BaseRepository):
(
CASE
WHEN LEFT(bs.v_tran_cd, 1) = 2
AND bs.amt >= 1 THEN CONCAT('-', bs.amt)
ELSE bs.amt
AND bs.qty >= 1 THEN CONCAT('-', bs.qty)
ELSE bs.qty
END
) AS amt_fugo,
bs.*,
ln.ser_no,
ln.ser_num,
ln.lot_num,
ln.expr_dt
FROM
src05.bio_sales_view bs
LEFT OUTER JOIN
src05.lot_num_mst ln
ON bs.mkr_cd = ln.ser_no
ON bs.mkr_cd = ln.ser_num
AND bs.rec_lot_num = ln.lot_num
WHERE
{where_clause}
@ -32,45 +35,40 @@ class BioSalesViewRepository(BaseRepository):
bs.rec_whs_cd,
bs.rec_whs_sub_cd,
bs.rev_hsdnymd_srk,
bs.slip_mgt_no
bs.slip_mgt_num
ASC\
"""
def fetch_many(self, parameter: BioModel) -> list[BioSalesViewModel]:
try:
self._database.connect()
logger.debug('DB参照実行')
where_clause = self.__build_condition(parameter)
# error_log(date("Y/m/d H:i:s") . " [INFO] DB Return=" . $result . "\r\n", 3, "$execLog");
# error_log(date("Y/m/d H:i:s") . " [INFO] DB参照実行" . "\r\n", 3, "$execLog");
query = self.FETCH_SQL.format(where_clause=where_clause)
# error_log(date("Y/m/d H:i:s") . " [INFO] SQL: " . $query . "\r\n", 3, "$execLog");
logger.debug(f'SQL: {query}')
result = self._database.execute_select(query, parameter.dict())
logger.debug(f'count= {len(result)}')
models = [BioSalesViewModel(**r) for r in result]
# error_log(date("Y/m/d H:i:s") . " [INFO] count=" . $count . "\r\n", 3, "$execLog");
return models
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] DB Error : Exception={e.args}")
logger.exception(f"DB Error : Exception={e.args}")
raise e
finally:
self._database.disconnect()
def fetch_as_data_frame(self, parameter: BioModel):
try:
self._database.connect()
logger.debug('DB参照実行')
where_clause = self.__build_condition(parameter)
# error_log(date("Y/m/d H:i:s") . " [INFO] DB Return=" . $result . "\r\n", 3, "$execLog");
# error_log(date("Y/m/d H:i:s") . " [INFO] DB参照実行" . "\r\n", 3, "$execLog");
query = self.FETCH_SQL.format(where_clause=where_clause)
# error_log(date("Y/m/d H:i:s") . " [INFO] SQL: " . $query . "\r\n", 3, "$execLog");
# models = [BioSalesViewModel(**r) for r in result]
# error_log(date("Y/m/d H:i:s") . " [INFO] count=" . $count . "\r\n", 3, "$execLog");
logger.debug(f'SQL: {query}')
df = self._to_data_frame(query, parameter)
return df
logger.debug(f'count= {len(df.index)}')
# ログ出力のため、クエリも返却
return df, query
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] DB Error : Exception={e.args}")
logger.exception(f"DB Error : Exception={e.args}")
raise e
finally:
self._database.disconnect()
@ -79,12 +77,12 @@ class BioSalesViewRepository(BaseRepository):
where_clauses: list[SQLCondition] = []
# 卸(コード/サブコード)
if is_not_empty(parameter.wholesaler_code) and is_not_empty(parameter.wholesaler_sub_code):
where_clauses.append(SQLCondition('rec_whs_cd', condition.EQ, 'wholesaler_code'))
where_clauses.append(SQLCondition('rec_whs_sub_cd', condition.EQ, 'wholesaler_sub_code'))
if is_not_empty(parameter.rec_whs_cd) and is_not_empty(parameter.rec_whs_sub_cd):
where_clauses.append(SQLCondition('rec_whs_cd', condition.EQ, 'rec_whs_cd'))
where_clauses.append(SQLCondition('rec_whs_sub_cd', condition.EQ, 'rec_whs_sub_cd'))
# データ種別
if is_not_empty(parameter.org_kbn):
where_clauses.append(SQLCondition('slip_org_kbn', condition.EQ, 'org_kbn'))
if is_not_empty(parameter.slip_org_kbn):
where_clauses.append(SQLCondition('slip_org_kbn', condition.EQ, 'slip_org_kbn'))
# 処理日 開始日
if is_not_empty(parameter.rec_ymd_from):
where_clauses.append(SQLCondition('rec_ymd', condition.GE, 'rec_ymd_from'))
@ -95,14 +93,14 @@ class BioSalesViewRepository(BaseRepository):
if is_not_empty(parameter.rec_lot_num):
rec_lot_num = parameter.rec_lot_num
# あいまい検索文字列('%')が含まれる場合は'LIKE'、でなければ'='で検索
rec_lot_num_comparator = condition.LIKE if rec_lot_num in '%' else condition.EQ
rec_lot_num_comparator = condition.LIKE if rec_lot_num in '%' else condition.EQ
where_clauses.append(SQLCondition('rec_lot_num', rec_lot_num_comparator, 'rec_lot_num'))
# データ区分
if is_not_empty(parameter.data_kbn):
where_clauses.append(SQLCondition('data_kbn', condition.EQ, 'data_kbn'))
# 製品
if is_not_empty(parameter.maker_cd):
where_clauses.append(SQLCondition('mkr_cd', condition.EQ, 'maker_cd'))
if is_not_empty(parameter.mkr_cd):
where_clauses.append(SQLCondition('mkr_cd', condition.EQ, 'mkr_cd'))
# 発伝年月日 開始日
if is_not_empty(parameter.rev_hsdnymd_srk_from):
where_clauses.append(SQLCondition('rev_hsdnymd_srk', condition.GE, 'rev_hsdnymd_srk_from'))
@ -111,12 +109,12 @@ class BioSalesViewRepository(BaseRepository):
where_clauses.append(SQLCondition('rev_hsdnymd_srk', condition.LE, 'rev_hsdnymd_srk_to'))
# 移行フラグ
# チェックが入っていない場合、移行対象(IKO_FLG = '*')を省く
if parameter.ikoFlg is None:
where_clauses.append(SQLCondition('iko_flg', condition.IS, 'NULL', literal=True))
if parameter.iko_flg is None:
where_clauses.append(SQLCondition('iko_flg', condition.IS, 'NULL', literal=True))
# 固定条件
# Viewで返されるロット番号9件をNull以外で抽出
where_clauses.append(SQLCondition('LENGTH(TRIM(rec_lot_num))', condition.GT, '0', literal=True))
where_clauses_str = ' AND '.join([condition.apply() for condition in where_clauses])
# error_log(date("Y/m/d H:i:s") . " [INFO] 条件設定終了:" . $szConditions . "\r\n", 3, "$execLog");
logger.debug(f'条件設定終了:{where_clauses_str}')
return where_clauses_str

View File

@ -1,10 +1,12 @@
from src.logging.get_logger import get_logger
from src.model.db.hdke_tbl import HdkeTblModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
logger = get_logger('日付テーブル取得')
class HdkeTblRepository(BaseRepository):
FETCH_SQL = "SELECT bch_actf FROM src05.hdke_tbl"
FETCH_SQL = "SELECT bch_actf, dump_sts_kbn FROM src05.hdke_tbl"
def fetch_all(self) -> list[HdkeTblModel]:
try:
@ -14,8 +16,7 @@ class HdkeTblRepository(BaseRepository):
models = [HdkeTblModel(**r) for r in result]
return models
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] DB Error : Exception={e.args}")
logger.exception(f"DB Error : Exception={e}")
raise e
finally:
self._database.disconnect()

View File

@ -0,0 +1,29 @@
from src.model.db.inst_div_master import InstDivMasterModel
from src.repositories.base_repository import BaseRepository
class InstDivMasterRepository(BaseRepository):
    """Read-only repository for institution-division (施設区分) master rows.

    Only divisions actually referenced by at least one institution are
    returned (INNER JOIN against ``com_inst``) — the rows feed the search
    screen's division select box.
    """

    FETCH_SQL = """\
    SELECT
        DISTINCT com_inst_div.inst_div_cd AS inst_div_cd,
        com_inst_div.inst_div_name AS inst_div_name
    FROM src05.com_inst
    JOIN src05.com_inst_div on com_inst.inst_div_cd = com_inst_div.inst_div_cd
    ORDER BY com_inst_div.inst_div_cd
    """

    def fetch_all(self) -> list[InstDivMasterModel]:
        """Fetch every in-use institution division, ordered by code.

        Raises:
            Exception: re-raised after reporting when the query fails.
        """
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_SQL)
            # Build the models straight from the cursor rows; the former
            # intermediate copy ([res for res in result]) was redundant.
            return [InstDivMasterModel(**row) for row in result]
        except Exception as e:
            # TODO: route through the shared logger instead of print.
            # Message fixed: it previously claimed "getOroshiData", a
            # copy-paste from an unrelated repository.
            print("[ERROR] InstDivMasterRepository.fetch_all DB Error.")
            print(f"[ERROR] ErrorMessage: {e.args}")
            raise
        finally:
            self._database.disconnect()

View File

@ -1,26 +1,29 @@
from src.logging.get_logger import get_logger
from src.model.db.pharmacy_product_master import PharmacyProductMasterModel
from src.repositories.base_repository import BaseRepository
logger = get_logger('製品取得')
class PharmacyProductMasterRepository(BaseRepository):
FETCH_SQL = """\
SELECT
CONCAT(IFNULL(mkr_cd, ''), ' ', IFNULL(mkr_inf_1, '')) AS mkr_cd_nm
SELECT
CONCAT(IFNULL(t1.mkr_cd, ''), ' ', IFNULL(t1.mkr_inf_1, '')) AS mkr_cd_name
FROM
src05.phm_prd_mst_v t1
INNER JOIN
INNER JOIN
(
SELECT
prd_cd,MAX(sub_no) AS sno
SELECT
prd_cd, MAX(sub_num) AS sno
FROM
src05.phm_prd_mst_v
WHERE rec_sts_kbn <> '9'
GROUP BY prd_cd
) fmv2
ON t1.prd_cd = fmv2.prd_cd AND t1.sub_no = fmv2.sno
ON t1.prd_cd = fmv2.prd_cd AND t1.sub_num = fmv2.sno
WHERE
mkr_cd IS NOT NULL
t1.mkr_cd IS NOT NULL
ORDER BY mkr_cd
"""
@ -31,9 +34,7 @@ class PharmacyProductMasterRepository(BaseRepository):
models = [PharmacyProductMasterModel(**r) for r in result]
return models
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] getOroshiData DB Error. ")
print(f"[ERROR] ErrorMessage: {e.args}")
logger.exception(f"DB Error : Exception={e}")
raise e
finally:
self._database.disconnect()

View File

@ -0,0 +1,31 @@
from src.model.db.prefc_master import PrefcMasterModel
from src.repositories.base_repository import BaseRepository
class PrefcMasterRepository(BaseRepository):
    """Read-only repository for prefecture (都道府県) master rows.

    Only prefectures referenced by at least one institution are returned
    (JOIN against ``com_inst``) — the rows feed the search screens'
    prefecture select boxes.
    """

    FETCH_SQL = """\
    SELECT DISTINCT
        com_inst.prefc_cd AS prefc_cd,
        mst_prefc.prefc_name AS prefc_name
    FROM
        src05.com_inst
    JOIN src05.mst_prefc ON com_inst.prefc_cd = mst_prefc.prefc_cd
    ORDER BY
        mst_prefc.prefc_cd
    """

    def fetch_all(self) -> list[PrefcMasterModel]:
        """Fetch every in-use prefecture, ordered by prefecture code.

        Raises:
            Exception: re-raised after reporting when the query fails.
        """
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_SQL)
            # Build the models straight from the cursor rows; the former
            # intermediate copy ([res for res in result]) was redundant.
            return [PrefcMasterModel(**row) for row in result]
        except Exception as e:
            # TODO: route through the shared logger instead of print.
            # Message fixed: it previously claimed "getOroshiData", a
            # copy-paste from an unrelated repository.
            print("[ERROR] PrefcMasterRepository.fetch_all DB Error.")
            print(f"[ERROR] ErrorMessage: {e.args}")
            raise
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,185 @@
from src.db import sql_condition as condition
from src.db.sql_condition import SQLCondition
from src.model.db.ultmarc_doctor import UltmarcDoctorDBModel
from src.model.db.ultmarc_doctor_info import UltmarcDoctorInfoDBModel
from src.model.request.ultmarc_doctor import UltmarcDoctorSearchModel
from src.repositories.base_repository import BaseRepository
from src.util.string_util import is_not_empty
import mojimoji
class UltmarcDoctorRepository(BaseRepository):
    """Repository for ULTMARC doctor (医師) search and detail queries."""

    # Doctor search: one row per doctor x workplace x department; the
    # department's treatment courses are concatenated into one column.
    FETCH_SQL = """\
    SELECT
        com_dr.dcf_pcf_dr_cd,
        com_dr.dr_name,
        com_inst.form_inst_name_kanji,
        com_inst.dcf_dsf_inst_cd,
        com_blng_sec.blng_sec_name,
        GROUP_CONCAT(com_trt_course.trt_course_name separator ' / ') AS trt_course_name,
        com_post.form_post_name,
        com_alma.alma,
        com_dr.grad_y,
        mst_prefc.prefc_name,
        com_dr_wrkplace.blng_sec_cd
    FROM
        src05.com_dr
        LEFT JOIN src05.mst_prefc ON com_dr.prefc_cd = mst_prefc.prefc_cd
        LEFT JOIN src05.com_dr_wrkplace ON com_dr.dcf_pcf_dr_cd = com_dr_wrkplace.dcf_pcf_dr_cd
        LEFT JOIN src05.com_inst ON com_dr_wrkplace.dcf_dsf_inst_cd = com_inst.dcf_dsf_inst_cd
        LEFT JOIN src05.com_blng_sec ON com_dr_wrkplace.blng_sec_cd = com_blng_sec.blng_sec_cd
        LEFT JOIN src05.com_dr_trt_course ON com_dr.dcf_pcf_dr_cd = com_dr_trt_course.dcf_pcf_dr_cd
        LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd
        LEFT JOIN src05.com_post ON com_dr_wrkplace.post_cd = com_post.post_cd
        LEFT JOIN src05.com_alma ON com_dr.alma_cd = com_alma.alma_cd
    WHERE
        {where_clause}
    GROUP BY com_dr.dcf_pcf_dr_cd, com_inst.dcf_dsf_inst_cd, com_blng_sec.blng_sec_cd
    ORDER BY
        com_dr.dcf_pcf_dr_cd,
        com_dr_wrkplace.dcf_dsf_inst_cd,
        com_dr_wrkplace.blng_sec_cd,
        com_dr_trt_course.trt_course_cd
    \
    """

    def fetch_many(self, parameter: UltmarcDoctorSearchModel) -> list[UltmarcDoctorDBModel]:
        """Search doctors with the submitted form criteria.

        A copy of *parameter* is taken first because __build_condition
        rewrites the string fields into LIKE patterns in place.

        Raises:
            Exception: re-raised after reporting when the query fails.
        """
        try:
            self._database.connect()
            clone_parameter = UltmarcDoctorSearchModel(**parameter.dict())
            where_clause = self.__build_condition(clone_parameter)
            query = self.FETCH_SQL.format(where_clause=where_clause)
            result = self._database.execute_select(query, clone_parameter.dict())
            return [UltmarcDoctorDBModel(**row) for row in result]
        except Exception as e:
            # TODO: route through the shared logger instead of print.
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise
        finally:
            self._database.disconnect()

    @staticmethod
    def _to_hankaku_katakana(text: str) -> str:
        """Normalize kana input for LIKE searches.

        Hiragana code points 12353-12438 (ぁ..ゖ) plus 12445/12446 (ゝゞ)
        are shifted by 96 into the katakana block, then the whole string
        is converted full-width -> half-width with mojimoji.  Extracted
        because the identical expression was duplicated for the doctor
        name and the workplace name.
        """
        zenkaku_katakana = ''.join(
            chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n)
            for n in (ord(c) for c in text)
        )
        return mojimoji.zen_to_han(zenkaku_katakana)

    def __build_condition(self, parameter: UltmarcDoctorSearchModel):
        """Build the WHERE clause for fetch_many.

        Side effect: rewrites *parameter*'s string fields into LIKE
        patterns (callers pass a clone for exactly this reason).

        NOTE(review): with no criteria at all the returned clause is ''
        and the formatted SQL is malformed — presumably the caller
        guarantees at least one input; confirm.
        """
        where_clauses: list[SQLCondition] = []
        # Doctor code — always partial match
        if is_not_empty(parameter.dcf_pcf_dr_cd):
            parameter.dcf_pcf_dr_cd = f'%{parameter.dcf_pcf_dr_cd}%'
            where_clauses.append(SQLCondition('com_dr.dcf_pcf_dr_cd', condition.LIKE, 'dcf_pcf_dr_cd'))
        # Name (kanji) — always partial match
        if is_not_empty(parameter.dr_name):
            parameter.dr_name = f'%{parameter.dr_name}%'
            where_clauses.append(SQLCondition('dr_name', condition.LIKE, 'dr_name'))
        # Name (kana) — normalized to half-width katakana, partial match
        if is_not_empty(parameter.dr_name_kana):
            parameter.dr_name_kana = f'%{self._to_hankaku_katakana(parameter.dr_name_kana)}%'
            where_clauses.append(SQLCondition('dr_name_kana', condition.LIKE, 'dr_name_kana'))
        # Workplace code — always partial match
        if is_not_empty(parameter.dcf_dsf_inst_cd):
            parameter.dcf_dsf_inst_cd = f'%{parameter.dcf_dsf_inst_cd}%'
            where_clauses.append(SQLCondition(
                'com_inst.dcf_dsf_inst_cd', condition.LIKE, 'dcf_dsf_inst_cd'))
        # Workplace name (kanji) — always partial match
        if is_not_empty(parameter.form_inst_name_kanji):
            parameter.form_inst_name_kanji = f'%{parameter.form_inst_name_kanji}%'
            where_clauses.append(SQLCondition(
                'form_inst_name_kanji', condition.LIKE, 'form_inst_name_kanji'))
        # Workplace name (kana) — normalized to half-width katakana, partial match
        if is_not_empty(parameter.form_inst_name_kana):
            parameter.form_inst_name_kana = f'%{self._to_hankaku_katakana(parameter.form_inst_name_kana)}%'
            where_clauses.append(SQLCondition(
                'form_inst_name_kana', condition.LIKE, 'form_inst_name_kana'))
        # Workplace prefecture — exact match
        if is_not_empty(parameter.prefc_cd):
            where_clauses.append(SQLCondition('com_inst.prefc_cd', condition.EQ, 'prefc_cd'))
        # Department (kanji) — always partial match
        if is_not_empty(parameter.blng_sec_name):
            parameter.blng_sec_name = f'%{parameter.blng_sec_name}%'
            where_clauses.append(SQLCondition(
                'com_blng_sec.blng_sec_name', condition.LIKE, 'blng_sec_name'))
        # Treatment course (kanji) — always partial match
        if is_not_empty(parameter.trt_course_name):
            parameter.trt_course_name = f'%{parameter.trt_course_name}%'
            where_clauses.append(SQLCondition('trt_course_name', condition.LIKE, 'trt_course_name'))
        # Alma mater (kanji) — always partial match
        if is_not_empty(parameter.alma):
            parameter.alma = f'%{parameter.alma}%'
            where_clauses.append(SQLCondition('alma', condition.LIKE, 'alma'))
        # Graduation year — always partial match
        if is_not_empty(parameter.grad_y):
            parameter.grad_y = f'%{parameter.grad_y}%'
            where_clauses.append(SQLCondition('grad_y', condition.LIKE, 'grad_y'))
        # Exclude abolished institutions and retired doctors whenever any
        # criterion was given.
        if where_clauses:
            where_clauses.append(SQLCondition(
                '', '', '(length(com_inst.abolish_ymd) = 0 OR com_inst.abolish_ymd IS NULL)', literal=True))
            where_clauses.append(SQLCondition(
                '', '', '(length(com_dr.abolish_ymd) = 0 OR com_dr.abolish_ymd IS NULL)', literal=True))
        # Loop variable renamed from 'condition', which shadowed the
        # imported sql_condition module inside the comprehension.
        return ' AND '.join(clause.apply() for clause in where_clauses)

    # Doctor detail: one row joined to sex / alma mater / hometown masters.
    FETCH_ONE_SQL = """\
    SELECT
        com_dr.dcf_pcf_dr_cd,
        com_dr.dr_name,
        com_dr.dr_name_kana,
        com_sex.sex,
        com_dr.birthday,
        com_alma.alma,
        com_hometown.hometown,
        com_dr.grad_y,
        com_dr.drday_y,
        com_dr.estab_y
    FROM src05.com_dr
        LEFT JOIN src05.com_sex ON com_dr.sex_cd = com_sex.sex_cd
        LEFT JOIN src05.com_alma ON com_dr.alma_cd = com_alma.alma_cd
        LEFT JOIN src05.com_hometown ON com_dr.hometown_cd = com_hometown.hometown_cd
    WHERE dcf_pcf_dr_cd = :id
    """

    def fetch_one(self, id) -> UltmarcDoctorInfoDBModel:
        """Fetch the detail row for doctor *id*; None when not found.

        Raises:
            Exception: re-raised after reporting when the query fails.
        """
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_ONE_SQL, {'id': id})
            models = [UltmarcDoctorInfoDBModel(**row) for row in result]
            return models[0] if models else None
        except Exception as e:
            # TODO: route through the shared logger instead of print.
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,42 @@
from src.model.db.ultmarc_doctor_wrkplace_his import \
UltmarcDoctorWrkplaceHisDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcDoctorWrkplaceHisRepository(BaseRepository):
    """Repository for a doctor's workplace history (com_dr_wrkplace_his)."""

    # History rows for one doctor, most recently ended/started first.
    FETCH_SQL = """\
    SELECT
        com_inst.dcf_dsf_inst_cd,
        com_inst.inst_name_kanji,
        com_blng_sec.blng_sec_name,
        univ_post.form_post_name as univ_post_name,
        post.form_post_name as post_name,
        com_dr_wrkplace_his.aply_start_ymd,
        com_dr_wrkplace_his.aply_end_ymd
    FROM com_dr
        LEFT JOIN com_dr_wrkplace_his ON com_dr.dcf_pcf_dr_cd = com_dr_wrkplace_his.dcf_pcf_dr_cd
        LEFT JOIN com_inst ON com_dr_wrkplace_his.dcf_dsf_inst_cd = com_inst.dcf_dsf_inst_cd
        LEFT JOIN com_blng_sec ON com_dr_wrkplace_his.blng_sec_cd = com_blng_sec.blng_sec_cd
        LEFT JOIN com_post as univ_post ON com_dr_wrkplace_his.identity_cd = univ_post.post_cd
        LEFT JOIN com_post as post ON com_dr_wrkplace_his.post_cd = post.post_cd
    WHERE com_dr.dcf_pcf_dr_cd = :id
    ORDER BY com_dr_wrkplace_his.aply_end_ymd DESC,
        com_dr_wrkplace_his.aply_start_ymd DESC
    """

    def fetch_many(self, id) -> list[UltmarcDoctorWrkplaceHisDBModel]:
        """Fetch workplace-history rows for doctor *id*.

        Returns None (not an empty list) when no history exists, matching
        the original contract relied on by callers.
        """
        try:
            self._database.connect()
            rows = self._database.execute_select(self.FETCH_SQL, {'id': id})
            history = [UltmarcDoctorWrkplaceHisDBModel(**row) for row in rows]
            if not history:
                return None
            return history
        except Exception as e:
            # TODO: write errors through the logger instead of print
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,63 @@
from src.model.db.ultmarc_doctor_wrkplace import UltmarcDoctorWrkplaceDBModel
from src.model.db.ultmarc_doctor_wrkplace_count import \
UltmarcDoctorWrkplaceCountDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcDoctorWrkplaceRepository(BaseRepository):
    """Repository around com_dr_wrkplace: a doctor's current workplaces,
    plus a doctor head-count for one institution."""

    # Current workplaces for one doctor, newest assignment first.
    FETCH_SQL = """\
    SELECT
        com_inst.dcf_dsf_inst_cd,
        com_inst.inst_name_kanji,
        com_blng_sec.blng_sec_name,
        univ_post.form_post_name AS univ_post_name,
        post.form_post_name AS post_name,
        com_dr_wrkplace.aply_start_ymd
    FROM src05.com_dr
        LEFT JOIN src05.com_dr_wrkplace ON com_dr.dcf_pcf_dr_cd = com_dr_wrkplace.dcf_pcf_dr_cd
        LEFT JOIN src05.com_inst ON com_dr_wrkplace.dcf_dsf_inst_cd = com_inst.dcf_dsf_inst_cd
        LEFT JOIN src05.com_blng_sec ON com_dr_wrkplace.blng_sec_cd = com_blng_sec.blng_sec_cd
        LEFT JOIN src05.com_post as univ_post ON com_dr_wrkplace.identity_cd = univ_post.post_cd
        LEFT JOIN src05.com_post as post ON com_dr_wrkplace.post_cd = post.post_cd
    WHERE com_dr.dcf_pcf_dr_cd = :id
    ORDER BY com_dr_wrkplace.aply_start_ymd DESC
    """

    def fetch_many(self, id) -> list[UltmarcDoctorWrkplaceDBModel]:
        """Fetch the current-workplace rows for doctor *id*.

        NOTE(review): returns None (not []) when no rows match, despite
        the list annotation — callers must handle None.
        """
        try:
            self._database.connect()
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcDoctorWrkplaceDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # TODO: write errors through the logger instead of print
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

    # Head-count of doctors registered at one institution.  Note the :id
    # here is an institution code, unlike fetch_many's doctor code.
    FETCH_COUNT_SQL = """\
    SELECT COUNT(*) AS count
    FROM src05.com_dr_wrkplace
    WHERE dcf_dsf_inst_cd = :id
    """

    def fetch_count(self, id) -> UltmarcDoctorWrkplaceCountDBModel:
        """Count workplace rows for institution *id*; 0 when none.

        NOTE(review): despite the annotation this returns an int
        (models[0].count, or the literal 0), not the model itself.
        """
        try:
            self._database.connect()
            query = self.FETCH_COUNT_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcDoctorWrkplaceCountDBModel(**r) for r in result]
            if len(models) == 0:
                return 0
            return models[0].count
        except Exception as e:
            # TODO: write errors through the logger instead of print
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,194 @@
from src.db import sql_condition as condition
from src.db.sql_condition import SQLCondition
from src.model.db.ultmarc_inst import UltmarcInstDBModel
from src.model.db.ultmarc_inst_info import UltmarcInstInfoDBModel
from src.model.request.ultmarc_inst import UltmarcInstSearchModel
from src.repositories.base_repository import BaseRepository
from src.util.string_util import is_not_empty
import mojimoji
class UltmarcInstRepository(BaseRepository):
    """Repository for Ultmarc institution (``src05.com_inst``) list search
    and single-institution detail lookup."""

    # List-search SQL; {where_clause} is substituted by __build_condition().
    FETCH_SQL = """\
SELECT
    dcf_dsf_inst_cd,
    abolish_ymd,
    delete_sche_reason_cd,
    form_inst_name_kanji,
    inst_addr,
    postal_number,
    inst_phone_number,
    inst_div_name,
    hp_assrt_name,
    prefc_name
FROM src05.com_inst
LEFT JOIN src05.mst_prefc ON com_inst.prefc_cd = mst_prefc.prefc_cd
LEFT JOIN src05.com_inst_div ON com_inst.inst_div_cd = com_inst_div.inst_div_cd
LEFT JOIN src05.com_hp_assrt ON com_inst.hp_assrt_cd = com_hp_assrt.hp_assrt_cd
WHERE {where_clause}
ORDER BY dcf_dsf_inst_cd
\
"""

    def fetch_many(self, parameter: UltmarcInstSearchModel) -> list[UltmarcInstDBModel]:
        """Search institutions matching the conditions in *parameter*.

        Args:
            parameter: Search conditions; empty fields are skipped.

        Returns:
            Matching rows ordered by institution code (empty list when none).

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            # Copy the model so string conditions can be rewritten into
            # partial-match (LIKE) patterns; the copy is used from here on,
            # leaving the caller's model untouched.
            clone_parameter = UltmarcInstSearchModel(**parameter.dict())
            where_clause = self.__build_condition(clone_parameter)
            query = self.FETCH_SQL.format(where_clause=where_clause)
            result = self._database.execute_select(query, clone_parameter.dict())
            models = [UltmarcInstDBModel(**r) for r in result]
            return models
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

    def __build_condition(self, parameter: UltmarcInstSearchModel) -> str:
        """Build the WHERE clause for FETCH_SQL from *parameter*.

        Side effect: rewrites the string fields of *parameter* in place into
        LIKE patterns (``%value%`` / ``value%``), so the same (cloned) model
        can be passed as the bind-parameter dict.
        """
        where_clauses: list[SQLCondition] = []
        # ULT institution code
        if is_not_empty(parameter.dcf_dsf_inst_cd):
            # partial-match search
            parameter.dcf_dsf_inst_cd = f'%{parameter.dcf_dsf_inst_cd}%'
            where_clauses.append(SQLCondition('com_inst.dcf_dsf_inst_cd', condition.LIKE, 'dcf_dsf_inst_cd'))
        # institution division
        if is_not_empty(parameter.inst_div_cd):
            where_clauses.append(SQLCondition('com_inst.inst_div_cd', condition.EQ, 'inst_div_cd'))
        # ULT institution name (kanji)
        if is_not_empty(parameter.form_inst_name_kanji):
            # partial-match search
            parameter.form_inst_name_kanji = f'%{parameter.form_inst_name_kanji}%'
            where_clauses.append(SQLCondition('form_inst_name_kanji', condition.LIKE, 'form_inst_name_kanji'))
        # ULT institution name (kana)
        if is_not_empty(parameter.form_inst_name_kana):
            # partial-match search
            # Convert hiragana to full-width katakana: hiragana code points
            # (0x3041-0x3096, i.e. 12353-12438) plus the iteration marks
            # ゝ/ゞ (12445/12446) are shifted by 96 (0x60) into the katakana
            # block; all other characters pass through unchanged.
            zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n)
                                        for n in [ord(c) for c in parameter.form_inst_name_kana]])
            # Convert full-width katakana to half-width katakana
            hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana)
            parameter.form_inst_name_kana = f'%{hankaku_katakana}%'
            where_clauses.append(SQLCondition('form_inst_name_kana', condition.LIKE, 'form_inst_name_kana'))
        # postal number
        if is_not_empty(parameter.postal_number):
            # prefix-match search
            parameter.postal_number = f'{parameter.postal_number}%'
            where_clauses.append(SQLCondition('postal_number', condition.LIKE, 'postal_number'))
        # phone number
        if is_not_empty(parameter.inst_phone_number):
            # prefix-match search
            parameter.inst_phone_number = f'{parameter.inst_phone_number}%'
            where_clauses.append(SQLCondition('inst_phone_number', condition.LIKE, 'inst_phone_number'))
        # workplace prefecture
        if is_not_empty(parameter.prefc_cd):
            where_clauses.append(SQLCondition('com_inst.prefc_cd', condition.EQ, 'prefc_cd'))
        # ULT institution address
        if is_not_empty(parameter.inst_addr):
            # partial-match search
            parameter.inst_addr = f'%{parameter.inst_addr}%'
            where_clauses.append(SQLCondition('inst_addr', condition.LIKE, 'inst_addr'))
        # show-deleted flag: when NOT set, restrict to non-abolished records
        # NOTE(review): field name `delFlg` breaks the snake_case convention
        # used by every other field — presumably kept for request compat.
        if is_not_empty(parameter.delFlg) is False:
            # literal predicate (OR of the two "not abolished" cases)
            where_clauses.append(SQLCondition('', '', '(length(abolish_ymd) = 0 OR abolish_ymd IS NULL)', literal=True))
        # NOTE(review): the loop variable `condition` shadows the imported
        # `sql_condition` module within this comprehension (harmless here).
        where_clauses_str = ' AND '.join([condition.apply() for condition in where_clauses])
        return where_clauses_str

    # Detail SQL for a single institution, joining code tables for display
    # names and the special-care office table for the parent-institution code.
    FETCH_ONE_SQL = """\
SELECT
    com_inst.dcf_dsf_inst_cd,
    com_inst.unconf_flg,
    com_inst.dup_opp_cd,
    com_inst.close_start_ym,
    com_inst.close_flg,
    com_inst.delete_sche_reason_cd,
    com_inst.abolish_ymd,
    com_inst.estab_sche_ym,
    com_inst.estab_sche_flg,
    com_inst.form_inst_name_kana,
    com_inst.form_inst_name_kanji,
    com_inst.inst_name_kana,
    com_inst.inst_name_kanji,
    com_inst.manage_cd,
    com_inst.postal_number,
    com_inst.inst_phone_number,
    com_inst.addr_unknown_reason_cd,
    com_inst.phone_number_non_flg,
    com_inst.inst_addr_kana,
    com_inst.inst_addr,
    com_inst.re_exam_cd,
    com_inst.rltd_univ_prnt_cd,
    com_inst.insp_item_micrb,
    com_inst.insp_item_serum,
    com_inst.insp_item_blood,
    com_inst.insp_item_patho,
    com_inst.insp_item_paras,
    com_inst.insp_item_biochem,
    com_inst.insp_item_ri,
    com_inst.prmit_bed_num_gen,
    com_inst.prmit_bed_num_rcup,
    com_inst.prmit_bed_num_mental,
    com_inst.prmit_bed_num_infection,
    com_inst.prmit_bed_num_tuber,
    com_inst.prmit_bed_num_other,
    com_inst.prmit_bed_num_sum,
    com_inst.ward_abolish_flg,
    com_inst.bed_num,
    com_inst.prmit_bed_maint_ymd,
    com_inst.inst_repre_cd,
    com_inst.inst_repre_kana,
    com_inst.inst_repre,
    com_inst.sys_update_date,
    com_inst_delete_sche_reason.delete_sche_reason,
    com_inst_div.inst_div_name,
    com_manage.manage_name,
    com_hp_assrt.hp_assrt_name,
    parent_inst.form_inst_name_kanji as parent_name,
    com_spcare_med_office_dat.dcf_prnt_inst_cd
FROM src05.com_inst
LEFT JOIN src05.com_inst_div ON com_inst.inst_div_cd = com_inst_div.inst_div_cd
LEFT JOIN src05.com_inst_delete_sche_reason ON
    com_inst.delete_sche_reason_cd = com_inst_delete_sche_reason.delete_sche_reason_cd
LEFT JOIN src05.com_manage ON com_inst.manage_cd = com_manage.manage_cd
LEFT JOIN src05.com_inst_addr_unknown_reason ON
    com_inst.addr_unknown_reason_cd = com_inst_addr_unknown_reason.addr_unknown_reason_cd
LEFT JOIN src05.com_hp_assrt ON com_hp_assrt.hp_assrt_cd = com_inst.hp_assrt_cd
LEFT JOIN src05.com_re_exam ON com_inst.re_exam_cd = com_re_exam.re_exam_cd
LEFT JOIN src05.com_spcare_med_office_dat ON
    com_inst.dcf_dsf_inst_cd = com_spcare_med_office_dat.dcf_chld_inst_cd
LEFT JOIN src05.com_inst as parent_inst ON com_inst.rltd_univ_prnt_cd = parent_inst.dcf_dsf_inst_cd
WHERE com_inst.dcf_dsf_inst_cd = :id
\
"""

    def fetch_one(self, id) -> "UltmarcInstInfoDBModel | None":
        """Fetch the detail record of institution *id*.

        Args:
            id: ULT institution code (bound to ``:id``).

        Returns:
            The detail model, or ``None`` when the institution is not found.

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            query = self.FETCH_ONE_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcInstInfoDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models[0]
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,31 @@
from src.model.db.ultmarc_inst_trt_course import UltmarcInstTrtCourseDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcInstTrtCourseRepository(BaseRepository):
    """Repository for the treatment courses (departments) of an Ultmarc institution."""

    # Abbreviated treatment-course names for one institution,
    # ordered by treatment-course code.
    FETCH_SQL = """\
SELECT trt_course_name_abb
FROM src05.com_inst
JOIN src05.com_inst_trt_course ON com_inst.dcf_dsf_inst_cd = com_inst_trt_course.dcf_dsf_inst_cd
LEFT JOIN src05.com_trt_course ON com_inst_trt_course.trt_course_cd = com_trt_course.trt_course_cd
WHERE com_inst.dcf_dsf_inst_cd = :id
ORDER BY com_trt_course.trt_course_cd
"""

    def fetch_many(self, id) -> "list[UltmarcInstTrtCourseDBModel] | None":
        """Fetch the treatment courses of institution *id*.

        Args:
            id: ULT institution code (bound to ``:id``).

        Returns:
            The matching rows, or ``None`` when there are none.
            NOTE(review): returning ``None`` rather than an empty list is
            unusual for a fetch_many; callers must None-check.

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcInstTrtCourseDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,30 @@
from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcSosietyRepository(BaseRepository):
    """Repository for the academic societies an Ultmarc doctor belongs to."""

    # Society code/name pairs linked to one doctor, ordered by society code.
    FETCH_SQL = """\
SELECT com_sosiety.sosiety_cd, com_sosiety.sosiety_name
FROM src05.com_dr
LEFT JOIN src05.com_dr_sosiety ON com_dr.dcf_pcf_dr_cd = com_dr_sosiety.dcf_pcf_dr_cd
LEFT JOIN src05.com_sosiety ON com_dr_sosiety.sosiety_cd = com_sosiety.sosiety_cd
WHERE com_dr.dcf_pcf_dr_cd = :id
ORDER BY com_sosiety.sosiety_cd
"""

    def fetch_many(self, id) -> "list[UltmarcSosietyDBModel] | None":
        """Fetch the societies doctor *id* belongs to.

        Args:
            id: ULT doctor code (bound to ``:id``).

        Returns:
            The matching rows, or ``None`` when there are none.
            NOTE(review): returning ``None`` rather than an empty list is
            unusual for a fetch_many; callers must None-check.

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcSosietyDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,32 @@
from src.model.db.ultmarc_specialist_license import \
UltmarcSpecialistLicenseDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcSpecialistLicenseRepository(BaseRepository):
    """Repository for the specialist licenses held by an Ultmarc doctor."""

    # Specialist license code/name pairs linked to one doctor,
    # ordered by specialist code.
    FETCH_SQL = """\
SELECT
    com_specialist_license.specialist_cd, com_specialist_license.specialist_license_name
FROM src05.com_dr
LEFT JOIN src05.com_sp_field ON com_dr.dcf_pcf_dr_cd = com_sp_field.dcf_pcf_dr_cd
LEFT JOIN src05.com_specialist_license ON com_sp_field.specialist_cd = com_specialist_license.specialist_cd
WHERE com_dr.dcf_pcf_dr_cd = :id
ORDER BY com_specialist_license.specialist_cd
"""

    def fetch_many(self, id) -> "list[UltmarcSpecialistLicenseDBModel] | None":
        """Fetch the specialist licenses of doctor *id*.

        The return annotation previously claimed a single model, but the
        method returns a list (or ``None``); the annotation is corrected.

        Args:
            id: ULT doctor code (bound to ``:id``).

        Returns:
            The matching rows, or ``None`` when there are none.

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcSpecialistLicenseDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,31 @@
from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel
from src.repositories.base_repository import BaseRepository
class UltmarcTrtCourseRepository(BaseRepository):
    """Repository for the treatment courses (departments) of an Ultmarc doctor."""

    # Treatment-course names linked to one doctor,
    # ordered by treatment-course code.
    FETCH_SQL = """\
SELECT trt_course_name
FROM src05.com_dr
LEFT JOIN src05.com_dr_trt_course ON com_dr.dcf_pcf_dr_cd = com_dr_trt_course.dcf_pcf_dr_cd
LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd
WHERE com_dr.dcf_pcf_dr_cd = :id
ORDER BY com_trt_course.trt_course_cd
"""

    def fetch_many(self, id) -> "list[UltmarcTrtCourseDBModel] | None":
        """Fetch the treatment courses of doctor *id*.

        Args:
            id: ULT doctor code (bound to ``:id``).

        Returns:
            The matching rows, or ``None`` when there are none.
            NOTE(review): returning ``None`` rather than an empty list is
            unusual for a fetch_many; callers must None-check.

        Raises:
            Exception: Re-raised after printing when database access fails.
        """
        try:
            self._database.connect()
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'id': id})
            models = [UltmarcTrtCourseDBModel(**r) for r in result]
            if len(models) == 0:
                return None
            return models
        except Exception as e:
            # TODO: file output should be done via logger
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -1,7 +1,9 @@
from src.logging.get_logger import get_logger
from src.model.db.user_master import UserMasterModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
logger = get_logger('ユーザー取得')
class UserMasterRepository(BaseRepository):
FETCH_SQL = """\
@ -23,8 +25,7 @@ class UserMasterRepository(BaseRepository):
return None
return models[0]
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] DB Error : Exception={e.args}")
logger.exception(f"DB Error : Exception={e}")
raise e
finally:
self._database.disconnect()
self._database.disconnect()

View File

@ -1,25 +1,32 @@
from src.logging.get_logger import get_logger
from src.model.db.wholesaler_master import WholesalerMasterModel
from src.repositories.base_repository import BaseRepository
logger = get_logger('卸データ取得')
class WholesalerMasterRepository(BaseRepository):
FETCH_SQL = """\
SELECT DISTINCT
b.rec_whs_cd,
b.rec_whs_sub_cd,
v2.nm,
b.whs_nm
v2.name,
b.whs_name
FROM src05.bio_sales b
LEFT OUTER JOIN
(
SELECT sub_no, nm, v_whs_cd, rec_sts_kbn
SELECT
sub_num,
name,
v_whs_cd,
rec_sts_kbn
FROM src05.whs_mst_v
WHERE (SELECT STR_TO_DATE(syor_date, '%Y%m%d') FROM src05.hdke_tbl) BETWEEN start_date AND end_date
) v2
ON b.v_whs_cd = v2.v_whs_cd
ON b.v_whs_cd = v2.v_whs_cd
AND v2.rec_sts_kbn <> '9'
ORDER BY b.rec_whs_cd, b.rec_whs_sub_cd , b.whs_nm DESC
ORDER BY b.rec_whs_cd, b.rec_whs_sub_cd , b.whs_name DESC
"""
def fetch_all(self) -> list[WholesalerMasterModel]:
@ -30,9 +37,7 @@ class WholesalerMasterRepository(BaseRepository):
models = [WholesalerMasterModel(**r) for r in result_data]
return models
except Exception as e:
# TODO: ファイルへの書き出しはloggerでやる
print(f"[ERROR] getOroshiData DB Error. ")
print(f"[ERROR] ErrorMessage: {e.args}")
logger.exception(f"DB Error : Exception={e}")
raise e
finally:
self._database.disconnect()

View File

@ -1,4 +1,3 @@
import logging
from typing import Callable
from fastapi import Request, Response
@ -8,10 +7,12 @@ from starlette import status
from src.depends.auth import (check_session_expired, get_current_session,
verify_session)
from src.error.exceptions import UnexpectedException
from src.error.exceptions import DBException, UnexpectedException
from src.logging.get_logger import get_logger
from src.system_var import constants, environment
logger = logging.getLogger('uvicorn')
logger = get_logger('medaca_router')
class MeDaCaRoute(APIRoute):
"""アプリケーションのカスタムルーター
@ -19,6 +20,7 @@ class MeDaCaRoute(APIRoute):
Args:
APIRoute (APIRoute): FastAPIの標準APIRoute
"""
def get_route_handler(self) -> Callable:
"""前後処理を付加するルートハンドラーを返す
@ -34,20 +36,20 @@ class MeDaCaRoute(APIRoute):
# 返却するルートハンドラーを定義。必ず非同期関数にする必要がある。
async def custom_route_handler(request: Request) -> Response:
try:
logger.info('pre routing process')
# 事前処理
request = await self.pre_process_route(request)
# 本来のルーティング処理
logger.info('routing process')
response = await original_route_handler(request)
# 事後処理
logger.info('post routing process')
return await self.post_process_route(request, response)
except HTTPException as e:
raise e
except Exception as e:
logger.exception(e)
raise UnexpectedException(detail=constants.LOGOUT_REASON_UNEXPECTED)
if isinstance(e, DBException):
raise UnexpectedException(detail=constants.LOGOUT_REASON_DB_ERROR)
else:
raise UnexpectedException(detail=constants.LOGOUT_REASON_UNEXPECTED)
return custom_route_handler
async def pre_process_route(self, request: Request) -> Request:
@ -60,6 +62,7 @@ class MeDaCaRoute(APIRoute):
Request: 加工後のRequestインスタンス
"""
return request
async def post_process_route(self, request: Request, response: Response) -> Response:
"""ルートハンドラーの事後処理
@ -71,6 +74,7 @@ class MeDaCaRoute(APIRoute):
"""
return response
class BeforeCheckSessionRoute(MeDaCaRoute):
"""事前処理として、セッションチェックを行うルートハンドラー
@ -86,12 +90,13 @@ class BeforeCheckSessionRoute(MeDaCaRoute):
verified_session = verify_session(checked_session)
# セッションが有効でない場合、エラーにする
if verified_session is None:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
scope = request.scope
scope['session'] = verified_session
session_request = Request(receive=request.receive, scope=scope)
return session_request
class AfterSetCookieSessionRoute(MeDaCaRoute):
"""事後処理として、セッションキーをcookieに設定するカスタムルートハンドラー
@ -110,16 +115,18 @@ class AfterSetCookieSessionRoute(MeDaCaRoute):
response.set_cookie(
key='session',
value=session_key,
max_age=environment.SESSION_EXPIRE_MINUTE * 60, # cookieの有効期限は秒数指定なので、60秒をかける
max_age=environment.SESSION_EXPIRE_MINUTE * 60, # cookieの有効期限は秒数指定なので、60秒をかける
secure=True,
httponly=True
)
return response
class AuthenticatedRoute(BeforeCheckSessionRoute, AfterSetCookieSessionRoute):
async def pre_process_route(self, request: Request):
request = await super().pre_process_route(request)
return request
async def post_process_route(self, request: Request, response: Response):
response = await super().post_process_route(request, response)
return response

View File

@ -9,5 +9,6 @@ class BaseService(metaclass=ABCMeta):
REPOSITORIES: dict[str, BaseRepository] = {}
# 各サービスが依存するAWS APIクライアントクラスのマップ
CLIENTS: dict[str, AWSAPIClient] = {}
def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
pass

View File

@ -4,6 +4,7 @@ from src.model.db.hdke_tbl import HdkeTblModel
from src.repositories.base_repository import BaseRepository
from src.repositories.hdke_tbl_repository import HdkeTblRepository
from src.services.base_service import BaseService
from src.system_var import constants
class BatchStatusService(BaseService):
@ -13,6 +14,7 @@ class BatchStatusService(BaseService):
}
hdke_table_repository: HdkeTblRepository
__hdke_table_record: list[HdkeTblModel] = []
def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
super().__init__(repositories, clients)
self.hdke_table_repository = repositories['hdke_table_repository']
@ -24,18 +26,30 @@ class BatchStatusService(BaseService):
@property
def hdke_table_record(self) -> HdkeTblModel:
"""日付テーブルを取得する"""
# 日付マスタのレコードがあることを確認
self.__assert_record_exists()
# 日付テーブルのレコードは必ず1件
return self.__hdke_table_record[0]
def is_batch_processing(self):
"""バッチ処理中かどうかを判定する"""
# 日付マスタのレコードがあることを確認
self.__assert_record_exists()
return self.hdke_table_record.bch_actf == '1' # TODO: 定数化する
return self.hdke_table_record.bch_actf == constants.BATCH_STATUS_PROCESSING
def is_dump_processing(self):
"""dump処理処理中かどうかを判定する"""
# 日付マスタのレコードがあることを確認
self.__assert_record_exists()
return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED
def __assert_record_exists(self):
"""日付テーブルが有ることを保証する"""
# 日付マスタのレコードがない場合は例外とする
if len(self.__hdke_table_record) == 0:
raise DBException('日付テーブルのレコードが存在しません')

View File

@ -1,4 +1,4 @@
import os.path as path
import os
import shutil
from datetime import datetime
@ -6,6 +6,7 @@ import pandas as pd
from src.aws.aws_api_client import AWSAPIClient
from src.aws.s3 import S3Client
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.view.bio_disp_model import BisDisplayModel
@ -19,6 +20,8 @@ from src.repositories.wholesaler_master_repository import \
from src.services.base_service import BaseService
from src.system_var import constants, environment
logger = get_logger('生物由来参照')
class BioViewService(BaseService):
REPOSITORIES = {
@ -26,15 +29,16 @@ class BioViewService(BaseService):
'phm_repository': PharmacyProductMasterRepository,
'bio_sales_repository': BioSalesViewRepository
}
CLIENTS = {
's3_client': S3Client
}
whs_repository: WholesalerMasterRepository
phm_repository: PharmacyProductMasterRepository
bio_sales_repository: BioSalesViewRepository
s3_client: S3Client
def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
super().__init__(repositories, clients)
self.whs_repository = repositories['whs_repository']
@ -45,7 +49,7 @@ class BioViewService(BaseService):
def prepare_bio_view(
self,
session: UserSession
) ->BioViewModel:
) -> BioViewModel:
# 卸リストを取得
wholesalers = self.whs_repository.fetch_all()
# 製品リストを取得
@ -67,12 +71,60 @@ class BioViewService(BaseService):
def search_download_bio_data(self, search_params: BioModel):
# 生物由来データをダウンロードするために、DBから検索した結果をデータフレームに変換
bio_sales_data_frame = self.bio_sales_repository.fetch_as_data_frame(parameter=search_params)
return bio_sales_data_frame
bio_sales_data_frame, query = self.bio_sales_repository.fetch_as_data_frame(parameter=search_params)
return bio_sales_data_frame, query
def write_excel_file(self, data_frame: pd.DataFrame, user_id: str, timestamp: datetime):
def write_access_log(
self,
query: str,
parameters: BioModel,
user_id: str,
timestamp: datetime,
download_file_name: str
):
# アクセスログを書き出し、S3に保管する
access_log_file_name = f'BioAccessLog_{user_id}_{timestamp:%Y%m%d%H%M%S%f}.log'
# アクセスログファイル出力用のロガーを生成
import logging
access_logger = logging.getLogger(access_log_file_name)
level = logging.getLevelName(environment.LOG_LEVEL)
if not isinstance(level, int):
level = logging.INFO
access_logger.setLevel(level)
access_log_file_path = os.path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, access_log_file_name)
if not access_logger.hasHandlers():
access_log_handler = logging.FileHandler(access_log_file_path)
access_logger.addHandler(access_log_handler)
formatter = logging.Formatter(
'[%(levelname)s]\t%(asctime)s\t%(message)s',
'%Y-%m-%d %H:%M:%S'
)
for handler in logger.handlers:
handler.setFormatter(formatter)
# SQL文を出力
sql_message = f'ユーザーID: {user_id} SQL: {query}\t{download_file_name}'
access_logger.info(sql_message)
# 標準出力にも書き出す
logger.info(sql_message)
# 検索パラメータを1行ずつ書き出す
for param_key, param_value in parameters.dict().items():
if param_value is None or len(param_value) == 0:
continue
parameter_message = f'ユーザーID: {user_id} Value: {param_key} = {param_value}\t{download_file_name}'
logger.info(parameter_message)
access_logger.info(parameter_message)
# S3にアップロード
self.upload_bio_access_log_file(access_log_file_path)
def write_excel_file(self, data_frame: pd.DataFrame, user_id: str, download_file_name: str):
# Excelに書き込み
output_file_path = path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, f'Result_{user_id}_{timestamp:%Y%m%d%H%M%S%f}.xlsx')
output_file_path = os.path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, download_file_name)
# テンプレートファイルをコピーして出力ファイルの枠だけを作る
shutil.copyfile(
@ -87,12 +139,12 @@ class BioViewService(BaseService):
# DF内のヘッダと連番を書き込みたくない場合、`header`と`index`をFalseに指定する。
# `startrow`と`startcol`で、Excelの書き込み位置を決定する。省略した場合はA1セルから書く。
data_frame.to_excel(writer, header=False, index=False, startrow=1, startcol=0)
return output_file_path
def write_csv_file(self, data_frame: pd.DataFrame, user_id: str, header: list[str], timestamp: datetime):
def write_csv_file(self, data_frame: pd.DataFrame, user_id: str, header: list[str], download_file_name: str):
# csvに書き込み
output_file_path = path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, f'Result_{user_id}_{timestamp:%Y%m%d%H%M%S%f}.csv')
output_file_path = os.path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, download_file_name)
# 横長のDataFrameとするため、ヘッダーの加工処理
header_data = {}
for df_column, header_column in zip(data_frame.columns, header):
@ -107,13 +159,28 @@ class BioViewService(BaseService):
def upload_bio_data_file(self, local_file_path: str) -> None:
bucket_name = environment.BIO_ACCESS_LOG_BUCKET
# TODO: フォルダを変える
file_key =f'bio/{path.basename(local_file_path)}'
# TODO: ファイルパスにYYYY/MM/DDを加える
file_key = f'data/{os.path.basename(local_file_path)}'
self.s3_client.upload_file(local_file_path, bucket_name, file_key)
def generate_download_file_url(self, local_file_path:str, user_id: str, kind: str) -> str:
# アップロード後、ローカルからは削除する
self.delete_local_file(local_file_path)
def upload_bio_access_log_file(self, local_file_path: str) -> None:
bucket_name = environment.BIO_ACCESS_LOG_BUCKET
# TODO: フォルダを変える
file_key = f'bio/{path.basename(local_file_path)}'
# TODO: ファイルパスにYYYY/MM/DDを加える
file_key = f'log/{os.path.basename(local_file_path)}'
self.s3_client.upload_file(local_file_path, bucket_name, file_key)
# アップロード後、ローカルからは削除する
self.delete_local_file(local_file_path)
def generate_download_file_url(self, local_file_path: str, user_id: str, kind: str) -> str:
bucket_name = environment.BIO_ACCESS_LOG_BUCKET
# TODO: ファイルパスにYYYY/MM/DDを加える
file_key = f'data/{os.path.basename(local_file_path)}'
download_filename = f'{user_id}_生物由来卸販売データ.{kind}'
return self.s3_client.generate_presigned_url(bucket_name, file_key, download_filename)
def delete_local_file(self, local_file_path: str):
os.remove(local_file_path)

View File

@ -17,7 +17,7 @@ class LoginService(BaseService):
REPOSITORIES = {
'user_repository': UserMasterRepository
}
CLIENTS = {
'cognito_client': CognitoClient
}
@ -41,10 +41,10 @@ class LoginService(BaseService):
raise e
return JWTToken(id_token, refresh_token)
def login_with_security_code(self, code: str) -> JWTToken:
return JWTToken.request(code)
def logged_in_user(self, user_id):
user_record: UserMasterModel = self.user_repository.fetch_one({'user_id': user_id})
return user_record

View File

@ -1,15 +1,19 @@
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
logger = get_logger('セッション管理')
def set_session(session: UserSession) -> str:
session.save()
return session.session_key
def get_session(key: str) -> UserSession:
try:
session = UserSession.get(hash_key=key, consistent_read=True)
return session
except UserSession.DoesNotExist as e:
print(e)
logger.debug(f'セッション取得失敗:{e}')
return None

View File

@ -0,0 +1,156 @@
from src.aws.aws_api_client import AWSAPIClient
from src.model.request.ultmarc_doctor import UltmarcDoctorSearchModel
from src.model.request.ultmarc_inst import UltmarcInstSearchModel
from src.model.view.ultmarc_doctor_info_view_model import \
UltmarcDoctorInfoViewModel
from src.model.view.ultmarc_doctor_view_model import UltmarcDoctorViewModel
from src.model.view.ultmarc_inst_info_view_model import \
UltmarcInstInfoViewModel
from src.model.view.ultmarc_inst_view_model import UltmarcInstViewModel
from src.repositories.base_repository import BaseRepository
from src.repositories.inst_master_repository import InstDivMasterRepository
from src.repositories.prefc_master_repository import PrefcMasterRepository
from src.repositories.ultmarc_doctor_repository import UltmarcDoctorRepository
from src.repositories.ultmarc_dr_wrkplace_his_repository import \
UltmarcDoctorWrkplaceHisRepository
from src.repositories.ultmarc_dr_wrkplace_repository import \
UltmarcDoctorWrkplaceRepository
from src.repositories.ultmarc_inst_repository import UltmarcInstRepository
from src.repositories.ultmarc_inst_trt_course_repository import \
UltmarcInstTrtCourseRepository
from src.repositories.ultmarc_sosiety_repository import \
UltmarcSosietyRepository
from src.repositories.ultmarc_specialist_license_repository import \
UltmarcSpecialistLicenseRepository
from src.repositories.ultmarc_trt_course_repository import \
UltmarcTrtCourseRepository
from src.services.base_service import BaseService
class UltmarcViewService(BaseService):
    """Service assembling the view models for the Ultmarc doctor and
    institution screens (list search and detail pages)."""

    # Repositories this service depends on; the DI container instantiates
    # these classes and passes them to __init__ keyed by the same names.
    REPOSITORIES = {
        'ultmarc_doctor_repository': UltmarcDoctorRepository,
        'prefc_repository': PrefcMasterRepository,
        'inst_div_repository': InstDivMasterRepository,
        'ultmarc_inst_repository': UltmarcInstRepository,
        'ultmarc_trt_course_repository': UltmarcTrtCourseRepository,
        'ultmarc_inst_trt_course_repository': UltmarcInstTrtCourseRepository,
        'ultmarc_sosiety_repository': UltmarcSosietyRepository,
        'ultmarc_doctor_wrkplace_repository': UltmarcDoctorWrkplaceRepository,
        'ultmarc_doctor_wrkplace_his_repository': UltmarcDoctorWrkplaceHisRepository,
        'ultmarc_specialist_license_repository': UltmarcSpecialistLicenseRepository
    }

    # Instance attributes populated from the `repositories` dict in __init__.
    ultmarc_doctor_repository: UltmarcDoctorRepository
    prefc_repository: PrefcMasterRepository
    inst_div_repository: InstDivMasterRepository
    ultmarc_inst_repository: UltmarcInstRepository
    ultmarc_trt_course_repository: UltmarcTrtCourseRepository
    ultmarc_inst_trt_course_repository: UltmarcInstTrtCourseRepository
    ultmarc_sosiety_repository: UltmarcSosietyRepository
    ultmarc_doctor_wrkplace_repository: UltmarcDoctorWrkplaceRepository
    ultmarc_doctor_wrkplace_his_repository: UltmarcDoctorWrkplaceHisRepository
    ultmarc_specialist_license_repository: UltmarcSpecialistLicenseRepository

    def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
        """Bind the injected repository instances to instance attributes."""
        super().__init__(repositories, clients)
        self.ultmarc_doctor_repository = repositories['ultmarc_doctor_repository']
        self.prefc_repository = repositories['prefc_repository']
        self.inst_div_repository = repositories['inst_div_repository']
        self.ultmarc_inst_repository = repositories['ultmarc_inst_repository']
        self.ultmarc_trt_course_repository = repositories['ultmarc_trt_course_repository']
        self.ultmarc_inst_trt_course_repository = repositories['ultmarc_inst_trt_course_repository']
        self.ultmarc_sosiety_repository = repositories['ultmarc_sosiety_repository']
        self.ultmarc_doctor_wrkplace_repository = repositories['ultmarc_doctor_wrkplace_repository']
        self.ultmarc_doctor_wrkplace_his_repository = repositories['ultmarc_doctor_wrkplace_his_repository']
        self.ultmarc_specialist_license_repository = repositories['ultmarc_specialist_license_repository']

    #########################
    #    Ultmarc doctors    #
    #########################
    def prepare_ultmarc_doctor_search_view(
        self
    ) -> UltmarcDoctorViewModel:
        """Build the display data for the doctor list (search) screen."""
        # Fetch the prefecture list for the search form
        prefcs = self.prefc_repository.fetch_all()

        ultmarc = UltmarcDoctorViewModel(
            prefc_models=prefcs
        )
        return ultmarc

    def search_doctor_data(self, search_params: UltmarcDoctorSearchModel):
        """Search doctor records matching *search_params*."""
        ultmarc_doctor_data = self.ultmarc_doctor_repository.fetch_many(parameter=search_params)
        return ultmarc_doctor_data

    def prepare_ultmarc_doctor_info_view(
        self,
        id
    ) -> UltmarcDoctorInfoViewModel:
        """Build the display data for the doctor detail screen for doctor *id*."""
        # Fetch the doctor's basic information
        doctor_info = self.ultmarc_doctor_repository.fetch_one(id)
        # Fetch the treatment courses (departments)
        trt_course = self.ultmarc_trt_course_repository.fetch_many(id)
        # Fetch the societies the doctor belongs to
        sosiety = self.ultmarc_sosiety_repository.fetch_many(id)
        # Fetch the specialist licenses
        specialist_license = self.ultmarc_specialist_license_repository.fetch_many(id)
        # Fetch the current workplaces
        wrkplace = self.ultmarc_doctor_wrkplace_repository.fetch_many(id)
        # Fetch the workplace history
        wrkplace_his = self.ultmarc_doctor_wrkplace_his_repository.fetch_many(id)

        ultmarc = UltmarcDoctorInfoViewModel(
            doctor_info_data=doctor_info,
            trt_coursed_data=trt_course,
            sosiety_data=sosiety,
            specialist_license_data=specialist_license,
            doctor_wrkplace_data=wrkplace,
            doctor_wrkplace_his_data=wrkplace_his
        )
        return ultmarc

    #############################
    #    Ultmarc institutions   #
    #############################
    def prepare_ultmarc_inst_search_view(
        self
    ) -> UltmarcInstViewModel:
        """Build the display data for the institution list (search) screen."""
        # Fetch the prefecture list for the search form
        prefcs = self.prefc_repository.fetch_all()
        # Fetch the institution-division list for the search form
        inst_div = self.inst_div_repository.fetch_all()

        ultmarc = UltmarcInstViewModel(
            prefc_models=prefcs,
            inst_div_models=inst_div
        )
        return ultmarc

    def search_inst_data(self, search_params: UltmarcInstSearchModel):
        """Search institution records matching *search_params*."""
        ultmarc_inst_data = self.ultmarc_inst_repository.fetch_many(parameter=search_params)
        return ultmarc_inst_data

    def prepare_ultmarc_inst_info_view(
        self,
        id
    ) -> UltmarcInstInfoViewModel:
        """Build the display data for the institution detail screen for institution *id*."""
        # Fetch the institution's basic information
        inst_info = self.ultmarc_inst_repository.fetch_one(id)
        # Fetch the treatment courses (departments)
        inst_trt_course = self.ultmarc_inst_trt_course_repository.fetch_many(id)
        # Fetch the number of doctors working at the institution
        doctor_count = self.ultmarc_doctor_wrkplace_repository.fetch_count(id)

        ultmarc = UltmarcInstInfoViewModel(
            inst_info_data=inst_info,
            inst_trt_coursed_data=inst_trt_course,
            doctor_wrkplace_count=doctor_count
        )
        return ultmarc

View File

@ -0,0 +1,164 @@
body{
background-color: LightCyan;
font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif;
}
h1{
margin-left : 1%;
}
/*ヘッダー*/
.headerTable{
width: 100%;
}
.headerTdLeft{
width: 80%;
}
.headerTdRight{
text-align: right;
padding-right: 2%;
width: 20%;
}
.buttonSize{
width: 85px;
}
/*////////////////////////*/
/*施設担当者データCSVダウンロード*/
/*////////////////////////*/
.searchColumnTd{
width: 14%;
}
.searchTextboxTd{
width: 18%;
}
.searchTable{
margin-left: 3%;
margin-right: 3%;
margin-bottom: 1%;
padding-bottom: 1%;
border-bottom: solid 1px gray;
width: 94%;
}
.searchLabelTd{
text-align: right;
width: 10%;
}
.searchInputTd{
width: 19%;
}
.searchTextbox{
width: 90%;
margin-left: 2.5%;
margin-right: 2.5%;
margin-top: 0.8%;
margin-bottom: 0.8%;
}
.searchDateTextbox{
width: 37%;
margin-left: 2.5%;
margin-right: 2.5%;
margin-top: 0.8%;
margin-bottom: 0.8%;
}
.searchButtonTd{
text-align: right;
padding-top: 1%;
}
.csvOutputMessage{
margin-left: 3%;
}
.errorColor{
color: red;
}
/*//////////////////////////*/
/*施設担当者データExcelアップロード*/
/*//////////////////////////*/
.inputTable{
margin-left: 3%;
margin-right: 3%;
margin-bottom: 1%;
padding-bottom: 1%;
border-bottom: solid 1px gray;
width: 94%;
}
.inputLabelTd{
width: 10%;
}
.inputTd{
width:20%;
}
.inputButtonTd{
width: 50%;
text-align: right;
}
.dataCntDisp{
text-align: right;
margin-right: 3%;
}
table.inputData {
font-family:arial;
background-color: #CDCDCD;
font-size: 12pt;
text-align: left;
white-space: nowrap;
border: 0.1px solid silver;
padding: 4px;
padding-right: 20px;
border-collapse: collapse;
margin-left: 3%;
width: 94%;
}
table.inputData tbody th {
color: #3D3D3D;
padding: 4px;
background-color: #e6EEEE;
border: 0.1px solid silver;
vertical-align: top;
}
table.inputData tbody td {
color: #3D3D3D;
padding: 4px;
background-color: #FFF;
border: 0.1px solid silver;
vertical-align: top;
}
.footerMsg{
margin-left: 3%;
}
/*//////////////////////////*/
/*データ上書きコピー */
/*//////////////////////////*/
.tableOverRide{
margin-right: 3%;
margin-left: 3%;
margin-bottom: 2%;
border-bottom: solid 1px gray;
width: 94%;
}

View File

@ -37,7 +37,7 @@ body{
font-size: 160%;
}
.notUseBioMsg{
.notUseBioMsg,.notUseMainteMsg{
font-size: 143%;
color: red;
}

View File

@ -0,0 +1,671 @@
/* Base page styles: background color and Japanese-first font stack. */
body {
background-color: LightCyan;
font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, " Pゴシック", "MS PGothic", sans-serif;
}
h1 {
font-size: 150%;
margin-left: 2%;
margin-top: 0%;
margin-bottom: 0%;
}
table{
border-collapse : collapse;
}
/* Header button pushed to the right edge via a large left margin. */
.header_bt{
width: 8%;
margin-bottom: 0.8%;
margin-left: 78.5%;
}
.search_textbox{
width: 100%;
}
.search_dropdown{
width: 100%;
}
/* Full-width long text input in the search form. */
.search_longtextbox{
width: 100%; /* trailing semicolon added for consistency with sibling rules */
}
.instSearchResult {
width: 100%;
}
/* Horizontally/vertically scrollable result-table container. */
.scroll_table{
overflow: auto;
white-space: nowrap;
margin-bottom: 2%;
/* NOTE(review): the original comment said "width: 100%; was commented out to
   fix columns shifting while scrolling", but the declaration below is still
   active — confirm which state is intended. */
width: 100%;
height: 250px;
}
.scroll_table::-webkit-scrollbar {
height: 5px;
width: 10px;
}
.scroll_table::-webkit-scrollbar-track {
border-radius: 5px;
background: #eee;
}
.scroll_table::-webkit-scrollbar-thumb {
border-radius: 5px;
background: #666;
}
.ult_bt {
width: 20%;
height: 80%;
}
/* "Info" button width. */
.info_bt{
width: 10%; /* trailing semicolon added for consistency with sibling rules */
}
.search_bt{
margin-left: 3%;
margin-top: 0.8%;
margin-bottom: 0.8%;
}
/* "No results found" message. */
.notFind{
margin-top: 5%;
text-align: center;
font-size: 150%;
}
.search_table {
margin-bottom: 1%;
padding-bottom: 1%;
border-bottom: solid 1px gray;
width: 100%;
}
.search_tb {
padding-right: 2%;
padding-top: 0.2%;
padding-bottom: 0.2%;
}
.leftSearch_tb{
width: 35%;
}
/* Batch-status warning message. */
.batchMsg{
color: red;
font-size: 120%;
text-align: center;
}
._form {
width: 95%;
margin-left: 3%;
}
.result_info {
text-align: right;
}
/* Facility search result list header */
.instSearchHeaderTable{
width: 100%;
}
.instSearchHeaderTd{
width: 24%;
}
.instSearchHeaderTdCenter{
text-align: center;
width: 50%;
}
.instSearchHeaderTdRight{
text-align: right;
padding-right: 2%;
}
.instSearchButchMsg{
/* font-size: 80%; */
color: red;
}
.instSearchHeader_bt{
width: 40%;
}
/* Facility detail */
.instInfoTable{
margin-top: 1%;
margin-left: 5%;
margin-right: 2%;
margin-bottom: 2%;
width: 93%;
}
.instInfoTableHalf1{
margin-top: 1%;
margin-left: 5%;
margin-right: 2%;
width: 93%;
}
/* Negative top margin visually joins this table with Half1 above it. */
.instInfoTableHalf2{
margin-top: -0.05%;
margin-left: 5%;
margin-right: 2%;
margin-bottom: 2%;
width: 93%;
}
.instInfoColumn {
width : 9%;
height: 40px;
background : rgb(225, 233, 250);
border : solid 1px;
}
.instData {
background : rgb(244, 244, 244);
border : solid 1px;
padding-left : 0.5%;
padding-right : 0.5%;
padding-top: 0.25%;
padding-bottom: 0.25%;
}
.instDataMid{
/* Before fix No.5: width: 51%; */
width: 20%;
}
/* Added in fix No.5 START */
.instDataLarge{
width: 85%;
}
.instDataLeft{
width: 20%;
}
.instDataCenter{
width: 7%;
}
.instDataRight{
width: 25%;
}
/* Added in fix No.5 END */
.instDataSmallTextbox{
width: 45%;
}
/* Added in fix No.5 START */
.instDataCenterTextbox{
width: 80%;
}
/* Added in fix No.5 END */
.instInfoTextbox{
width: 98%;
padding-right: 1%;
padding-left: 1%;
}
.instCdTextbox{
/* Before fix No.5: width: 13%; */
width: 35%;
margin-left: 0.5%;
margin-right: 2%;
}
.delReasonCdTextbox{
/* Before fix No.5: width: 2%; */
width: 5%;
margin-left: 0.5%;
margin-right: 1%;
}
.delReasonTextbox{
/* Before fix No.5: width: 43%; */
width: 88%;
margin-left: 0.5%;
margin-right: 2%;
}
.manageTextbox{
width: 40%;
}
.textboxMargin {
margin-left : 0.1%;
}
.transitionBt{
width: 98%;
height: 30px;
}
.instHeaderTable{
margin-left: 40%;
}
.instHeaderTd{
width: 10%;
font-size: 140%;
text-align: center;
padding-right: 2%;
}
.trtCourseTextbox{
width: 6%;
}
.bedTd{
width: 46%;
}
.bedTextbox{
width: 70%;
}
.xSmallTd{
width: 9%;
}
.xSmallTextbox{
width: 75%;
}
.reExamTd{
width: 13%;
}
.repreTd{
width: 50%;
}
.repreTextbox{
width: 95%;
}
.trtTextbox{
width: 5%;
margin-right: 1.2%;
}
.parentCdTextBox{
width: 15%;
}
.parentNameTextBox{
width: 75%;
}
.hpInfoColumn{
width : 12%;
height: 40px;
background : rgb(225, 233, 250);
border : solid 1px;
}
.hpAssrtTd{
width: 12%;
}
.hpAssrtTextbox{
width: 85%;
}
.border_bottom_none {
border-bottom-style:none;
}
/* Right-align numeric values. */
.numberBox{
text-align: right;
}
/* Doctor search */
/* Header */
.docHeaderTable{
width: 100%;
}
.docHeaderTd{
width: 24%;
}
.docHeaderTdCenter{
text-align: center;
width: 50%;
}
.docHeaderTdRight{
text-align: right;
padding-right: 2%;
}
.docButchMsg{
/* font-size: 80%; */
color: red;
}
.docHeader_bt{
width: 40%;
}
/* Altmark issue list No.2 fix: 8% → 10% */
/* Altmark issue list No.8 fix: 10% → 14% */
.docSearchColumnTd{
width: 14%;
}
.docSearchTextboxTd{
width: 18%;
}
.docSearchTextbox_td{
width: 94%;
}
.docSearchTextbox{
width: 90%;
margin-right: 5%;
margin-top: 0.8%;
margin-bottom: 0.8%;
}
.docSearchTableDivOne{
width: 100%;
}
.docSearchTableDivTwo{
margin-bottom: 1%;
padding-bottom: 1%;
border-bottom: solid 1px gray;
width: 100%;
}
/* Scrollable doctor search result container. */
.docSearchScroll{
overflow: auto;
white-space: nowrap;
margin-bottom: 2%;
width: 100%;
height: 270px;
}
.docSearchScroll::-webkit-scrollbar {
height: 5px;
width: 10px;
}
.docSearchScroll::-webkit-scrollbar-track {
border-radius: 5px;
background: #eee;
}
.docSearchScroll::-webkit-scrollbar-thumb {
border-radius: 5px;
background: #666;
}
.allOnOffButton{
width: 6%;
}
/* Doctor info */
.docInfoTable{
margin-top: 1%;
margin-left: 5%;
margin-right: 2%;
margin-bottom: 1%;
width: 93%;
border-bottom: solid 1px gray;
}
.docInfoTd{
padding-bottom: 0.5%;
}
.docInfoTextBox{
margin-left: 0.5%;
margin-right: 2%;
width: 8%;
}
.docInfoTrtTextBox{
margin-left: 0.5%;
}
.docBelongTable{
margin-left: 1%;
width: 98%;
border-bottom: solid 1px gray;
}
.docBelongTd{
width: 49%;
height: 150px;
}
.docSocietyTable{
width: 100%;
}
/* Generic scrollable list box (affiliations, societies). */
.scroll{
overflow: auto;
height: 120px;
width: 90%;
margin-left: 7%;
margin-bottom: 4%;
}
.scroll::-webkit-scrollbar {
height: 5px;
width: 10px;
}
.scroll::-webkit-scrollbar-track {
border-radius: 5px;
background: #eee;
}
.scroll::-webkit-scrollbar-thumb {
border-radius: 5px;
background: #666;
}
.rightBoderLine{
border-right: solid 1px gray;
}
.wrkplaceH1{
margin-top: 0.3%;
}
.wrkplaceTable{
width: 100%;
}
/* Shared by biological-origin search and facility search */
.clear_bt{
margin-left: 120px;
width: 60px; /* trailing semicolon added for consistency with sibling rules */
}
/* "Back" button placement. */
.back_bt{
margin-left: 1042px;
width: 80px; /* trailing semicolon added for consistency with sibling rules */
}
.noLine{
text-decoration: none;
}
/* Altmark facility search */
/* Shared: Altmark facility search, doctor search, facility detail */
.maxWidth_tb {
width: 100%;
}
/* Shared by Altmark facility search and doctor search */
.search_btTd {
text-align: right;
}
/* Paged sections: hidden by default, first page shown. */
.selection {
display: none;
}
#page-1 {
display: block;
}
/* Doctor search */
.search_middleTd {
padding-right: 25px;
width : 450px;
}
.docSearchScroll_div {
overflow: auto;
height: 200px;
width: 1132px;
}
/* Shared: facility detail and doctor detail */
.transition{
text-align: right;
margin-right: 60px;
}
.data_width_middle {
width : 300px;
}
.border_top_none {
border-top-style:none;
}
.textbox_margin_short {
margin-left : 5px;
}
.label_margin {
margin-left: 10px;
margin-right: 10px;
}
/* Doctor detail */
.docInfo_table{
margin-bottom: 30px;
border-bottom: solid 1px gray;
width: 1132px;
}
.small_tb{
width: 100px;
}
.docBelongScroll_div {
overflow: auto;
height: 100px;
width: 500px;
margin: 0px 30px 0px 30px;
}
.rightPadding_table{
padding-right: 50px;
}
.docPlaceScroll_div {
overflow: auto;
height: 150px;
width: 700px;
margin: 0px 30px 0px 30px;
}
.result_tr{
overflow-y: scroll;
overflow-x: scroll;
}
.result_data{
overflow-y: scroll;
overflow-x: scroll;
width: 50px;
}
}
/* tablesoter */
table.tablesorter {
font-family:arial;
background-color: #CDCDCD;
font-size: 8pt;
text-align: left;
}
table.tablesorter thead tr th, table.tablesorter tfoot tr th {
background-color: #e6EEEE;
border: 0.1px solid silver;
font-size: 8pt;
padding: 4px;
padding-right: 20px;
}
table.tablesorter thead tr .header {
background-image: url(bg.gif);
background-repeat: no-repeat;
background-position: center right;
cursor: pointer;
}
table.tablesorter tbody td {
color: #3D3D3D;
padding: 4px;
background-color: #FFF;
border: 0.1px solid silver;
vertical-align: top;
}
table.tablesorter tbody tr.odd td {
background-color:#F0F0F6;
}
table.tablesorter thead tr .headerSortUp {
background-image: url(asc.gif);
}
table.tablesorter thead tr .headerSortDown {
background-image: url(desc.gif);
}
table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp {
background-color: #8dbdd8;
}

Some files were not shown because too many files have changed in this diff Show More