Merge branch 'develop' into feature-NEWDWH2021-1847

This commit is contained in:
shimoda.m@nds-tyo.co.jp 2025-05-29 15:11:44 +09:00
commit 03df7fda74
77 changed files with 4802 additions and 2497 deletions

2
.vscode/launch.json vendored
View File

@ -9,7 +9,7 @@
"type": "python",
"request": "launch",
// windows\
"program": "ecs\\dataimport\\dataimport\\controller.py",
"program": "ecs/jskult-batch-archive-jsk-data/entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true,
"envFile": "${workspaceFolder}/.env"

View File

@ -0,0 +1,12 @@
tests/*
.coverage
.env
.env.example
.report/*
.vscode/*
.pytest_cache/*
*/__pycache__/*
Dockerfile
pytest.ini
README.md
*.sql

View File

@ -0,0 +1,15 @@
DB_HOST=****************
DB_PORT=****************
DB_USERNAME=****************
DB_PASSWORD=****************
DB_SCHEMA=****************
LOG_LEVEL=INFO
JSKULT_CONFIG_BUCKET=**********************
JSKULT_ARCHIVE_BUCKET=****************
DB_CONNECTION_MAX_RETRY_ATTEMPT=4
DB_CONNECTION_RETRY_INTERVAL_INIT=5
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50

View File

@ -0,0 +1,10 @@
.vscode/settings.json
.env
# python
__pycache__
# python test
.pytest_cache
.coverage
.report/

View File

@ -0,0 +1,16 @@
{
// IntelliSense 使
//
// : https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "(DEBUG)archive",
"type": "python",
"request": "launch",
"program": "entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true
}
]
}

View File

@ -0,0 +1,31 @@
{
"[python]": {
"editor.defaultFormatter": null,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
//
"python.defaultInterpreterPath": "<pythonインタプリターのパス>",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Args": [
"--max-line-length=200",
"--ignore=F541"
],
"python.formatting.provider": "autopep8",
"python.formatting.autopep8Path": "autopep8",
"python.formatting.autopep8Args": [
"--max-line-length", "200",
"--ignore=F541"
],
"python.testing.pytestArgs": [
"tests/batch/"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}

View File

@ -0,0 +1,20 @@
FROM python:3.12-slim-bookworm
ENV TZ="Asia/Tokyo"
# Do not buffer Python stdout/stderr (logs appear immediately in container logs)
ENV PYTHONUNBUFFERED=1
# Do not write .pyc bytecode files
ENV PYTHONDONTWRITEBYTECODE=1
WORKDIR /usr/src/app
COPY Pipfile Pipfile.lock ./
# Install locked runtime dependencies system-wide with pipenv, then remove
# pipenv itself so it does not stay in the final image.
# NOTE: the original ran `apt update -y` without installing any package,
# which only left a stale apt cache in the layer — dropped here.
RUN \
    pip install pipenv --no-cache-dir && \
    pipenv install --system --deploy && \
    pip uninstall -y pipenv virtualenv-clone virtualenv
COPY src ./src
COPY entrypoint.py entrypoint.py
# Run the batch as a non-root user (least privilege)
RUN useradd --system --uid 10001 --create-home appuser
USER appuser
CMD ["python", "entrypoint.py"]

View File

@ -0,0 +1,23 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
boto3 = "*"
PyMySQL = "*"
sqlalchemy = "*"
tenacity = "*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
pytest = "*"
pytest-cov = "*"
boto3 = "*"
[requires]
python_version = "3.12"
[pipenv]
allow_prereleases = true

View File

@ -0,0 +1,447 @@
{
"_meta": {
"hash": {
"sha256": "aa2d1d97600fea225b7d249dae0d065190d00fdadbf85b20773e0c1d9862f5c1"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.12"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"boto3": {
"hashes": [
"sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0",
"sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==1.38.23"
},
"botocore": {
"hashes": [
"sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a",
"sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457"
],
"markers": "python_version >= '3.9'",
"version": "==1.38.23"
},
"greenlet": {
"hashes": [
"sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057",
"sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207",
"sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3",
"sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4",
"sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b",
"sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc",
"sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825",
"sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370",
"sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708",
"sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457",
"sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763",
"sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf",
"sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe",
"sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e",
"sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d",
"sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59",
"sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa",
"sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e",
"sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51",
"sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3",
"sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e",
"sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61",
"sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5",
"sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74",
"sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907",
"sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275",
"sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f",
"sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59",
"sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c",
"sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf",
"sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b",
"sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819",
"sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65",
"sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e",
"sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240",
"sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5",
"sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659",
"sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485",
"sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec",
"sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8",
"sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418",
"sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce",
"sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6",
"sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7",
"sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6",
"sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f",
"sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13",
"sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b",
"sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068",
"sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325",
"sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330",
"sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834",
"sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b",
"sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5",
"sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421"
],
"markers": "python_version >= '3.9'",
"version": "==3.2.2"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"pymysql": {
"hashes": [
"sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c",
"sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==1.1.1"
},
"python-dateutil": {
"hashes": [
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3",
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==2.9.0.post0"
},
"s3transfer": {
"hashes": [
"sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be",
"sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"
],
"markers": "python_version >= '3.9'",
"version": "==0.13.0"
},
"six": {
"hashes": [
"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274",
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==1.17.0"
},
"sqlalchemy": {
"hashes": [
"sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5",
"sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582",
"sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b",
"sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b",
"sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348",
"sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda",
"sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5",
"sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2",
"sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29",
"sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8",
"sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f",
"sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826",
"sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504",
"sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae",
"sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45",
"sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443",
"sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23",
"sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576",
"sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1",
"sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0",
"sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71",
"sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11",
"sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e",
"sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f",
"sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8",
"sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd",
"sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814",
"sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08",
"sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea",
"sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30",
"sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda",
"sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9",
"sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923",
"sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df",
"sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036",
"sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3",
"sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f",
"sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6",
"sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04",
"sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2",
"sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560",
"sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70",
"sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769",
"sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1",
"sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6",
"sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b",
"sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747",
"sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078",
"sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440",
"sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f",
"sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2",
"sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d",
"sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc",
"sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a",
"sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd",
"sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9",
"sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==2.0.41"
},
"tenacity": {
"hashes": [
"sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb",
"sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==9.1.2"
},
"typing-extensions": {
"hashes": [
"sha256:6cd49c8b914bb3869a16ed9d1001e3d0ff1d84fae4838076fe3b361ab8b32b65",
"sha256:90196079d79b4658568e177f50c24c327b73a85e664c0af9f3937e2015b65956"
],
"markers": "python_version >= '3.9'",
"version": "==4.14.0rc1"
},
"urllib3": {
"hashes": [
"sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
"sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
],
"markers": "python_version >= '3.9'",
"version": "==2.4.0"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758",
"sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.3.2"
},
"boto3": {
"hashes": [
"sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0",
"sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==1.38.23"
},
"botocore": {
"hashes": [
"sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a",
"sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457"
],
"markers": "python_version >= '3.9'",
"version": "==1.38.23"
},
"coverage": {
"extras": [
"toml"
],
"hashes": [
"sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7",
"sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be",
"sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404",
"sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11",
"sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5",
"sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d",
"sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347",
"sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36",
"sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3",
"sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3",
"sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b",
"sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e",
"sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85",
"sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279",
"sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d",
"sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a",
"sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3",
"sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7",
"sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57",
"sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8",
"sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625",
"sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b",
"sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740",
"sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a",
"sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be",
"sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257",
"sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622",
"sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6",
"sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879",
"sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a",
"sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a",
"sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a",
"sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050",
"sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0",
"sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32",
"sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1",
"sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48",
"sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f",
"sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008",
"sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223",
"sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2",
"sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53",
"sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975",
"sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7",
"sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199",
"sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f",
"sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7",
"sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27",
"sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c",
"sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca",
"sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787",
"sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9",
"sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a",
"sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8",
"sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20",
"sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d",
"sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99",
"sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108",
"sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7",
"sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c",
"sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb",
"sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46",
"sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca",
"sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d",
"sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837",
"sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54",
"sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"
],
"markers": "python_version >= '3.9'",
"version": "==7.8.2"
},
"flake8": {
"hashes": [
"sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343",
"sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==7.2.0"
},
"iniconfig": {
"hashes": [
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7",
"sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"
],
"markers": "python_version >= '3.8'",
"version": "==2.1.0"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"packaging": {
"hashes": [
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484",
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"
],
"markers": "python_version >= '3.8'",
"version": "==25.0"
},
"pluggy": {
"hashes": [
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3",
"sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"
],
"markers": "python_version >= '3.9'",
"version": "==1.6.0"
},
"pycodestyle": {
"hashes": [
"sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9",
"sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"
],
"markers": "python_version >= '3.9'",
"version": "==2.13.0"
},
"pyflakes": {
"hashes": [
"sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a",
"sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b"
],
"markers": "python_version >= '3.9'",
"version": "==3.3.2"
},
"pytest": {
"hashes": [
"sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820",
"sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==8.3.5"
},
"pytest-cov": {
"hashes": [
"sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a",
"sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==6.1.1"
},
"python-dateutil": {
"hashes": [
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3",
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==2.9.0.post0"
},
"s3transfer": {
"hashes": [
"sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be",
"sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"
],
"markers": "python_version >= '3.9'",
"version": "==0.13.0"
},
"six": {
"hashes": [
"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274",
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==1.17.0"
},
"urllib3": {
"hashes": [
"sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
"sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
],
"markers": "python_version >= '3.9'",
"version": "==2.4.0"
}
}
}

View File

@ -0,0 +1,72 @@
# 実消化過去データアーカイブ処理
## 概要
実消化過去データアーカイブ処理。
## 環境情報
- Python 3.12
- MySQL 8.23
- VSCode
## 環境構築
- Python の構築
- Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照
- 「Pipenv の導入」までを行っておくこと
- 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する
- `pipenv install --dev --python <pyenvでインストールしたpythonバージョン>`
- この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく
- MySQL の環境構築
- Windows の場合、以下のリンクからダウンロードする
- <https://dev.mysql.com/downloads/installer/>
- Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利
- 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行する
- `docker-compose up -d`
- Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと
- データを投入する
- 立ち上げたデータベースに「src05」スキーマを作成する
- [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する
- `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql`
- 環境変数の設定
- 「.env.example」ファイルをコピーし、「.env」ファイルを作成する
- 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください
- VSCode の設定
- 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する
- 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する
## 実行
- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。
- 「entrypoint.py」が、バッチ処理のエントリーポイント。
- 実際の処理は、「src/jobctrl_daily.py」で行っている。
## フォルダ構成
```text
.
├── Dockerfile -- Dockerイメージを作成するためのファイル
├── Pipfile -- Pythonモジュールの依存関係を管理するファイル
├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル
├── README.md -- 当ファイル
├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル
└── src -- ソースコードの保管場所
├── aws -- AWS関連処理
│ └── s3.py -- S3クライアントとバケット処理
├── batch -- バッチ処理関連ソース置き場
│ ├── archive_jsk_data.py -- 実消化過去データアーカイブ処理
│ └── jskult_archive_manager.py -- アーカイブ管理テーブル操作処理
├── db
│ └── database.py -- データベース操作共通処理
├── error
│ └── exceptions.py -- カスタム例外
├── logging
│ └── get_logger.py -- ログ出力の共通処理
└── system_var
├── constants.py -- 定数
└── environment.py -- 環境変数

View File

@ -0,0 +1,10 @@
"""Entry point for the JSK actual-consumption historical-data archive batch."""
from src.batch import archive_jsk_data

if __name__ == '__main__':
    try:
        # exec() performs the whole archive run; its return value (None)
        # maps to exit code 0.
        exit(archive_jsk_data.exec())
    except Exception:
        # FIX: the original left this handler commented out, making the
        # final exit(0) unreachable. Per the design note, the batch must
        # exit with a success code even on error; the failure itself is
        # logged inside the batch process.
        exit(0)

View File

@ -0,0 +1,3 @@
[pytest]
log_format = %(levelname)s %(asctime)s %(message)s
log_date_format = %Y-%m-%d %H:%M:%S

View File

@ -0,0 +1,61 @@
import boto3
from src.system_var import environment
class S3Client:
    """Thin wrapper around a single shared boto3 S3 client."""

    # One boto3 client shared by every S3Client instance.
    __s3_client = boto3.client('s3')
    _bucket_name: str

    def list_objects(self, bucket_name: str, folder_name: str):
        """Return [{'filename', 'size'}] for every object under folder_name.

        Keys ending with '/' are treated as folder placeholders and skipped.
        Returns an empty list when the prefix matches nothing.
        """
        listing = self.__s3_client.list_objects_v2(
            Bucket=bucket_name, Prefix=folder_name)
        if listing['KeyCount'] == 0:
            return []
        found = []
        for entry in listing['Contents']:
            key = entry['Key']
            if key.endswith('/'):
                continue
            found.append({'filename': key, 'size': entry['Size']})
        return found

    def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None:
        """Server-side copy of one object from (src_bucket, src_key) to (dest_bucket, dest_key)."""
        source = {'Bucket': src_bucket, 'Key': src_key}
        self.__s3_client.copy(source, dest_bucket, dest_key)
        return

    def download_file(self, bucket_name: str, file_key: str, file):
        """Stream the object at file_key into the given writable file-like object."""
        self.__s3_client.download_fileobj(
            Bucket=bucket_name,
            Key=file_key,
            Fileobj=file,
        )
        return

    def upload_file(self, local_file_path: str, bucket_name: str, file_key: str):
        """Upload the local file at local_file_path to bucket_name under file_key."""
        self.__s3_client.upload_file(
            local_file_path,
            Bucket=bucket_name,
            Key=file_key,
        )

    def delete_file(self, bucket_name: str, file_key: str):
        """Delete the object at file_key from bucket_name."""
        self.__s3_client.delete_object(
            Bucket=bucket_name,
            Key=file_key,
        )
class S3Bucket():
    """Base class for bucket-specific S3 helpers.

    Subclasses override ``_bucket_name`` with the concrete bucket they
    operate on and use the shared ``_s3_client`` for transfers.
    """

    # Low-level client shared by all bucket subclasses.
    _s3_client = S3Client()
    # Concrete bucket name; set by subclasses (None in the base class).
    _bucket_name: str = None
class JskultArchiveBucket(S3Bucket):
    """S3 bucket that stores archived JSK data zip files."""

    _bucket_name = environment.JSKULT_ARCHIVE_BUCKET

    def upload_archive_zip_file(self, archive_zip: str, archive_zip_path: str, send_folder: str) -> str:
        """Upload an archive zip file to this bucket.

        Args:
            archive_zip: zip file name (becomes the last key segment).
            archive_zip_path: local path of the zip file to upload.
            send_folder: destination folder (key prefix) inside the bucket.

        Returns:
            "<bucket>/<key>" of the uploaded object, for logging.
        """
        # Move the file into the S3 bucket.
        archive_zip_name = f'{send_folder}/{archive_zip}'
        # FIX: use the client shared via S3Bucket instead of constructing a
        # fresh S3Client per upload — consistent with the base-class design
        # and avoids a redundant instantiation.
        self._s3_client.upload_file(
            archive_zip_path, self._bucket_name, archive_zip_name)
        return f"{self._bucket_name}/{archive_zip_name}"

View File

@ -0,0 +1,75 @@
import csv
import os.path as path
import tempfile
import zipfile
from datetime import timedelta
from src.aws.s3 import JskultArchiveBucket
from src.batch.jskult_archive_manager import JskultArchiveManager
from src.logging.get_logger import get_logger
logger = get_logger("実消化_過去データアーカイブ処理")


def exec():
    """実消化_過去データアーカイブ処理 — archive old JSK actual-consumption data.

    For every row of the archive-manage table:
      1. select rows of the target table at or before the filter date,
      2. dump them to CSV, compress to zip, upload the zip to S3,
      3. delete the archived rows from the DB,
      4. advance the archive-manage row for the next run.

    Any exception is logged and swallowed so the batch always terminates
    "normally"; operators inspect the logs for failures.
    """
    try:
        logger.info("処理開始:実消化_過去データアーカイブ処理")
        jskult_archive_manager = JskultArchiveManager()
        # Fetch target table, filter column/date, run interval (months),
        # previous filter date and storage destination per archive target.
        jskult_archive_manage_data_list = jskult_archive_manager.get_archive_manage()
        for jskult_archive_manage_data in jskult_archive_manage_data_list:
            # Rows of the target table whose filter column is at or before
            # the filter date.
            archive_data = jskult_archive_manager.get_archive_data(
                jskult_archive_manage_data["target_table"], jskult_archive_manage_data["filter_column"], jskult_archive_manage_data["filter_date"])
            # Nothing to archive for this target — skip it.
            if not archive_data:
                logger.info(
                    f"アーカイブ対象データがありませんでした。対象テーブル:{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}")
                continue
            # Work inside a temporary directory that is removed automatically.
            with tempfile.TemporaryDirectory() as temporary_dir:
                # File name covers the archived period:
                # <table>_<day after previous filter date>_<filter date>.
                day_after_prev_filter_date = jskult_archive_manage_data["prev_filter_date"] + timedelta(
                    days=1)
                period_start = day_after_prev_filter_date.strftime('%Y%m%d')
                period_end = jskult_archive_manage_data["filter_date"].strftime('%Y%m%d')
                file_name = f'{jskult_archive_manage_data["target_table"]}_{period_start}_{period_end}'
                csv_file_path = path.join(temporary_dir, f"{file_name}.csv")
                headers = archive_data[0].keys()
                # NOTE(review): no explicit encoding — uses the locale
                # default; confirm the consumer expects that encoding.
                with open(csv_file_path, 'w', newline='') as file:
                    writer = csv.DictWriter(
                        file, fieldnames=headers, quoting=csv.QUOTE_ALL)
                    writer.writeheader()
                    writer.writerows(archive_data)
                logger.info(f"CSVファイル作成に成功しました。{file_name}.csv")
                # Compress the CSV into a zip archive.
                zip_file_path = path.join(temporary_dir, f"{file_name}.zip")
                with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
                    # FIX: pass arcname so the archive contains only the CSV
                    # file name; without it the zip entry carries the full
                    # temporary-directory path.
                    zipf.write(csv_file_path, arcname=f"{file_name}.csv")
                logger.info(f"zip形式への圧縮に成功しました。{file_name}.zip")
                # Upload the zip to the configured archive destination.
                archive_bucket = JskultArchiveBucket()
                upload_file_path = archive_bucket.upload_archive_zip_file(
                    f"{file_name}.zip", zip_file_path, jskult_archive_manage_data["archive_storage"])
                logger.info(f"{upload_file_path}へのアップロードに成功しました。")
            # Remove the archived rows from the database.
            jskult_archive_manager.delete_archive_data(
                jskult_archive_manage_data["target_table"],
                jskult_archive_manage_data["filter_column"],
                jskult_archive_manage_data["filter_date"])
            logger.info(
                f"アーカイブしたデータのDBから削除に成功しました。対象テーブル{jskult_archive_manage_data['target_table']} 条件年月:{jskult_archive_manage_data['filter_date']}")
            # Advance the archive-manage row for the next run.
            jskult_archive_manager.update_archive_manage(
                jskult_archive_manage_data["target_table"])
            logger.info(
                f"アーカイブ管理テーブルの更新に成功しました。対象テーブル:{jskult_archive_manage_data['target_table']}")
        logger.info("処理終了:実消化_過去データアーカイブ処理")
    except Exception as e:
        logger.exception(f"異常終了:実消化_過去データアーカイブ処理 {e}")

View File

@ -0,0 +1,112 @@
from src.db.database import Database
from src.logging.get_logger import get_logger
logger = get_logger("アーカイブ管理テーブル操作")


class JskultArchiveManager:
    """Data access for the archive-manage table and the archive targets.

    Every method opens a connection, runs its statement(s), and always
    disconnects in ``finally``; write operations run in an explicit
    transaction and roll back on failure.
    """

    _db: Database = None

    def __init__(self):
        self._db = Database.get_instance()

    def get_archive_manage(self):
        """Fetch target table, filter column, filter date, run interval
        (months), previous filter date and storage destination for every
        archive-manage row."""
        try:
            logger.info("処理開始get_archive_manage")
            sql = """
                select
                    target_table
                    , filter_column
                    , filter_date
                    , run_interval_months
                    , prev_filter_date
                    , archive_storage
                from
                    internal07.jskult_archive_manage;
            """
            self._db.connect()
            jskult_archive_manage_data = self._db.execute_select(sql)
            logger.info("処理終了get_archive_manage")
            return jskult_archive_manage_data
        except Exception:
            logger.info("異常終了get_archive_manage")
            raise
        finally:
            self._db.disconnect()

    def get_archive_data(self, target_table: str, filter_column: str, filter_date: str):
        """Select all rows of ``target_table`` whose ``filter_column`` date
        is at or before ``filter_date``.

        NOTE(review): table and column names cannot be bound parameters, so
        they are interpolated into the SQL. Both values come from the
        internal archive-manage table, not user input — confirm that table
        stays trusted.
        """
        try:
            logger.info("処理開始get_archive_data")
            sql = f"""
                select
                    *
                from
                    src07.{target_table}
                where
                    str_to_date({filter_column},'%Y%m%d') <= :filter_date;
            """
            self._db.connect()
            parameter_dict = {'filter_date': filter_date}
            target_table_data = self._db.execute_select(sql, parameter_dict)
            logger.info("処理終了get_archive_data")
            return target_table_data
        except Exception:
            logger.info("異常終了get_archive_data")
            raise
        finally:
            self._db.disconnect()

    def delete_archive_data(self, target_table: str, filter_column: str, filter_date: str):
        """Delete the archived rows (same predicate as ``get_archive_data``)
        inside a transaction; rolls back and re-raises on failure."""
        try:
            logger.info("処理開始delete_archive_data")
            # NOTE(review): identifiers interpolated as in get_archive_data;
            # values originate from the trusted archive-manage table.
            sql = f"""
                delete from
                    src07.{target_table}
                where
                    str_to_date({filter_column},'%Y%m%d') <= :filter_date;
            """
            self._db.connect()
            self._db.begin()
            parameter_dict = {'filter_date': filter_date}
            self._db.execute(sql, parameter_dict)
            self._db.commit()
            logger.info("処理終了delete_archive_data")
            return
        except Exception:
            self._db.rollback()
            logger.info("異常終了delete_archive_data")
            raise
        finally:
            self._db.disconnect()

    def update_archive_manage(self, target_table: str):
        """Advance the archive-manage row for ``target_table``: copy the
        current filter date into prev_filter_date and move filter_date to
        the month-end run_interval_months months later."""
        try:
            logger.info("処理開始update_archive_manage")
            # FIX: bind target_table as a parameter instead of interpolating
            # it into the SQL string — consistent with delete_archive_data
            # and robust against quoting issues.
            sql = """
                update internal07.jskult_archive_manage
                set
                    prev_filter_date = filter_date
                    , filter_date = LAST_DAY(
                        DATE_ADD(filter_date, INTERVAL run_interval_months MONTH)
                    )
                    , upd_user = CURRENT_USER()
                    , upd_date = NOW()
                where
                    target_table = :target_table;
            """
            self._db.connect()
            self._db.begin()
            self._db.execute(sql, {'target_table': target_table})
            self._db.commit()
            logger.info("処理終了update_archive_manage")
            return
        except Exception:
            self._db.rollback()
            logger.info("異常終了update_archive_manage")
            raise
        finally:
            self._db.disconnect()

View File

@ -0,0 +1,198 @@
from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
create_engine, text)
from sqlalchemy.engine.url import URL
from src.error.exceptions import DBException
from src.logging.get_logger import get_logger
from src.system_var import environment
from tenacity import retry, stop_after_attempt, wait_exponential
logger = get_logger(__name__)
class Database:
    """Database access wrapper (SQLAlchemy + pymysql, MySQL)."""

    # Active connection; None while disconnected.
    __connection: Connection = None
    # Engine for ordinary transactional connections.
    __transactional_engine: Engine = None
    # Same engine configured with AUTOCOMMIT isolation.
    __autocommit_engine: Engine = None
    __host: str = None
    __port: str = None
    __username: str = None
    __password: str = None
    __schema: str = None
    __autocommit: bool = None
    __connection_string: str = None

    def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None:
        """Initialize a new instance of this class.

        Args:
            username (str): DB user name
            password (str): DB password
            host (str): DB host name
            port (int): DB port
            schema (str): DB schema name
            autocommit (bool): whether to connect in autocommit mode (when True,
                every statement is committed immediately, with or without an
                explicit transaction). Defaults to False.
        """
        self.__username = username
        self.__password = password
        self.__host = host
        self.__port = int(port)
        self.__schema = schema
        self.__autocommit = autocommit
        self.__connection_string = URL.create(
            drivername='mysql+pymysql',
            username=self.__username,
            password=self.__password,
            host=self.__host,
            port=self.__port,
            database=self.__schema,
            query={"charset": "utf8mb4", "local_infile": "1"},
        )
        self.__transactional_engine = create_engine(
            self.__connection_string,
            pool_timeout=5,
            poolclass=QueuePool
        )
        # The autocommit engine shares the pool; only the isolation level differs.
        self.__autocommit_engine = self.__transactional_engine.execution_options(
            isolation_level='AUTOCOMMIT')

    @classmethod
    def get_instance(cls, autocommit=False):
        """Create an instance configured from environment variables.

        Args:
            autocommit (bool, optional): whether to connect in autocommit mode
                (when True, every statement is committed immediately). Defaults to False.

        Returns:
            Database: DB access class instance
        """
        return cls(
            username=environment.DB_USERNAME,
            password=environment.DB_PASSWORD,
            host=environment.DB_HOST,
            port=environment.DB_PORT,
            schema=environment.DB_SCHEMA,
            autocommit=autocommit
        )

    # NOTE(review): tenacity's retry_error_cls normally expects a RetryError
    # subclass; after the retries are exhausted DBException will be constructed
    # with the last attempt (a Future) as its argument — confirm intended.
    @retry(
        wait=wait_exponential(
            multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT,
            min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS,
            max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS
        ),
        stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT),
        retry_error_cls=DBException
    )
    def connect(self):
        """
        Connect to the DB, retrying with exponential backoff on failure.\n
        When this instance's autocommit is True, connect in autocommit mode
        (explicit transactions are ignored as well).

        Raises:
            DBException: connection failure
        """
        try:
            self.__connection = (
                self.__autocommit_engine.connect() if self.__autocommit is True
                else self.__transactional_engine.connect())
        except Exception as e:
            raise DBException(e)

    def execute_select(self, select_query: str, parameters=None) -> list[dict]:
        """Execute a SELECT query.

        Args:
            select_query (str): SELECT statement
            parameters (dict, optional): values bound to the query placeholders. Defaults to None.

        Raises:
            DBException: DB error

        Returns:
            list[dict]: list of {column name: value} mappings
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        result = None
        try:
            # Reuse the transaction when one has been started explicitly.
            if self.__connection.in_transaction():
                result = self.__connection.execute(
                    text(select_query), parameters)
            else:
                # Otherwise begin/commit a transaction around this single query.
                result = self.__execute_with_transaction(
                    select_query, parameters)
        except Exception as e:
            raise DBException(f'SQL Error: {e}')
        result_rows = result.mappings().all()
        return result_rows

    def execute(self, query: str, parameters=None) -> CursorResult:
        """Execute an SQL statement.

        Args:
            query (str): SQL statement
            parameters (dict, optional): values bound to the query placeholders. Defaults to None.

        Raises:
            DBException: DB error

        Returns:
            CursorResult: execution result
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        result = None
        try:
            # Reuse the transaction when one has been started explicitly.
            if self.__connection.in_transaction():
                result = self.__connection.execute(text(query), parameters)
            else:
                # Otherwise begin/commit a transaction around this single query.
                result = self.__execute_with_transaction(query, parameters)
        except Exception as e:
            raise DBException(f'SQL Error: {e}')
        return result

    def begin(self):
        """Begin a transaction (no-op when one is already active)."""
        if not self.__connection.in_transaction():
            self.__connection.begin()

    def commit(self):
        """Commit the active transaction (no-op when none is active)."""
        if self.__connection.in_transaction():
            self.__connection.commit()

    def rollback(self):
        """Roll back the active transaction (no-op when none is active)."""
        if self.__connection.in_transaction():
            self.__connection.rollback()

    def disconnect(self):
        """Close the DB connection and clear the reference."""
        if self.__connection is not None:
            self.__connection.close()
            self.__connection = None

    def to_jst(self):
        # Switch the session time zone to JST (UTC+9) for subsequent statements.
        self.execute('SET time_zone = "+9:00"')

    def __execute_with_transaction(self, query: str, parameters: dict):
        # Run the query inside its own begin/commit transaction.
        with self.__connection.begin():
            try:
                result = self.__connection.execute(
                    text(query), parameters=parameters)
            except Exception as e:
                # NOTE(review): the context manager already rolls back on
                # exception; this explicit rollback looks redundant — confirm.
                self.__connection.rollback()
                raise e
            # Committed here by the context manager.
        return result

View File

@ -0,0 +1,6 @@
class MeDaCaException(Exception):
    """Base class for all MeDaCa batch exceptions."""


class DBException(MeDaCaException):
    """Raised when a database connection or query operation fails."""

View File

@ -0,0 +1,37 @@
import logging
from src.system_var.environment import LOG_LEVEL
# Raise the log level of boto3-related modules up front so their DEBUG output
# is suppressed regardless of the application's LOG_LEVEL.
for name in ["boto3", "botocore", "s3transfer", "urllib3"]:
    logging.getLogger(name).setLevel(logging.WARNING)
def get_logger(log_name: str) -> logging.Logger:
    """Return a uniquely-named, configured logger.

    Args:
        log_name (str): logger name (also printed in every log line).

    Returns:
        logging.Logger: logger whose level comes from the LOG_LEVEL setting
        (falls back to INFO when LOG_LEVEL is not a recognized level name),
        emitting tab-separated records via a StreamHandler.
    """
    logger = logging.getLogger(log_name)
    # getLevelName returns an int for known level names, a string otherwise.
    level = logging.getLevelName(LOG_LEVEL)
    if not isinstance(level, int):
        level = logging.INFO
    logger.setLevel(level)
    # NOTE(review): hasHandlers() also considers ancestor loggers, so no local
    # handler is added when the root logger is already configured — confirm
    # that is the intended behavior.
    if not logger.hasHandlers():
        handler = logging.StreamHandler()
        logger.addHandler(handler)
    formatter = logging.Formatter(
        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
        '%Y-%m-%d %H:%M:%S'
    )
    # Re-apply the formatter on every call so all handlers stay consistent.
    for handler in logger.handlers:
        handler.setFormatter(formatter)
    return logger

View File

@ -0,0 +1,2 @@
# Exit code for a successful batch run.
BATCH_EXIT_CODE_SUCCESS = 0

View File

@ -0,0 +1,22 @@
import os

# Database settings (required; raises KeyError when unset).
DB_HOST = os.environ['DB_HOST']
DB_PORT = int(os.environ['DB_PORT'])
DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ['DB_SCHEMA']
# AWS settings (required).
JSKULT_ARCHIVE_BUCKET = os.environ['JSKULT_ARCHIVE_BUCKET']
# Optional settings with defaults.
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(
    os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
# Fix: the .env files define DB_CONNECTION_RETRY_INTERVAL_INIT /
# DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS / DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS,
# but this module previously read shorter names (DB_CONNECTION_RETRY_INTERVAL,
# DB_CONNECTION_RETRY_MIN_SECONDS, DB_CONNECTION_RETRY_MAX_SECONDS), so the
# configured values were silently ignored and the defaults always applied.
# Read the documented name first and keep the legacy name as a fallback.
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get(
    'DB_CONNECTION_RETRY_INTERVAL_INIT',
    os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get(
    'DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS',
    os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get(
    'DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS',
    os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)))

View File

@ -1,29 +1,24 @@
DB_HOST=************
DB_PORT=************
DB_USERNAME=************
DB_PASSWORD=************
DB_HOST=******************
DB_PORT=*****************
DB_USERNAME=*************
DB_PASSWORD=*************
DB_SCHEMA=src05
JSK_IO_BUCKET=mbj-newdwh2021-staging-jskult-io
JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult
BATCH_MANAGE_DYNAMODB_TABLE_NAME=mbj-newdwh2021-staging-jskult-batch-run-manage
BATCH_EXECUTION_ID=localtest
MAX_RUN_COUNT=3
LOG_LEVEL=INFO
# 処理名: 起動する処理に応じて変更する
PROCESS_NAME=*************
BATCH_MANAGE_DYNAMODB_TABLE_NAME=****************
ULTMARC_DATA_BUCKET=****************
ULTMARC_DATA_FOLDER=recv
JSKULT_BACKUP_BUCKET=****************
ULTMARC_BACKUP_FOLDER=ultmarc
VJSK_BACKUP_FOLDER=vjsk
JSKULT_CONFIG_BUCKET=**********************
JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar
JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt
VJSK_DATA_SEND_FOLDER=send
VJSK_DATA_RECEIVE_FOLDER=recv
VJSK_DATA_BUCKET=*************
JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt
JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert
JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json
# 連携データ抽出期間
SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0
# 洗替対象テーブル名
SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau
# 卸実績洗替で作成するデータの期間(年単位)
SALES_LAUNDERING_TARGET_YEAR_OFFSET=5
PROCESS_NAME=jskult-batch-dcf-inst-merge-io
JSK_DATA_SEND_FOLDER=send
JSK_BACKUP_FOLDER=jsk/send
TRANSFER_RESULT_FOLDER=transfer_result
TRANSFER_RESULT_FILE_NAME=transfer_result.json
DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv
JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config
# DB接続リトライ設定
DB_CONNECTION_MAX_RETRY_ATTEMPT=1
DB_CONNECTION_RETRY_INTERVAL_INIT=1
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1

View File

@ -10,7 +10,8 @@
"request": "launch",
"program": "entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true
"justMyCode": true,
"envFile": "${workspaceFolder}/.env"
}
]
}

View File

@ -1,11 +1,7 @@
import gzip
import os
import os.path as path
import shutil
import tempfile
import boto3
from src.system_var import environment
@ -14,7 +10,8 @@ class S3Client:
_bucket_name: str
def list_objects(self, bucket_name: str, folder_name: str):
response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name)
response = self.__s3_client.list_objects_v2(
Bucket=bucket_name, Prefix=folder_name)
if response['KeyCount'] == 0:
return []
contents = response['Contents']
@ -55,61 +52,45 @@ class S3Bucket():
_bucket_name: str = None
class UltmarcBucket(S3Bucket):
_bucket_name = environment.ULTMARC_DATA_BUCKET
_folder = environment.ULTMARC_DATA_FOLDER
def list_dat_file(self):
return self._s3_client.list_objects(self._bucket_name, self._folder)
def download_dat_file(self, dat_filename: str):
# 一時ファイルとして保存する
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}')
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, dat_filename, f)
f.seek(0)
return temporary_file_path
def backup_dat_file(self, dat_file_key: str, datetime_key: str):
# バックアップバケットにコピー
ultmarc_backup_bucket = UltmarcBackupBucket()
backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}'
self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key)
# コピー元のファイルを削除
self._s3_client.delete_file(self._bucket_name, dat_file_key)
class ConfigBucket(S3Bucket):
# TODO 日付更新処理で内容の修正を行う
_bucket_name = environment.JSKULT_CONFIG_BUCKET
def download_holiday_list(self):
# 一時ファイルとして保存する
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME)
temporary_file_path = path.join(
temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME)
holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}'
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, holiday_list_key, f)
self._s3_client.download_file(
self._bucket_name, holiday_list_key, f)
f.seek(0)
return temporary_file_path
def download_wholesaler_stock_input_day_list(self):
# 一時ファイルとして保存する
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME)
wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}'
temporary_file_path = path.join(
temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME)
wholesaler_stock_input_day_list_key = \
f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}'
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f)
self._s3_client.download_file(
self._bucket_name, wholesaler_stock_input_day_list_key, f)
f.seek(0)
return temporary_file_path
def download_ultmarc_hex_convert_config(self):
# 一時ファイルとして保存する
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME)
temporary_file_path = path.join(
temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME)
hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}'
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f)
self._s3_client.download_file(
self._bucket_name, hex_convert_config_key, f)
f.seek(0)
return temporary_file_path
@ -118,68 +99,42 @@ class JskUltBackupBucket(S3Bucket):
_bucket_name = environment.JSKULT_BACKUP_BUCKET
class UltmarcBackupBucket(JskUltBackupBucket):
_folder = environment.ULTMARC_BACKUP_FOLDER
class JskBackupBucket(JskUltBackupBucket):
_folder = environment.JSKULT_BACKUP_BUCKET
class VjskBackupBucket(JskUltBackupBucket):
_folder = environment.VJSK_BACKUP_FOLDER
class JskTransferListBucket(JskUltBackupBucket):
_folder = environment.TRANSFER_RESULT_FOLDER
class VjskReceiveBucket(S3Bucket):
_bucket_name = environment.VJSK_DATA_BUCKET
_recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER
_s3_file_list = None
def get_s3_file_list(self):
self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder)
return self._s3_file_list
def download_data_file(self, data_filename: str):
def download_transfer_result_file(self, process_date_yyyymmdd: str):
file_name = environment.TRANSFER_RESULT_FILE_NAME
# 一時ファイルとして保存する
temporary_dir = tempfile.mkdtemp()
temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}')
temporary_file_path = path.join(
temporary_dir, file_name)
holiday_list_key = f'{self._folder}/{process_date_yyyymmdd}/{file_name}'
with open(temporary_file_path, mode='wb') as f:
self._s3_client.download_file(self._bucket_name, data_filename, f)
self._s3_client.download_file(
self._bucket_name, holiday_list_key, f)
f.seek(0)
return temporary_file_path
def unzip_data_file(self, filename: str):
temp_dir = os.path.dirname(filename)
decompress_filename = os.path.basename(filename).replace('.gz', '')
decompress_file_path = os.path.join(temp_dir, decompress_filename)
with gzip.open(filename, 'rb') as gz:
with open(decompress_file_path, 'wb') as decompressed_file:
shutil.copyfileobj(gz, decompressed_file)
ret = [decompress_file_path]
return ret
class JskSendBucket(S3Bucket):
_bucket_name = environment.JSK_IO_BUCKET
_send_folder = environment.JSK_DATA_SEND_FOLDER
def backup_dat_file(self, target_files: list, datetime_key: str):
jskult_backup_bucket = VjskBackupBucket()
for target_file in target_files:
backup_from_file_path = target_file.get("filename")
backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "")
backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}'
self._s3_client.copy(self._bucket_name, backup_from_file_path,
jskult_backup_bucket._bucket_name, backup_key)
self._s3_client.delete_file(self._bucket_name, backup_from_file_path)
class VjskSendBucket(S3Bucket):
_bucket_name = environment.VJSK_DATA_BUCKET
_send_folder = environment.VJSK_DATA_SEND_FOLDER
def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str):
def upload_dcf_inst_merge_csv_file(self, jskult_create_csv: str, csv_file_path: str):
# S3バケットにファイルを移動
csv_file_name = f'{self._send_folder}/{vjsk_create_csv}'
s3_client = S3Client()
s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name)
csv_file_name = f'{self._send_folder}/{jskult_create_csv}'
self._s3_client.upload_file(
csv_file_path, self._bucket_name, csv_file_name)
return
def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str):
def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str):
# バックアップバケットにコピー
vjsk_backup_bucket = VjskBackupBucket()
jskult_backup_bucket = JskUltBackupBucket()
dat_key = f'{self._send_folder}/{dat_file_key}'
backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key)
backup_key = f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}'
self._s3_client.copy(self._bucket_name, dat_key,
jskult_backup_bucket._bucket_name, backup_key)

View File

@ -1,4 +1,20 @@
import csv
import json
import os.path as path
import tempfile
from src.aws.s3 import JskSendBucket, JskTransferListBucket
from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint
from src.db.database import Database
from src.error.exceptions import (BatchOperationException,
MaxRunCountReachedException)
from src.logging.get_logger import get_logger
from src.manager.jskult_batch_run_manager import JskultBatchRunManager
from src.manager.jskult_batch_status_manager import JskultBatchStatusManager
from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager
from src.system_var import environment
logger = get_logger('DCF削除新規マスタ作成')
class DcfInstMergeIO(JskultBatchEntrypoint):
@ -6,5 +22,280 @@ class DcfInstMergeIO(JskultBatchEntrypoint):
super().__init__()
def execute(self):
    """Create the DCF delete/new (merge) master and export it as CSV to S3.

    Flow: check run preconditions -> download the transfer-result list ->
    insert merge rows from COM_INST (when the Ultmarc import has run) ->
    dump the whole merge master to CSV -> upload and back it up.
    The outcome is registered in the batch-run-manage table
    (success / retry / failed) and in the batch-status manager.
    """
    logger.info("DCF削除新規マスタ作成処理を開始します。")
    jskult_hdke_tbl_manager = JskultHdkeTblManager()
    jskult_batch_run_manager = JskultBatchRunManager(
        environment.BATCH_EXECUTION_ID)
    if not jskult_hdke_tbl_manager.can_run_process():
        logger.error(
            '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。')
        # Register the run as "failed" in the batch-run-manage table.
        jskult_batch_run_manager.batch_failed()
        return
    # Business date of this run.
    _, _, process_date = jskult_hdke_tbl_manager.get_batch_statuses()
    # Download the transfer-result list to derive the received-file count.
    try:
        transfer_list_bucket = JskTransferListBucket()
        transfer_list_file_path = transfer_list_bucket.download_transfer_result_file(
            process_date)
    except Exception as e:
        logger.exception(f'転送ファイル一覧の取得に失敗しました。 {e}')
        # Register the run as "failed" in the batch-run-manage table.
        jskult_batch_run_manager.batch_failed()
        # Fix: without this return the code fell through and raised a
        # NameError on the undefined transfer_list_file_path.
        return
    with open(transfer_list_file_path) as f:
        transfer_list = json.load(f)
    # Received file count = JSK transfers + Ultmarc transfers.
    receive_file_count = len(
        transfer_list['jsk_transfer_list']) + len(transfer_list['ult_transfer_list'])
    jskult_batch_status_manager = JskultBatchStatusManager(
        environment.PROCESS_NAME,
        # TODO replace with the constant introduced by ticket NEWDWH2021-1847
        'post_process',
        environment.MAX_RUN_COUNT,
        receive_file_count
    )
    try:
        jskult_batch_status_manager.set_process_status("start")
        try:
            if not jskult_batch_status_manager.can_run_post_process():
                # Start conditions for the post-process are not met yet:
                # set status to "waiting" and register the run as "retry".
                jskult_batch_status_manager.set_process_status("waiting")
                jskult_batch_run_manager.batch_retry()
                return
        except MaxRunCountReachedException:
            logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。')
        jskult_batch_status_manager.set_process_status("doing")
        # Build the DCF merge master only when the Ultmarc import has run.
        if jskult_batch_status_manager.is_done_ultmarc_import():
            # Fix: the bound method was previously called with an extra
            # explicit `self` argument (TypeError at runtime).
            (is_add_dcf_inst_merge,
             duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst()
            if is_add_dcf_inst_merge:
                self._output_add_dcf_inst_merge_log(
                    duplication_inst_records)
        # Dump the whole merge master to CSV.
        dcf_inst_merge_all_records = self._select_dcf_inst_merge_all()
        file_path = self._make_csv_data(
            environment.DCF_INST_MERGE_SEND_FILE_NAME,
            dcf_inst_merge_all_records)
        # Upload the CSV to S3 and back it up.
        # Fix: the arguments were previously passed in the wrong order —
        # the signature is (csv_file_name, process_date, csv_file_path).
        self._upload_dcf_inst_merge_csv_file(
            environment.DCF_INST_MERGE_SEND_FILE_NAME, process_date, file_path)
        # All steps succeeded: register the run as "success".
        logger.info("DCF削除新規マスタ作成処理を正常終了します。")
        jskult_batch_run_manager.batch_success()
        jskult_batch_status_manager.set_process_status("done")
        return
    except Exception as e:
        # Any unexpected error: register the run as "failed".
        logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}')
        jskult_batch_run_manager.batch_failed()
        jskult_batch_status_manager.set_process_status("failed")
def _select_dcf_inst_merge_all(self) -> list[dict]:
    """Select every row of src07.dcf_inst_merge.

    Fix: the return annotation previously claimed tuple[bool, list[dict]],
    but the method returns the record list only.

    Returns:
        list[dict]: all merge-master rows ({column name: value}).

    Raises:
        BatchOperationException: when connecting or querying fails.
    """
    try:
        self._db = Database.get_instance()
        self._db.connect()
        sql = """\
            SELECT
                *
            FROM
                src07.dcf_inst_merge
            """
        dcf_inst_merge_all_records = self._db.execute_select(sql)
        return dcf_inst_merge_all_records
    except Exception as e:
        raise BatchOperationException(e)
    finally:
        self._db.disconnect()
# Insert duplicated institutions from com_inst into dcf_inst_merge.
def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]:
    """Register delete-scheduled duplicate institutions from src05.COM_INST
    into src07.dcf_inst_merge.

    Selects COM_INST rows flagged as duplicates ('D' delete reason, DUP_OPP_CD
    set, updated since the last run) that are not yet in dcf_inst_merge and are
    referenced by MST_INST_ASSN, ATC_PHARM or TRN_RESULT_DATA, then bulk-inserts
    them inside a transaction.

    Returns:
        tuple[bool, list[dict]]: (True when rows were inserted, the selected rows).
        Fix: previously always returned True even when nothing was selected,
        which made the caller's logging step fail on an empty list.

    Raises:
        BatchOperationException: when any DB operation fails (rolled back).
    """
    try:
        self._db = Database.get_instance()
        self._db.connect()
        self._db.begin()
        self._db.to_jst()
        # Fix: the original predicate used the invalid form
        # "ci.DCF_DSF_INST_CD EXISTS(...)" and, due to operator precedence,
        # the OR terms escaped the AND chain. Rewritten as a parenthesized
        # group of plain EXISTS subqueries.
        sql = """\
            SELECT
                ci.DCF_DSF_INST_CD,
                ci.FORM_INST_NAME_KANJI,
                ci.DELETE_SCHE_REASON_CD,
                ci.DUP_OPP_CD,
                ci.SYS_UPDATE_DATE
            FROM
                src05.COM_INST AS ci
            WHERE
                ci.DUP_OPP_CD IS NOT NULL
            AND
                ci.DELETE_SCHE_REASON_CD = 'D'
            AND
                ci.DELETE_DATA IS NULL
            AND
                ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW()
            AND
                NOT EXISTS (
                    SELECT
                        dim.DCF_INST_CD
                    FROM
                        src07.DCF_INST_MERGE AS dim
                    WHERE
                        dim.DCF_INST_CD = ci.DCF_DSF_INST_CD
                )
            AND
                (
                    EXISTS (
                        SELECT
                            mia.INST_CD
                        FROM
                            src07.MST_INST_ASSN as mia
                        WHERE
                            mia.INST_CD = ci.DCF_DSF_INST_CD
                    )
                    OR EXISTS (
                        SELECT
                            ap.PRSB_INST_CD
                        FROM
                            src07.ATC_PHARM AS ap
                        WHERE
                            ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD
                    )
                    OR EXISTS (
                        SELECT
                            trd.INST_CD
                        FROM
                            src07.TRN_RESULT_DATA AS trd
                        WHERE
                            trd.INST_CD = ci.DCF_DSF_INST_CD
                    )
                )
            ;
            """
        duplication_inst_records = self._db.execute_select(sql)
        # Fix: an INSERT with an empty VALUES list is a syntax error —
        # skip the insert entirely when nothing was selected.
        if not duplication_inst_records:
            return (False, [])
        # Build one parameterized VALUES clause per selected row.
        values_clauses = []
        params = {}
        for clauses_no, row in enumerate(duplication_inst_records, start=1):
            dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}"
            dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}"
            # Fixes: DATE_FORMAT was missing its format argument (it swallowed
            # the rest of the value list), and batchuser was an unquoted
            # identifier instead of a string literal.
            # START_MONTH = next month in YYYYMM form — TODO confirm the
            # expected format against the column definition.
            values_clause = f"""(
                :{dcf_inst_cd_arr},
                :{dup_opp_cd_arr},
                DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), '%Y%m'),
                NULL,
                NULL,
                NULL,
                "Y",
                'batchuser',
                SYSDATE(),
                'batchuser',
                SYSDATE()
            )"""
            values_clauses.append(values_clause)
            params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD']
            params[dup_opp_cd_arr] = row['DUP_OPP_CD']
        insert_sql = f"""
            INSERT INTO
                src07.dcf_inst_merge (
                    DCF_INST_CD,
                    DUP_OPP_CD,
                    START_MONTH,
                    INVALID_FLG,
                    REMARKS,
                    DCF_INST_CD_NEW,
                    ENABLED_FLG,
                    CREATER,
                    CREATE_DATE,
                    UPDATER,
                    UPDATE_DATE
                )
            VALUES
                {','.join(values_clauses)}
            """
        self._db.execute(insert_sql, params)
        # Fix: the transaction was begun but never committed, so the insert
        # was discarded when the connection closed.
        self._db.commit()
        return (True, duplication_inst_records)
    except Exception as e:
        self._db.rollback()
        raise BatchOperationException(e)
    finally:
        self._db.disconnect()
def _output_add_dcf_inst_merge_log(self, duplication_inst_records: list[dict]):
    """Log the rows added to the DCF institution-merge master for customer
    reporting.

    Fixes: the method is invoked as self._output_add_dcf_inst_merge_log(...),
    but the signature was missing `self`; the row keys are the uppercase
    column names of the SELECT (lowercase lookups raised KeyError).

    Args:
        duplication_inst_records (list[dict]): rows returned by
            _insert_dcf_inst_merge_from_com_inst (must be non-empty).
    """
    # Derive the applicable year-month from the first row's SYS_UPDATE_DATE.
    # NOTE(review): this slicing assumes the value is a 'YYYY...MM'-shaped
    # string — confirm the driver does not return a datetime here.
    sys_update_date = duplication_inst_records[0]['SYS_UPDATE_DATE']
    set_year_month = '{set_year}{set_month}'.format(
        set_year=sys_update_date[0:4],
        set_month=sys_update_date[-2:]
    )
    add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}'
    add_dct_inst_merge_list = []
    for row in duplication_inst_records:
        add_dct_inst_merge_list.append(
            add_dct_inst_merge.format(
                dcf_dsf_inst_cd=row['DCF_DSF_INST_CD'],
                form_inst_name_kanji=row['FORM_INST_NAME_KANJI'],
                dup_opp_cd=row['DUP_OPP_CD'],
                # DUP_INST_NAME_KANJI is not part of the SELECT — defaults to
                # empty; TODO confirm where this name should come from.
                dup_inst_name_kanji=row.get('DUP_INST_NAME_KANJI', '')))
    add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list)
    # Customer-facing report output.
    logger.info(
        f"""DCF施設統合マスタが追加されました。
**********************************************************
適用月度 {set_year_month}
**********************************************************
{add_dct_inst_merge_list}
**********************************************************
合計 {len(duplication_inst_records)}"""
    )
    return
def _make_csv_data(csv_file_name: str, record_inst: list):
temporary_dir = tempfile.mkdtemp()
csv_file_path = path.join(temporary_dir, csv_file_name)
head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH',
'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG',
'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE']
with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file:
# ヘッダ行書き込みくくり文字をつけない為にwriterowではなく、writeを使用しています
csv_file.write(f"{','.join(head_str)}\n")
# UTF-8、CRLF、価囲いありで書き込む
writer = csv.writer(csv_file, delimiter=',', lineterminator='\r\n',
quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL,
strict=True
)
# データ部分書き込み(施設)
for record_inst_data in record_inst:
record_inst_value = list(record_inst_data.values())
csv_data = [
'' if n is None else n for n in record_inst_value]
writer.writerow(csv_data)
return csv_file_path
def _upload_dcf_inst_merge_csv_file(self, csv_file_name, process_date, csv_file_path):
    """Upload the generated merge-master CSV to the JSK send bucket, then
    copy it to the backup location keyed by the business date.

    Args:
        csv_file_name: object name used within the send folder.
        process_date: business date (yyyymmdd) used as the backup key.
        csv_file_path: local path of the CSV to upload.
    """
    send_bucket = JskSendBucket()
    # Push the file into the S3 send folder.
    send_bucket.upload_dcf_inst_merge_csv_file(csv_file_name, csv_file_path)
    # Archive a copy under the business-date backup prefix.
    send_bucket.backup_dcf_inst_merge_csv_file(csv_file_name, process_date)
    return

View File

@ -7,15 +7,29 @@ DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ['DB_SCHEMA']
# 処理名
# AWS
JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET']
BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID']
MAX_RUN_COUNT = int(os.environ['MAX_RUN_COUNT'])
TRANSFER_RESULT_FOLDER = os.environ['TRANSFER_RESULT_FOLDER']
TRANSFER_RESULT_FILE_NAME = os.environ['TRANSFER_RESULT_FILE_NAME']
DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME']
PROCESS_NAME = os.environ['PROCESS_NAME']
JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET']
JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET']
JSK_BACKUP_FOLDER = os.environ['JSK_BACKUP_FOLDER']
JSK_DATA_SEND_FOLDER = os.environ['JSK_DATA_SEND_FOLDER']
# AWS
BATCH_MANAGE_DYNAMODB_TABLE_NAME = os.environ.get('BATCH_MANAGE_DYNAMODB_TABLE_NAME')
# 初期値がある環境変数
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(
os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
DB_CONNECTION_RETRY_INTERVAL_INIT = int(
os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(
os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(
os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))

0
ecs/jskult-batch/test.py Normal file
View File

View File

@ -1,15 +1,15 @@
FROM python:3.9
FROM python:3.12-slim-bookworm
ENV TZ="Asia/Tokyo"
# pythonの標準出力をバッファリングしないフラグ
ENV PYTHONUNBUFFERED=1
# pythonのバイトコードを生成しないフラグ
ENV PYTHONDONTWRITEBYTECODE=1
WORKDIR /usr/src/app
COPY Pipfile Pipfile.lock ./
RUN \
apt update -y && \
# パッケージのセキュリティアップデートのみを適用するコマンド
apt install -y unattended-upgrades && \
unattended-upgrades && \
pip install --upgrade pip wheel setuptools && \
pip install pipenv --no-cache-dir && \
pipenv install --system --deploy && \
pip uninstall -y pipenv virtualenv-clone virtualenv

View File

@ -26,14 +26,14 @@ openpyxl = "*"
xlrd = "*"
sqlalchemy = "==2.*"
mojimoji = "*"
numpy = "==2.0.*"
numpy = "==2.2.*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
[requires]
python_version = "3.9"
python_version = "3.12"
[pipenv]
allow_prereleases = true

View File

@ -1,11 +1,11 @@
{
"_meta": {
"hash": {
"sha256": "f727e8be45822a45479f4b39e614a35fe2d493378fef76db529e9ce4e452979d"
"sha256": "5ce8ef42345c4fd4dad84cb149002b3c9e0eda0d57b189b10284495378c8f499"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
"python_version": "3.12"
},
"sources": [
{
@ -34,20 +34,20 @@
},
"boto3": {
"hashes": [
"sha256:6633bce2b73284acce1453ca85834c7c5a59e0dbcce1170be461cc079bdcdfcf",
"sha256:668400d13889d2d2fcd66ce785cc0b0fc040681f58a9c7f67daa9149a52b6c63"
"sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0",
"sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==1.38.13"
"version": "==1.38.23"
},
"botocore": {
"hashes": [
"sha256:22feee15753cd3f9f7179d041604078a1024701497d27b22be7c6707e8d13ccb",
"sha256:de29fee43a1f02787fb5b3756ec09917d5661ed95b2b2d64797ab04196f69e14"
"sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a",
"sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457"
],
"markers": "python_version >= '3.9'",
"version": "==1.38.13"
"version": "==1.38.23"
},
"certifi": {
"hashes": [
@ -230,54 +230,54 @@
},
"click": {
"hashes": [
"sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2",
"sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"
"sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202",
"sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"
],
"markers": "python_version >= '3.7'",
"version": "==8.1.8"
"markers": "python_version >= '3.10'",
"version": "==8.2.1"
},
"cryptography": {
"hashes": [
"sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259",
"sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43",
"sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645",
"sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8",
"sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44",
"sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d",
"sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f",
"sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d",
"sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54",
"sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9",
"sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137",
"sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f",
"sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c",
"sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334",
"sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c",
"sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b",
"sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2",
"sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375",
"sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88",
"sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5",
"sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647",
"sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c",
"sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359",
"sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5",
"sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d",
"sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028",
"sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01",
"sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904",
"sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d",
"sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93",
"sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06",
"sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff",
"sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76",
"sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff",
"sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759",
"sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4",
"sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"
"sha256:00094838ecc7c6594171e8c8a9166124c1197b074cfca23645cee573910d76bc",
"sha256:050ce5209d5072472971e6efbfc8ec5a8f9a841de5a4db0ebd9c2e392cb81972",
"sha256:232954730c362638544758a8160c4ee1b832dc011d2c41a306ad8f7cccc5bb0b",
"sha256:25286aacb947286620a31f78f2ed1a32cded7be5d8b729ba3fb2c988457639e4",
"sha256:2f8f8f0b73b885ddd7f3d8c2b2234a7d3ba49002b0223f58cfde1bedd9563c56",
"sha256:38deed72285c7ed699864f964a3f4cf11ab3fb38e8d39cfcd96710cd2b5bb716",
"sha256:3ad69eeb92a9de9421e1f6685e85a10fbcfb75c833b42cc9bc2ba9fb00da4710",
"sha256:5555365a50efe1f486eed6ac7062c33b97ccef409f5970a0b6f205a7cfab59c8",
"sha256:555e5e2d3a53b4fabeca32835878b2818b3f23966a4efb0d566689777c5a12c8",
"sha256:57a6500d459e8035e813bd8b51b671977fb149a8c95ed814989da682314d0782",
"sha256:5833bb4355cb377ebd880457663a972cd044e7f49585aee39245c0d592904578",
"sha256:71320fbefd05454ef2d457c481ba9a5b0e540f3753354fff6f780927c25d19b0",
"sha256:7573d9eebaeceeb55285205dbbb8753ac1e962af3d9640791d12b36864065e71",
"sha256:92d5f428c1a0439b2040435a1d6bc1b26ebf0af88b093c3628913dd464d13fa1",
"sha256:97787952246a77d77934d41b62fb1b6f3581d83f71b44796a4158d93b8f5c490",
"sha256:9bb5bf55dcb69f7067d80354d0a348368da907345a2c448b0babc4215ccd3497",
"sha256:9cc80ce69032ffa528b5e16d217fa4d8d4bb7d6ba8659c1b4d74a1b0f4235fca",
"sha256:9e4253ed8f5948a3589b3caee7ad9a5bf218ffd16869c516535325fece163dcc",
"sha256:9eda14f049d7f09c2e8fb411dda17dd6b16a3c76a1de5e249188a32aeb92de19",
"sha256:a2b56de3417fd5f48773ad8e91abaa700b678dc7fe1e0c757e1ae340779acf7b",
"sha256:af3f92b1dc25621f5fad065288a44ac790c5798e986a34d393ab27d2b27fcff9",
"sha256:c5edcb90da1843df85292ef3a313513766a78fbbb83f584a5a58fb001a5a9d57",
"sha256:c824c9281cb628015bfc3c59335163d4ca0540d49de4582d6c2637312907e4b1",
"sha256:c92519d242703b675ccefd0f0562eb45e74d438e001f8ab52d628e885751fb06",
"sha256:ca932e11218bcc9ef812aa497cdf669484870ecbcf2d99b765d6c27a86000942",
"sha256:cb6ab89421bc90e0422aca911c69044c2912fc3debb19bb3c1bfe28ee3dff6ab",
"sha256:cfd84777b4b6684955ce86156cfb5e08d75e80dc2585e10d69e47f014f0a5342",
"sha256:d377dde61c5d67eb4311eace661c3efda46c62113ff56bf05e2d679e02aebb5b",
"sha256:d54ae41e6bd70ea23707843021c778f151ca258081586f0cfa31d936ae43d1b2",
"sha256:dc10ec1e9f21f33420cc05214989544727e776286c1c16697178978327b95c9c",
"sha256:ec21313dd335c51d7877baf2972569f40a4291b76a0ce51391523ae358d05899",
"sha256:ec64ee375b5aaa354b2b273c921144a660a511f9df8785e6d1c942967106438e",
"sha256:ed43d396f42028c1f47b5fec012e9e12631266e3825e95c00e3cf94d472dac49",
"sha256:edd6d51869beb7f0d472e902ef231a9b7689508e83880ea16ca3311a00bf5ce7",
"sha256:f22af3c78abfbc7cbcdf2c55d23c3e022e1a462ee2481011d518c7fb9c9f3d65",
"sha256:fae1e637f527750811588e4582988932c222f8251f7b7ea93739acb624e1487f",
"sha256:fed5aaca1750e46db870874c9c273cd5182a9e9deb16f06f7bdffdb5c2bde4b9"
],
"markers": "python_version >= '3.7' and python_full_version not in '3.9.0, 3.9.1'",
"version": "==44.0.3"
"version": "==45.0.3"
},
"et-xmlfile": {
"hashes": [
@ -304,6 +304,67 @@
"markers": "python_version >= '3.8'",
"version": "==0.115.12"
},
"greenlet": {
"hashes": [
"sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057",
"sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207",
"sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3",
"sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4",
"sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b",
"sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc",
"sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825",
"sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370",
"sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708",
"sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457",
"sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763",
"sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf",
"sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe",
"sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e",
"sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d",
"sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59",
"sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa",
"sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e",
"sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51",
"sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3",
"sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e",
"sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61",
"sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5",
"sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74",
"sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907",
"sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275",
"sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f",
"sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59",
"sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c",
"sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf",
"sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b",
"sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819",
"sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65",
"sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e",
"sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240",
"sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5",
"sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659",
"sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485",
"sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec",
"sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8",
"sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418",
"sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce",
"sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6",
"sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7",
"sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6",
"sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f",
"sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13",
"sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b",
"sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068",
"sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325",
"sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330",
"sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834",
"sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b",
"sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5",
"sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421"
],
"markers": "python_version >= '3.9'",
"version": "==3.2.2"
},
"gunicorn": {
"hashes": [
"sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d",
@ -507,55 +568,65 @@
},
"numpy": {
"hashes": [
"sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a",
"sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195",
"sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951",
"sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1",
"sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c",
"sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc",
"sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b",
"sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd",
"sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4",
"sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd",
"sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318",
"sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448",
"sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece",
"sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d",
"sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5",
"sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8",
"sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57",
"sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78",
"sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66",
"sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a",
"sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e",
"sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c",
"sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa",
"sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d",
"sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c",
"sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729",
"sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97",
"sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c",
"sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9",
"sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669",
"sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4",
"sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73",
"sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385",
"sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8",
"sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c",
"sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b",
"sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692",
"sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15",
"sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131",
"sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a",
"sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326",
"sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b",
"sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded",
"sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04",
"sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"
"sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff",
"sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47",
"sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84",
"sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d",
"sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6",
"sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f",
"sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b",
"sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49",
"sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163",
"sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571",
"sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42",
"sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff",
"sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491",
"sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4",
"sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566",
"sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf",
"sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40",
"sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd",
"sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06",
"sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282",
"sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680",
"sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db",
"sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3",
"sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90",
"sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1",
"sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289",
"sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab",
"sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c",
"sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d",
"sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb",
"sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d",
"sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a",
"sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf",
"sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1",
"sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2",
"sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a",
"sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543",
"sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00",
"sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c",
"sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f",
"sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd",
"sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868",
"sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303",
"sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83",
"sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3",
"sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d",
"sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87",
"sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa",
"sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f",
"sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae",
"sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda",
"sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915",
"sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249",
"sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de",
"sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.0.2"
"markers": "python_version >= '3.10'",
"version": "==2.2.6"
},
"openpyxl": {
"hashes": [
@ -633,12 +704,12 @@
},
"pydantic": {
"hashes": [
"sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d",
"sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"
"sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a",
"sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.11.4"
"version": "==2.11.5"
},
"pydantic-core": {
"hashes": [
@ -877,11 +948,11 @@
},
"s3transfer": {
"hashes": [
"sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18",
"sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c"
"sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be",
"sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"
],
"markers": "python_version >= '3.9'",
"version": "==0.12.0"
"version": "==0.13.0"
},
"six": {
"hashes": [
@ -901,67 +972,67 @@
},
"sqlalchemy": {
"hashes": [
"sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a",
"sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d",
"sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2",
"sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e",
"sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26",
"sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad",
"sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870",
"sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0",
"sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596",
"sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a",
"sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a",
"sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4",
"sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867",
"sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a",
"sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff",
"sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705",
"sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2",
"sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5",
"sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51",
"sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00",
"sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364",
"sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011",
"sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4",
"sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9",
"sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1",
"sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad",
"sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1",
"sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716",
"sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0",
"sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37",
"sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5",
"sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625",
"sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01",
"sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47",
"sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98",
"sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1",
"sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d",
"sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500",
"sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af",
"sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96",
"sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758",
"sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706",
"sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438",
"sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db",
"sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e",
"sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b",
"sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08",
"sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3",
"sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e",
"sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a",
"sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8",
"sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00",
"sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191",
"sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c",
"sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7",
"sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e",
"sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"
"sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5",
"sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582",
"sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b",
"sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b",
"sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348",
"sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda",
"sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5",
"sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2",
"sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29",
"sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8",
"sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f",
"sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826",
"sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504",
"sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae",
"sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45",
"sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443",
"sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23",
"sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576",
"sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1",
"sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0",
"sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71",
"sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11",
"sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e",
"sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f",
"sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8",
"sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd",
"sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814",
"sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08",
"sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea",
"sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30",
"sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda",
"sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9",
"sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923",
"sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df",
"sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036",
"sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3",
"sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f",
"sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6",
"sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04",
"sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2",
"sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560",
"sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70",
"sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769",
"sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1",
"sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6",
"sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b",
"sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747",
"sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078",
"sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440",
"sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f",
"sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2",
"sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d",
"sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc",
"sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a",
"sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd",
"sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9",
"sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==2.0.40"
"version": "==2.0.41"
},
"starlette": {
"hashes": [
@ -973,19 +1044,19 @@
},
"typing-extensions": {
"hashes": [
"sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c",
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"
"sha256:6cd49c8b914bb3869a16ed9d1001e3d0ff1d84fae4838076fe3b361ab8b32b65",
"sha256:90196079d79b4658568e177f50c24c327b73a85e664c0af9f3937e2015b65956"
],
"markers": "python_version >= '3.8'",
"version": "==4.13.2"
"markers": "python_version >= '3.9'",
"version": "==4.14.0rc1"
},
"typing-inspection": {
"hashes": [
"sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f",
"sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"
"sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51",
"sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"
],
"markers": "python_version >= '3.9'",
"version": "==0.4.0"
"version": "==0.4.1"
},
"tzdata": {
"hashes": [
@ -997,11 +1068,11 @@
},
"urllib3": {
"hashes": [
"sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e",
"sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"
"sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
"sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.20"
"markers": "python_version >= '3.9'",
"version": "==2.4.0"
},
"uvicorn": {
"extras": [
@ -1227,6 +1298,7 @@
"sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.3.2"
},
"flake8": {
@ -1235,6 +1307,7 @@
"sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==7.2.0"
},
"mccabe": {
@ -1260,44 +1333,6 @@
],
"markers": "python_version >= '3.9'",
"version": "==3.3.2"
},
"tomli": {
"hashes": [
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6",
"sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd",
"sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c",
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b",
"sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8",
"sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6",
"sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77",
"sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff",
"sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea",
"sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192",
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249",
"sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee",
"sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4",
"sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98",
"sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8",
"sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4",
"sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281",
"sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744",
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69",
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13",
"sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140",
"sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e",
"sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e",
"sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc",
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff",
"sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec",
"sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2",
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222",
"sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106",
"sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272",
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a",
"sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"
],
"markers": "python_version < '3.11'",
"version": "==2.2.1"
}
}
}

View File

@ -47,7 +47,7 @@
## 実行
- VSCode上で「F5」キーを押下すると、Webアプリケーションのサーバーが起動する
- 「<http://localhost:8000/maintlogin>」にアクセスし、ログイン画面が表示されていれば成功
- 「<http://localhost:8000/login/maintlogin>」にアクセスし、ログイン画面が表示されていれば成功
## フォルダ構成

View File

@ -1,371 +1,372 @@
from io import BytesIO, TextIOWrapper
from typing import Optional
# TODO: ファイル削除予定
# from io import BytesIO, TextIOWrapper
# from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import HTMLResponse
from starlette import status
# from fastapi import APIRouter, Depends, HTTPException, Request
# from fastapi.responses import HTMLResponse
# from starlette import status
from src.depends.services import get_service
from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel
from src.model.view.inst_emp_csv_download_view_model import \
InstEmpCsvDownloadViewModel
from src.model.view.inst_emp_csv_upload_view_model import \
InstEmpCsvUploadViewModel
from src.model.view.master_mainte_menu_view_model import \
MasterMainteMenuViewModel
from src.model.view.table_override_view_model import TableOverrideViewModel
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.login_service import LoginService
from src.services.master_mainte_service import MasterMainteService
from src.system_var import constants
from src.templates import templates
# from src.depends.services import get_service
# from src.logging.get_logger import get_logger
# from src.model.internal.session import UserSession
# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
# from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel
# from src.model.view.inst_emp_csv_download_view_model import \
# InstEmpCsvDownloadViewModel
# from src.model.view.inst_emp_csv_upload_view_model import \
# InstEmpCsvUploadViewModel
# from src.model.view.master_mainte_menu_view_model import \
# MasterMainteMenuViewModel
# from src.model.view.table_override_view_model import TableOverrideViewModel
# from src.router.session_router import AuthenticatedRoute
# from src.services.batch_status_service import BatchStatusService
# from src.services.login_service import LoginService
# from src.services.master_mainte_service import MasterMainteService
# from src.system_var import constants
# from src.templates import templates
logger = get_logger('マスターメンテ')
# logger = get_logger('マスターメンテ')
router = APIRouter()
router.route_class = AuthenticatedRoute
# router = APIRouter()
# router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
# #########################
# # Views #
# #########################
@router.get('/masterMainteMenu', response_class=HTMLResponse)
def menu_view(
request: Request,
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.get('/masterMainteMenu', response_class=HTMLResponse)
# def menu_view(
# request: Request,
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
menu = MasterMainteMenuViewModel()
# # 画面表示用のモデル
# menu = MasterMainteMenuViewModel()
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'masterMainteMenu.html',
{
'request': request,
'menu': menu
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'masterMainteMenu.html',
# {
# 'request': request,
# 'menu': menu
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.get('/instEmpCsvUL', response_class=HTMLResponse)
def inst_emp_csv_upload_view(
request: Request,
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.get('/instEmpCsvUL', response_class=HTMLResponse)
# def inst_emp_csv_upload_view(
# request: Request,
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
mainte_csv_up = InstEmpCsvUploadViewModel()
# # 画面表示用のモデル
# mainte_csv_up = InstEmpCsvUploadViewModel()
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'instEmpCsvUL.html',
{
'request': request,
'mainte_csv_up': mainte_csv_up
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'instEmpCsvUL.html',
# {
# 'request': request,
# 'mainte_csv_up': mainte_csv_up
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.post('/instEmpCsvUL', response_class=HTMLResponse)
async def inst_emp_csv_upload(
request: Request,
csv_upload_form: MasterMainteCsvUpModel = Depends(MasterMainteCsvUpModel.as_form),
master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.post('/instEmpCsvUL', response_class=HTMLResponse)
# async def inst_emp_csv_upload(
# request: Request,
# csv_upload_form: MasterMainteCsvUpModel = Depends(MasterMainteCsvUpModel.as_form),
# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
error_message_list = []
csv_filename: str = csv_upload_form.csv_file.filename
if csv_upload_form.csv_file.size == 0:
error_message_list.append('選択されたファイルが見つかりませんでした。')
elif not csv_filename.endswith('.csv'):
error_message_list.append('選択されたファイル形式が"csv"ではありません。')
elif csv_upload_form.csv_file.size >= constants.MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE:
error_message_list.append('選択されたCSVファイルサイズが大きいです。20MB未満にしてください。')
else:
mainte_csv_up = master_mainte_service.prepare_mainte_csv_up_view(
TextIOWrapper(BytesIO(await csv_upload_form.csv_file.read()), encoding='utf-8'),
csv_upload_form.csv_file.filename,
csv_upload_form)
# # 画面表示用のモデル
# error_message_list = []
# csv_filename: str = csv_upload_form.csv_file.filename
# if csv_upload_form.csv_file.size == 0:
# error_message_list.append('選択されたファイルが見つかりませんでした。')
# elif not csv_filename.endswith('.csv'):
# error_message_list.append('選択されたファイル形式が"csv"ではありません。')
# elif csv_upload_form.csv_file.size >= constants.MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE:
# error_message_list.append('選択されたCSVファイルサイズが大きいです。20MB未満にしてください。')
# else:
# mainte_csv_up = master_mainte_service.prepare_mainte_csv_up_view(
# TextIOWrapper(BytesIO(await csv_upload_form.csv_file.read()), encoding='utf-8'),
# csv_upload_form.csv_file.filename,
# csv_upload_form)
if len(error_message_list) > 0:
mainte_csv_up = InstEmpCsvUploadViewModel(
is_verified=True,
error_message_list=error_message_list,
select_function=csv_upload_form.select_function,
select_table=csv_upload_form.select_table)
# if len(error_message_list) > 0:
# mainte_csv_up = InstEmpCsvUploadViewModel(
# is_verified=True,
# error_message_list=error_message_list,
# select_function=csv_upload_form.select_function,
# select_table=csv_upload_form.select_table)
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'instEmpCsvUL.html',
{
'request': request,
'mainte_csv_up': mainte_csv_up
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'instEmpCsvUL.html',
# {
# 'request': request,
# 'mainte_csv_up': mainte_csv_up
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.post('/newInst', response_class=HTMLResponse)
def new_inst_result_view(
request: Request,
csv_upload_form: Optional[MasterMainteCsvUpModel] = Depends(MasterMainteCsvUpModel.as_form),
master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
login_service: LoginService = Depends(get_service(LoginService))
):
session: UserSession = request.session
# @router.post('/newInst', response_class=HTMLResponse)
# def new_inst_result_view(
# request: Request,
# csv_upload_form: Optional[MasterMainteCsvUpModel] = Depends(MasterMainteCsvUpModel.as_form),
# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
# login_service: LoginService = Depends(get_service(LoginService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# ユーザIDからユーザ名を取得
user_name = login_service.logged_in_user(session.user_id).user_name
# CSVデータを登録し、登録完了画面のモデルを返却する
mainte_csv_up = master_mainte_service.prepare_mainte_new_inst_view(user_name, csv_upload_form)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # ユーザIDからユーザ名を取得
# user_name = login_service.logged_in_user(session.user_id).user_name
# # CSVデータを登録し、登録完了画面のモデルを返却する
# mainte_csv_up = master_mainte_service.prepare_mainte_new_inst_view(user_name, csv_upload_form)
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'instEmpCsvUL.html',
{
'request': request,
'mainte_csv_up': mainte_csv_up
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'instEmpCsvUL.html',
# {
# 'request': request,
# 'mainte_csv_up': mainte_csv_up
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.get('/instEmpCsvDL', response_class=HTMLResponse)
def inst_emp_csv_download_view(
request: Request,
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.get('/instEmpCsvDL', response_class=HTMLResponse)
# def inst_emp_csv_download_view(
# request: Request,
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
mainte_csv_dl = InstEmpCsvDownloadViewModel(
is_search=False
)
# # 画面表示用のモデル
# mainte_csv_dl = InstEmpCsvDownloadViewModel(
# is_search=False
# )
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'instEmpCsvDL.html',
{
'request': request,
'mainte_csv_dl': mainte_csv_dl
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'instEmpCsvDL.html',
# {
# 'request': request,
# 'mainte_csv_dl': mainte_csv_dl
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.post('/download', response_class=HTMLResponse)
def inst_emp_csv_download(
request: Request,
csv_download_form: Optional[MasterMainteCsvDlModel] = Depends(MasterMainteCsvDlModel.as_form),
master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.post('/download', response_class=HTMLResponse)
# def inst_emp_csv_download(
# request: Request,
# csv_download_form: Optional[MasterMainteCsvDlModel] = Depends(MasterMainteCsvDlModel.as_form),
# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
search_result_df = master_mainte_service.search_emp_chg_inst_data(csv_download_form)
# search_result_df = master_mainte_service.search_emp_chg_inst_data(csv_download_form)
(result_msg, download_file_url) = master_mainte_service.upload_emp_chg_inst_data_file(
search_result_df,
session.user_id,
csv_download_form.select_table)
# (result_msg, download_file_url) = master_mainte_service.upload_emp_chg_inst_data_file(
# search_result_df,
# session.user_id,
# csv_download_form.select_table)
# 画面表示用のモデル
mainte_csv_dl = InstEmpCsvDownloadViewModel(
is_search=True,
ta_cd=csv_download_form.ta_cd,
inst_cd=csv_download_form.inst_cd,
emp_cd=csv_download_form.emp_cd,
emp_chg_type_cd=csv_download_form.emp_chg_type_cd,
apply_date_from=csv_download_form.apply_date_from,
start_date_from=csv_download_form.start_date_from,
start_date_to=csv_download_form.start_date_to,
end_date_from=csv_download_form.end_date_from,
end_date_to=csv_download_form.end_date_to,
create_date_from=csv_download_form.create_date_from,
create_date_to=csv_download_form.create_date_to,
update_date_from=csv_download_form.update_date_from,
update_date_to=csv_download_form.update_date_to,
select_table=csv_download_form.select_table,
data_count=search_result_df.shape[0],
download_file_url=download_file_url,
file_name=constants.MENTE_CSV_DOWNLOAD_FILE_NAME,
result_msg=result_msg
)
# # 画面表示用のモデル
# mainte_csv_dl = InstEmpCsvDownloadViewModel(
# is_search=True,
# ta_cd=csv_download_form.ta_cd,
# inst_cd=csv_download_form.inst_cd,
# emp_cd=csv_download_form.emp_cd,
# emp_chg_type_cd=csv_download_form.emp_chg_type_cd,
# apply_date_from=csv_download_form.apply_date_from,
# start_date_from=csv_download_form.start_date_from,
# start_date_to=csv_download_form.start_date_to,
# end_date_from=csv_download_form.end_date_from,
# end_date_to=csv_download_form.end_date_to,
# create_date_from=csv_download_form.create_date_from,
# create_date_to=csv_download_form.create_date_to,
# update_date_from=csv_download_form.update_date_from,
# update_date_to=csv_download_form.update_date_to,
# select_table=csv_download_form.select_table,
# data_count=search_result_df.shape[0],
# download_file_url=download_file_url,
# file_name=constants.MENTE_CSV_DOWNLOAD_FILE_NAME,
# result_msg=result_msg
# )
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'instEmpCsvDL.html',
{
'request': request,
'mainte_csv_dl': mainte_csv_dl
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'instEmpCsvDL.html',
# {
# 'request': request,
# 'mainte_csv_dl': mainte_csv_dl
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.get('/tableOverride', response_class=HTMLResponse)
def table_override_view(
request: Request,
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.get('/tableOverride', response_class=HTMLResponse)
# def table_override_view(
# request: Request,
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
table_override = TableOverrideViewModel()
# # 画面表示用のモデル
# table_override = TableOverrideViewModel()
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'tableOverride.html',
{
'request': request,
'table_override': table_override
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'tableOverride.html',
# {
# 'request': request,
# 'table_override': table_override
# },
# headers={'session_key': session_key}
# )
# return templates_response
@router.post('/tableOverride', response_class=HTMLResponse)
def table_override_result_view(
request: Request,
master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
session: UserSession = request.session
# @router.post('/tableOverride', response_class=HTMLResponse)
# def table_override_result_view(
# request: Request,
# master_mainte_service: MasterMainteService = Depends(get_service(MasterMainteService)),
# batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
# ):
# session: UserSession = request.session
# マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
if session.master_mainte_flg == constants.PERMISSION_DISABLED:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# # マスタメンテメニューへのアクセス権がない場合、ログアウトさせる
# if session.master_mainte_flg == constants.PERMISSION_DISABLED:
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
# バッチ処理中の場合、ログアウトさせる
if batch_status_service.is_batch_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# dump処理中の場合、ログアウトさせる
if batch_status_service.is_dump_processing():
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# # バッチ処理中の場合、ログアウトさせる
# if batch_status_service.is_batch_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
# detail=constants.LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE)
# # dump処理中の場合、ログアウトさせる
# if batch_status_service.is_dump_processing():
# raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BACKUP_PROCESSING)
# 画面表示用のモデル
table_override = master_mainte_service.copy_data_real_to_dummy()
# # 画面表示用のモデル
# table_override = master_mainte_service.copy_data_real_to_dummy()
# レスポンス
session_key = session.session_key
templates_response = templates.TemplateResponse(
'tableOverride.html',
{
'request': request,
'table_override': table_override
},
headers={'session_key': session_key}
)
return templates_response
# # レスポンス
# session_key = session.session_key
# templates_response = templates.TemplateResponse(
# 'tableOverride.html',
# {
# 'request': request,
# 'table_override': table_override
# },
# headers={'session_key': session_key}
# )
# return templates_response

View File

@ -32,7 +32,7 @@ app.include_router(ultmarc.router, prefix='/ultmarc')
# クライアントから非同期呼出しされるため、共通ルーターとは異なる扱いとする。
app.include_router(bio_api.router, prefix='/bio')
# マスタメンテ
app.include_router(master_mainte.router, prefix='/masterMainte')
# 削除予定app.include_router(master_mainte.router, prefix='/masterMainte')
# ヘルスチェック用のルーター
app.include_router(healthcheck.router, prefix='/healthcheck')

View File

@ -1,9 +1,10 @@
from typing import Optional
# TODO: ファイル削除予定
# from typing import Optional
from src.model.db.base_db_model import BaseDBModel
from src.util.sanitize import sanitize
# from src.model.db.base_db_model import BaseDBModel
# from src.util.sanitize import sanitize
@sanitize
class MasterMenteCountModel(BaseDBModel):
count: Optional[int]
# @sanitize
# class MasterMenteCountModel(BaseDBModel):
# count: Optional[int]

File diff suppressed because it is too large Load Diff

View File

@ -1,166 +1,167 @@
from abc import ABCMeta, abstractmethod
from src.repositories.emp_chg_inst_repository import EmpChgInstRepository
from src.logging.get_logger import get_logger
# TODO:ファイル削除予定
# from abc import ABCMeta, abstractmethod
# from src.repositories.emp_chg_inst_repository import EmpChgInstRepository
# from src.logging.get_logger import get_logger
logger = get_logger('マスターメンテ')
# logger = get_logger('マスターメンテ')
class MasterMainteEmpChgInstFunction(metaclass=ABCMeta):
insert_data: list[dict]
table_name: str
select_table_message: str
user_name: str
emp_chginst_repository: EmpChgInstRepository
# class MasterMainteEmpChgInstFunction(metaclass=ABCMeta):
# insert_data: list[dict]
# table_name: str
# select_table_message: str
# user_name: str
# emp_chginst_repository: EmpChgInstRepository
def __init__(
self,
insert_data,
table_name: str,
select_table_message: str,
user_name: str,
emp_chginst_repository: EmpChgInstRepository
):
self.insert_data = insert_data
self.table_name = table_name
self.select_table_message = select_table_message
self.user_name = user_name
self.emp_chginst_repository = emp_chginst_repository
# def __init__(
# self,
# insert_data,
# table_name: str,
# select_table_message: str,
# user_name: str,
# emp_chginst_repository: EmpChgInstRepository
# ):
# self.insert_data = insert_data
# self.table_name = table_name
# self.select_table_message = select_table_message
# self.user_name = user_name
# self.emp_chginst_repository = emp_chginst_repository
def save(self):
error_list = []
try:
self.emp_chginst_repository.begin()
self.emp_chginst_repository.to_jst()
(result_message, error_list) = self.write_emp_chg_inst_table()
if len(error_list) > 0:
self.emp_chginst_repository.rollback()
else:
self.emp_chginst_repository.commit()
except Exception as e:
self.emp_chginst_repository.rollback()
raise e
# def save(self):
# error_list = []
# try:
# self.emp_chginst_repository.begin()
# self.emp_chginst_repository.to_jst()
# (result_message, error_list) = self.write_emp_chg_inst_table()
# if len(error_list) > 0:
# self.emp_chginst_repository.rollback()
# else:
# self.emp_chginst_repository.commit()
# except Exception as e:
# self.emp_chginst_repository.rollback()
# raise e
return (result_message, error_list)
# return (result_message, error_list)
def add_emp_chg_inst_table(self, data, start_date, end_date):
self.emp_chginst_repository.insert_emp_chg_inst(
data['施設コード'],
data['領域コード'],
data['担当者種別コード'],
data['MUID'],
data['ビジネスユニットコード'],
start_date,
end_date,
self.user_name,
self.table_name)
# def add_emp_chg_inst_table(self, data, start_date, end_date):
# self.emp_chginst_repository.insert_emp_chg_inst(
# data['施設コード'],
# data['領域コード'],
# data['担当者種別コード'],
# data['MUID'],
# data['ビジネスユニットコード'],
# start_date,
# end_date,
# self.user_name,
# self.table_name)
@abstractmethod
def write_emp_chg_inst_table(self):
pass
# @abstractmethod
# def write_emp_chg_inst_table(self):
# pass
class NewEmpChgInstFunction(MasterMainteEmpChgInstFunction):
# class NewEmpChgInstFunction(MasterMainteEmpChgInstFunction):
def __init__(
self,
insert_data_list: list[dict],
table_name: str,
select_table_message: str,
user_name: str,
emp_chginst_repository: EmpChgInstRepository
):
super().__init__(
insert_data_list,
table_name,
select_table_message,
user_name,
emp_chginst_repository
)
# def __init__(
# self,
# insert_data_list: list[dict],
# table_name: str,
# select_table_message: str,
# user_name: str,
# emp_chginst_repository: EmpChgInstRepository
# ):
# super().__init__(
# insert_data_list,
# table_name,
# select_table_message,
# user_name,
# emp_chginst_repository
# )
def write_emp_chg_inst_table(self):
error_list = []
# def write_emp_chg_inst_table(self):
# error_list = []
add_count = 0
for row_no, data in enumerate(self.insert_data, start=1):
try:
self.add_emp_chg_inst_table(data, data['適用開始日'], data['適用終了日'])
add_count += 1
except Exception as e:
error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。')
logger.info(f'新規施設登録時に{row_no}行目でエラーが発生しました: {e}')
# add_count = 0
# for row_no, data in enumerate(self.insert_data, start=1):
# try:
# self.add_emp_chg_inst_table(data, data['適用開始日'], data['適用終了日'])
# add_count += 1
# except Exception as e:
# error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。')
# logger.info(f'新規施設登録時に{row_no}行目でエラーが発生しました: {e}')
result_message_list = []
if len(error_list) == 0:
result_message_list.append('新規施設登録を行いました')
result_message_list.append('対象:' + self.select_table_message)
result_message_list.append('追加:' + str(add_count) + '')
return (result_message_list, error_list)
# result_message_list = []
# if len(error_list) == 0:
# result_message_list.append('新規施設登録を行いました')
# result_message_list.append('対象:' + self.select_table_message)
# result_message_list.append('追加:' + str(add_count) + '件')
# return (result_message_list, error_list)
class ChangeEmpChgInstFunction(MasterMainteEmpChgInstFunction):
    """CSV upload handler that applies facility-assignment changes.

    Each CSV row carries a コメント (comment) column selecting the operation:
    '追加' (add a new assignment), '終了' (close an assignment by setting its
    end date), or '担当者修正' (reassign the person in charge).
    """

    def __init__(
        self,
        insert_data: list[dict],
        table_name: str,
        select_table_message: str,
        user_name: str,
        emp_chginst_repository: EmpChgInstRepository
    ):
        super().__init__(
            insert_data,
            table_name,
            select_table_message,
            user_name,
            emp_chginst_repository
        )

    def write_emp_chg_inst_table(self):
        """Apply each CSV row's operation and summarize the outcome.

        Returns:
            tuple(list[str], list[str]): (result messages, error messages).
            Result messages are only produced when no row failed.
        """
        add_count = 0
        end_count = 0
        modify_count = 0
        error_list = []
        for row_no, data in enumerate(self.insert_data, start=1):
            try:
                if data['コメント'] == '追加':
                    self.add_emp_chg_inst_table(data, data['施設担当_開始日'], data['施設担当_終了日'])
                    add_count += 1
                elif data['コメント'] == '終了':
                    self.__end_emp_chg_inst(data)
                    end_count += 1
                elif data['コメント'] == '担当者修正':
                    self.__modify_emp_chg_inst(data)
                    modify_count += 1
                # NOTE(review): rows with any other コメント value are silently
                # skipped and not counted — confirm this is intentional.
            except Exception as e:
                error_list.append(f'{str(row_no)}行目がSQL実行エラーです。CSVファイルを確認してください。')
                logger.info(f'施設担当者変更時に{row_no}行目でエラーが発生しました: {e}')
        result_message_list = []
        if len(error_list) == 0:
            result_message_list.append('施設担当者変更を行いました')
            result_message_list.append('対象:' + self.select_table_message)
            # '件' counter suffixes restored to match the reference
            # implementation kept in the commented-out copy.
            result_message_list.append('追加:' + str(add_count) + '件')
            result_message_list.append('修正:' + str(modify_count) + '件')
            result_message_list.append('終了:' + str(end_count) + '件')
        return (result_message_list, error_list)

    def __end_emp_chg_inst(self, data: dict):
        # Close the assignment identified by (施設コード, 領域コード,
        # 担当者種別コード, 施設担当_開始日) by writing the new end date.
        self.emp_chginst_repository.end_emp_chg_inst(
            data['施設コード'],
            data['領域コード'],
            data['担当者種別コード'],
            data['施設担当_開始日'],
            data['終了日の変更'],
            self.user_name,
            self.table_name)

    def __modify_emp_chg_inst(self, data: dict):
        # Reassign the person in charge (MUID) on an existing assignment row.
        self.emp_chginst_repository.modify_emp_chg_inst(
            data['施設コード'],
            data['領域コード'],
            data['施設担当_開始日'],
            data['担当者種別コード'],
            data['MUID'],
            self.user_name,
            self.table_name)

View File

@ -1,177 +1,178 @@
from typing import Optional
# TODO:ファイル削除予定
# from typing import Optional
from fastapi import Form
# from fastapi import Form
from src.model.request.request_base_model import RequestBaseModel
from src.util.sanitize import sanitize
from src.util.string_util import is_not_empty
# from src.model.request.request_base_model import RequestBaseModel
# from src.util.sanitize import sanitize
# from src.util.string_util import is_not_empty
@sanitize
class MasterMainteCsvDlModel(RequestBaseModel):
    """Form-backed search parameters for the facility-assignment CSV download.

    Each screen field is kept twice: the raw form value (e.g. ``ta_cd``,
    redisplayed on the page) and the value actually used by the search
    (``adapt_*``). Dates come in as 'YYYY/MM/DD'; plain dates are searched as
    'YYYYMMDD', while create/update timestamps are searched as
    'YYYY-MM-DD HH:MM:SS' range bounds.
    """
    ta_cd: Optional[str]
    adapt_ta_cd: Optional[str]
    inst_cd: Optional[str]
    adapt_inst_cd: Optional[str]
    emp_cd: Optional[str]
    adapt_emp_cd: Optional[str]
    emp_chg_type_cd: Optional[str]
    adapt_emp_chg_type_cd: Optional[str]
    apply_date_from: Optional[str]
    adapt_apply_date_from: Optional[str]
    start_date_from: Optional[str]
    adapt_start_date_from: Optional[str]
    start_date_to: Optional[str]
    adapt_start_date_to: Optional[str]
    end_date_from: Optional[str]
    adapt_end_date_from: Optional[str]
    end_date_to: Optional[str]
    adapt_end_date_to: Optional[str]
    select_table: Optional[str]
    create_date_from: Optional[str]
    adapt_create_date_from: Optional[str]
    create_date_to: Optional[str]
    adapt_create_date_to: Optional[str]
    update_date_from: Optional[str]
    adapt_update_date_from: Optional[str]
    update_date_to: Optional[str]
    adapt_update_date_to: Optional[str]

    @staticmethod
    def _normalize_text(value: Optional[str]) -> str:
        # Plain text inputs: None/empty collapses to ''.
        return value if is_not_empty(value) else ''

    @staticmethod
    def _normalize_date(value: Optional[str]) -> tuple[str, str]:
        # 'YYYY/MM/DD' input -> (raw value, 'YYYYMMDD' search value);
        # both become '' when the field was left blank.
        if is_not_empty(value):
            return value, value.replace('/', '')
        return '', ''

    @staticmethod
    def _normalize_datetime(value: Optional[str], time_part: str) -> tuple[str, str]:
        # 'YYYY/MM/DD' input -> (raw value, 'YYYY-MM-DD' + time_part) so the
        # pair forms an inclusive day-granular timestamp range.
        if is_not_empty(value):
            return value, value.replace('/', '-') + time_part
        return '', ''

    @classmethod
    def as_form(
        cls,
        ctrl_ta_cd: Optional[str] = Form(None),
        ctrl_inst_cd: Optional[str] = Form(None),
        ctrl_emp_cd: Optional[str] = Form(None),
        ctrl_emp_chg_type_cd: Optional[str] = Form(None),
        ctrl_apply_date_from: Optional[str] = Form(None),
        ctrl_start_date_from: Optional[str] = Form(None),
        ctrl_start_date_to: Optional[str] = Form(None),
        ctrl_end_date_from: Optional[str] = Form(None),
        ctrl_end_date_to: Optional[str] = Form(None),
        radio_select_table: Optional[str] = Form(None),
        ctrl_create_date_from: Optional[str] = Form(None),
        ctrl_create_date_to: Optional[str] = Form(None),
        ctrl_update_date_from: Optional[str] = Form(None),
        ctrl_update_date_to: Optional[str] = Form(None)
    ):
        """Build the model from the download screen's form fields."""
        ta_cd = cls._normalize_text(ctrl_ta_cd)
        inst_cd = cls._normalize_text(ctrl_inst_cd)
        emp_cd = cls._normalize_text(ctrl_emp_cd)
        emp_chg_type_cd = cls._normalize_text(ctrl_emp_chg_type_cd)
        apply_date_from, adapt_apply_date_from = cls._normalize_date(ctrl_apply_date_from)
        start_date_from, adapt_start_date_from = cls._normalize_date(ctrl_start_date_from)
        start_date_to, adapt_start_date_to = cls._normalize_date(ctrl_start_date_to)
        end_date_from, adapt_end_date_from = cls._normalize_date(ctrl_end_date_from)
        end_date_to, adapt_end_date_to = cls._normalize_date(ctrl_end_date_to)
        create_date_from, adapt_create_date_from = cls._normalize_datetime(ctrl_create_date_from, ' 00:00:00')
        create_date_to, adapt_create_date_to = cls._normalize_datetime(ctrl_create_date_to, ' 23:59:59')
        update_date_from, adapt_update_date_from = cls._normalize_datetime(ctrl_update_date_from, ' 00:00:00')
        update_date_to, adapt_update_date_to = cls._normalize_datetime(ctrl_update_date_to, ' 23:59:59')
        return cls(
            ta_cd=ta_cd,
            adapt_ta_cd=ta_cd,
            inst_cd=inst_cd,
            adapt_inst_cd=inst_cd,
            emp_cd=emp_cd,
            adapt_emp_cd=emp_cd,
            emp_chg_type_cd=emp_chg_type_cd,
            adapt_emp_chg_type_cd=emp_chg_type_cd,
            apply_date_from=apply_date_from,
            adapt_apply_date_from=adapt_apply_date_from,
            start_date_from=start_date_from,
            adapt_start_date_from=adapt_start_date_from,
            start_date_to=start_date_to,
            adapt_start_date_to=adapt_start_date_to,
            # select_table is passed through unchanged (may be None),
            # matching the original behavior.
            select_table=radio_select_table,
            end_date_from=end_date_from,
            adapt_end_date_from=adapt_end_date_from,
            end_date_to=end_date_to,
            adapt_end_date_to=adapt_end_date_to,
            create_date_from=create_date_from,
            adapt_create_date_from=adapt_create_date_from,
            create_date_to=create_date_to,
            adapt_create_date_to=adapt_create_date_to,
            update_date_from=update_date_from,
            adapt_update_date_from=adapt_update_date_from,
            update_date_to=update_date_to,
            adapt_update_date_to=adapt_update_date_to
        )

View File

@ -1,31 +1,32 @@
from typing import Optional, Annotated
# TODO: ファイル削除予定
# from typing import Optional, Annotated
from fastapi import Form
# from fastapi import Form
from src.util.sanitize import sanitize
from fastapi import File, UploadFile
# from src.util.sanitize import sanitize
# from fastapi import File, UploadFile
from src.model.request.request_base_model import RequestBaseModel
# from src.model.request.request_base_model import RequestBaseModel
@sanitize
class MasterMainteCsvUpModel(RequestBaseModel):
    """Form-backed payload for the facility-assignment CSV upload screen."""
    csv_file: Optional[Annotated[UploadFile, File()]]
    select_function: Optional[str]
    select_table: Optional[str]
    json_upload_data: Optional[str]

    @classmethod
    def as_form(
        cls,
        ctrl_csv_file: UploadFile = Form(None),
        ctrl_select_function: Optional[str] = Form(None),
        ctrl_select_table: Optional[str] = Form(None),
        ctrl_json_upload_data: Optional[str] = Form(None)
    ):
        """Map the upload form's ``ctrl_*`` fields onto the model."""
        form_values = {
            'csv_file': ctrl_csv_file,
            'select_function': ctrl_select_function,
            'select_table': ctrl_select_table,
            'json_upload_data': ctrl_json_upload_data,
        }
        return cls(**form_values)
# @classmethod
# def as_form(
# cls,
# ctrl_csv_file: UploadFile = Form(None),
# ctrl_select_function: Optional[str] = Form(None),
# ctrl_select_table: Optional[str] = Form(None),
# ctrl_json_upload_data: Optional[str] = Form(None)
# ):
# return cls(
# csv_file=ctrl_csv_file,
# select_function=ctrl_select_function,
# select_table=ctrl_select_table,
# json_upload_data=ctrl_json_upload_data
# )

View File

@ -1,32 +1,33 @@
from pydantic import BaseModel
# TODO: ファイル削除予定
# from pydantic import BaseModel
from src.util.string_util import is_not_empty
# from src.util.string_util import is_not_empty
class InstEmpCsvDownloadViewModel(BaseModel):
    """View state for the facility-assignment CSV download screen."""
    subtitle: str = '施設担当者データCSVダウンロード'
    is_search: bool = False
    ta_cd: str = ''
    inst_cd: str = ''
    emp_cd: str = ''
    emp_chg_type_cd: str = ''
    apply_date_from: str = ''
    start_date_from: str = ''
    start_date_to: str = ''
    end_date_from: str = ''
    end_date_to: str = ''
    create_date_from: str = ''
    create_date_to: str = ''
    update_date_from: str = ''
    update_date_to: str = ''
    select_table: str = ''
    data_count: int = 0
    result_msg: str = ''
    download_file_url: str = ''
    file_name: str = ''

    def is_select_table_empty(self):
        """True when no target table has been chosen yet."""
        has_value = is_not_empty(self.select_table)
        return not has_value

    def is_download_file_url_empty(self):
        """True when no download URL has been generated yet."""
        has_value = is_not_empty(self.download_file_url)
        return not has_value

View File

@ -1,48 +1,49 @@
from pydantic import BaseModel
# TODO: ファイル削除予定
# from pydantic import BaseModel
from src.system_var import constants
# from src.system_var import constants
class InstEmpCsvUploadViewModel(BaseModel):
    """View state for the facility-assignment CSV upload screen."""
    subtitle: str = '施設担当者データCSVアップロード'
    is_verified: bool = False
    is_insert: bool = False
    error_message_list: list[str] = None
    select_function: str = None
    select_table: str = None
    csv_file_name: str = None
    csv_upload_list: list[dict] = None
    json_upload_data: str = None
    result_message_list: list[str] = None
    select_function_message: str = None

    def select_table_message(self):
        """Display name of the table targeted by this upload."""
        if self.select_table == 'dummy':
            return self.__dummy_table()
        return self.__real_table()

    def upload_data_columns(self) -> list[str]:
        """Expected CSV header (logical column names) for the chosen function."""
        return self.__inst_emp_columns()

    def is_select_function_empty(self):
        """True when no function (new/change) has been selected."""
        value = self.select_function
        return value is None or len(value) == 0

    def is_select_table_empty(self):
        """True when no target table has been selected."""
        value = self.select_table
        return value is None or len(value) == 0

    def is_error_message_list_empty(self):
        """True when validation produced no error messages."""
        messages = self.error_message_list
        return messages is None or len(messages) == 0

    def csv_data_count(self):
        """Number of uploaded CSV rows (0 when nothing is loaded)."""
        if self.csv_upload_list is None:
            return 0
        return len(self.csv_upload_list)

    def __inst_emp_columns(self) -> list[str]:
        # Column sets differ between 'new' registration and 'change' upload;
        # anything else yields an empty header.
        if self.select_function == 'new':
            return constants.NEW_INST_EMP_CSV_LOGICAL_NAMES
        if self.select_function == 'change':
            return constants.CHANGE_INST_CSV_LOGICAL_NAMES
        return []

    def __real_table(self):
        return constants.CSV_REAL_TABLE_NAME

    def __dummy_table(self):
        return constants.CSV_CHANGE_TABLE_NAME

View File

@ -1,5 +1,6 @@
from pydantic import BaseModel
# TODO: ファイル削除予定
# from pydantic import BaseModel
class MasterMainteMenuViewModel(BaseModel):
subtitle: str = 'MeDaCA マスターメンテメニュー'
# class MasterMainteMenuViewModel(BaseModel):
# subtitle: str = 'MeDaCA マスターメンテメニュー'

View File

@ -13,8 +13,8 @@ class MenuViewModel(BaseModel):
def is_batch_processing(self):
    # True while the batch status flag indicates a run in progress.
    return self.batch_status == constants.BATCH_STATUS_PROCESSING
def is_backup_processing(self):
    # True for ANY dump status other than "unprocessed" (i.e. running or
    # already finished). NOTE(review): the adjacent TODO marks this method
    # for deletion — confirm it is still referenced before relying on it.
    return self.dump_status != constants.DUMP_STATUS_UNPROCESSED
#TODO 削除予定 def is_backup_processing(self):
# return self.dump_status != constants.DUMP_STATUS_UNPROCESSED
def is_available_ult_doctor_menu(self):
    # Menu visibility: delegates to the user's ULT-doctor permission flag.
    return self.user_model.has_ult_doctor_permission()
@ -24,6 +24,7 @@ class MenuViewModel(BaseModel):
def is_available_bio_menu(self):
    # Menu visibility: delegates to the user's bio permission flag.
    return self.user_model.has_bio_permission()
#TODO 削除予定 def is_available_master_maintenance_menu(self):
# return self.user_model.has_master_maintenance_permission()
def is_available_master_maintenance_menu(self):
    # Menu visibility: delegates to the user's master-maintenance permission.
    return self.user_model.has_master_maintenance_permission()

View File

@ -1,7 +1,8 @@
from pydantic import BaseModel
# TODO: ファイル削除予定
# from pydantic import BaseModel
class TableOverrideViewModel(BaseModel):
    """View model for the table overwrite-copy screen."""
    # Page subtitle shown in the header.
    subtitle: str = 'テーブル上書きコピー'
    # True once the overwrite-copy has been executed.
    is_override: bool = False

View File

@ -19,6 +19,7 @@ class UserViewModel(BaseModel):
def has_bio_permission(self):
    # True when the user's bio flag carries the "enabled" permission value.
    return self.bio_flg == constants.PERMISSION_ENABLED
#TODO 削除予定 def has_master_maintenance_permission(self):
# return self.master_mainte_flg == constants.PERMISSION_ENABLED
def has_master_maintenance_permission(self):
    # True when the master-maintenance flag carries the "enabled" value.
    return self.master_mainte_flg == constants.PERMISSION_ENABLED

View File

@ -1,29 +1,30 @@
from src.repositories.base_repository import BaseRepository
from src.model.db.master_mente_count import MasterMenteCountModel
from src.logging.get_logger import get_logger
# TODO: file削除予定
# from src.repositories.base_repository import BaseRepository
# from src.model.db.master_mente_count import MasterMenteCountModel
# from src.logging.get_logger import get_logger
logger = get_logger('ビジネスユニットマスタ')
# logger = get_logger('ビジネスユニットマスタ')
class BuMasterRepository(BaseRepository):
    """Read-only access to the business-unit master table (src05.bu)."""

    # Existence check: count rows matching a business-unit code.
    FETCH_COUNT_SQL = """\
    SELECT
        COUNT(*) AS count
    FROM
        src05.bu
    WHERE
        bu.bu_cd = :bu_cd
    """

    def fetch_count(self, bu_cd) -> int:
        """Return the number of bu rows whose bu_cd equals ``bu_cd``.

        Returns 0 when the query yields no rows. Any DB error is logged
        and re-raised.

        Note: the original annotation claimed ``MasterMenteCountModel`` but
        the method always returns an int (0 or ``models[0].count``); the
        annotation is corrected accordingly.
        """
        try:
            query = self.FETCH_COUNT_SQL
            result = self._database.execute_select(query, {'bu_cd': bu_cd})
            models = [MasterMenteCountModel(**r) for r in result]
            if len(models) == 0:
                return 0
            return models[0].count
        except Exception as e:
            logger.exception(f"DB Error : Exception={e.args}")
            raise e

View File

@ -1,289 +1,290 @@
from src.db import sql_condition as condition
from src.db.sql_condition import SQLCondition
from src.logging.get_logger import get_logger
from src.model.db.master_mente_count import MasterMenteCountModel
from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
from src.repositories.base_repository import BaseRepository
from src.util.string_util import is_not_empty
# TODO: ファイル削除予定
# from src.db import sql_condition as condition
# from src.db.sql_condition import SQLCondition
# from src.logging.get_logger import get_logger
# from src.model.db.master_mente_count import MasterMenteCountModel
# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
# from src.repositories.base_repository import BaseRepository
# from src.util.string_util import is_not_empty
logger = get_logger('従業員担当施設マスタ')
# logger = get_logger('従業員担当施設マスタ')
class EmpChgInstRepository(BaseRepository):
# class EmpChgInstRepository(BaseRepository):
def to_jst(self):
    # Delegate to the DB wrapper; presumably switches the session time zone
    # to JST — confirm against the database helper implementation.
    self._database.to_jst()
# def to_jst(self):
# self._database.to_jst()
def begin(self):
    # Start a DB transaction (pass-through to the connection wrapper).
    self._database.begin()
# def begin(self):
# self._database.begin()
def commit(self):
    # Commit the current DB transaction (pass-through to the wrapper).
    self._database.commit()
# def commit(self):
# self._database.commit()
def rollback(self):
    # Roll back the current DB transaction (pass-through to the wrapper).
    self._database.rollback()
# def rollback(self):
# self._database.rollback()
# INSERT template for one assignment row; {table_name} is substituted via
# str.format so the same statement serves both the real and dummy tables.
# main_chg_flg is fixed to '1' and enabled_flg to 'Y' for rows created here;
# creater/updater and both timestamps are set at insert time.
INSERT_SQL = """\
INSERT INTO {table_name}
(
inst_cd,
ta_cd,
emp_chg_type_cd,
emp_cd,
bu_cd,
start_date,
end_date,
main_chg_flg,
enabled_flg,
creater,
create_date,
updater,
update_date
)
VALUES (
:inst_cd,
:ta_cd,
:emp_chg_type_cd,
:emp_cd,
:bu_cd,
:start_date,
:end_date,
'1',
'Y',
:create_user_name,
NOW(),
:update_user_name,
NOW()
)
"""
# INSERT_SQL = """\
# INSERT INTO {table_name}
# (
# inst_cd,
# ta_cd,
# emp_chg_type_cd,
# emp_cd,
# bu_cd,
# start_date,
# end_date,
# main_chg_flg,
# enabled_flg,
# creater,
# create_date,
# updater,
# update_date
# )
# VALUES (
# :inst_cd,
# :ta_cd,
# :emp_chg_type_cd,
# :emp_cd,
# :bu_cd,
# :start_date,
# :end_date,
# '1',
# 'Y',
# :create_user_name,
# NOW(),
# :update_user_name,
# NOW()
# )
# """
def insert_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, emp_cd, bu_cd, start_date,
                        end_date, create_user_name, table_name):
    """Insert one facility-assignment row into ``table_name``.

    The creating user is recorded as both creater and updater. Any DB
    error is logged and re-raised to the caller (which manages the
    transaction).
    """
    try:
        query = self.INSERT_SQL.format(table_name=table_name)
        self._database.execute(query, {
            'inst_cd': inst_cd,
            'ta_cd': ta_cd,
            'emp_chg_type_cd': emp_chg_type_cd,
            'emp_cd': emp_cd,
            'bu_cd': bu_cd,
            'start_date': start_date,
            'end_date': end_date,
            'create_user_name': create_user_name,
            'update_user_name': create_user_name
        })
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e
# def insert_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, emp_cd, bu_cd, start_date,
# end_date, create_user_name, table_name):
# try:
# query = self.INSERT_SQL.format(table_name=table_name)
# self._database.execute(query, {
# 'inst_cd': inst_cd,
# 'ta_cd': ta_cd,
# 'emp_chg_type_cd': emp_chg_type_cd,
# 'emp_cd': emp_cd,
# 'bu_cd': bu_cd,
# 'start_date': start_date,
# 'end_date': end_date,
# 'create_user_name': create_user_name,
# 'update_user_name': create_user_name
# })
# except Exception as e:
# logger.exception(f'DB Error : Exception={e.args}')
# raise e
# UPDATE template that closes an assignment by writing a new end_date.
# The row is keyed by (inst_cd, ta_cd, emp_chg_type_cd, start_date);
# {table_name} is substituted via str.format.
UPDATE_END_DATE_SQL = """\
UPDATE
{table_name}
SET
end_date = :end_date,
updater = :update_user_name,
update_date = NOW()
WHERE
inst_cd = :inst_cd
AND ta_cd = :ta_cd
AND emp_chg_type_cd = :emp_chg_type_cd
AND start_date = :start_date
"""
# UPDATE_END_DATE_SQL = """\
# UPDATE
# {table_name}
# SET
# end_date = :end_date,
# updater = :update_user_name,
# update_date = NOW()
# WHERE
# inst_cd = :inst_cd
# AND ta_cd = :ta_cd
# AND emp_chg_type_cd = :emp_chg_type_cd
# AND start_date = :start_date
# """
def end_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, start_date,
                     end_date, update_user_name, table_name):
    """Close an assignment by updating its end_date.

    The row is identified by (inst_cd, ta_cd, emp_chg_type_cd, start_date).
    Any DB error is logged and re-raised.
    """
    try:
        query = self.UPDATE_END_DATE_SQL.format(table_name=table_name)
        self._database.execute(query, {
            'inst_cd': inst_cd,
            'ta_cd': ta_cd,
            'emp_chg_type_cd': emp_chg_type_cd,
            'start_date': start_date,
            'end_date': end_date,
            'update_user_name': update_user_name
        })
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e
# def end_emp_chg_inst(self, inst_cd, ta_cd, emp_chg_type_cd, start_date,
# end_date, update_user_name, table_name):
# try:
# query = self.UPDATE_END_DATE_SQL.format(table_name=table_name)
# self._database.execute(query, {
# 'inst_cd': inst_cd,
# 'ta_cd': ta_cd,
# 'emp_chg_type_cd': emp_chg_type_cd,
# 'start_date': start_date,
# 'end_date': end_date,
# 'update_user_name': update_user_name
# })
# except Exception as e:
# logger.exception(f'DB Error : Exception={e.args}')
# raise e
# UPDATE template that reassigns the employee (emp_cd) on an existing row.
# The row is keyed by (inst_cd, ta_cd, emp_chg_type_cd, start_date);
# {table_name} is substituted via str.format.
# 'where' normalized to uppercase 'WHERE' for consistency with the sibling
# SQL constants in this repository (no behavioral change in SQL).
UPDATE_EMP_CD_SQL = """\
UPDATE
{table_name}
SET
emp_cd = :emp_cd,
updater = :update_user_name,
update_date = NOW()
WHERE
inst_cd = :inst_cd
AND ta_cd = :ta_cd
AND emp_chg_type_cd = :emp_chg_type_cd
AND start_date = :start_date
"""
# UPDATE_EMP_CD_SQL = """\
# UPDATE
# {table_name}
# SET
# emp_cd = :emp_cd,
# updater = :update_user_name,
# update_date = NOW()
# where
# inst_cd = :inst_cd
# AND ta_cd = :ta_cd
# AND emp_chg_type_cd = :emp_chg_type_cd
# AND start_date = :start_date
# """
def modify_emp_chg_inst(self, inst_cd, ta_cd, start_date, emp_chg_type_cd, emp_cd, update_user_name, table_name):
    """Reassign the employee (emp_cd) on an existing assignment row.

    The row is identified by (inst_cd, ta_cd, emp_chg_type_cd, start_date).
    CAUTION: the parameter order here places ``start_date`` BEFORE
    ``emp_chg_type_cd``, unlike the sibling methods — match positional
    callers carefully. Any DB error is logged and re-raised.
    """
    try:
        query = self.UPDATE_EMP_CD_SQL.format(table_name=table_name)
        self._database.execute(query, {
            'inst_cd': inst_cd,
            'ta_cd': ta_cd,
            'emp_chg_type_cd': emp_chg_type_cd,
            'start_date': start_date,
            'emp_cd': emp_cd,
            'update_user_name': update_user_name
        })
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e
# def modify_emp_chg_inst(self, inst_cd, ta_cd, start_date, emp_chg_type_cd, emp_cd, update_user_name, table_name):
# try:
# query = self.UPDATE_EMP_CD_SQL.format(table_name=table_name)
# self._database.execute(query, {
# 'inst_cd': inst_cd,
# 'ta_cd': ta_cd,
# 'emp_chg_type_cd': emp_chg_type_cd,
# 'start_date': start_date,
# 'emp_cd': emp_cd,
# 'update_user_name': update_user_name
# })
# except Exception as e:
# logger.exception(f'DB Error : Exception={e.args}')
# raise e
# Existence check: count rows matching the full assignment key
# (inst_cd, ta_cd, emp_chg_type_cd, start_date) in {table_name}.
FETCH_COUNT_SQL = """\
SELECT
COUNT(*) AS count
FROM
{table_name}
WHERE
inst_cd = :inst_cd
AND ta_cd = :ta_cd
AND emp_chg_type_cd = :emp_chg_type_cd
AND start_date = :start_date
"""
# FETCH_COUNT_SQL = """\
# SELECT
# COUNT(*) AS count
# FROM
# {table_name}
# WHERE
# inst_cd = :inst_cd
# AND ta_cd = :ta_cd
# AND emp_chg_type_cd = :emp_chg_type_cd
# AND start_date = :start_date
# """
def fetch_count(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, table_name) -> int:
    """Return how many rows match the full assignment key in ``table_name``.

    Returns 0 when the query yields no rows. Any DB error is logged and
    re-raised.

    Note: the original annotation claimed ``MasterMenteCountModel`` but the
    method always returns an int (0 or ``models[0].count``); the annotation
    is corrected accordingly.
    """
    try:
        query = self.FETCH_COUNT_SQL.format(table_name=table_name)
        result = self._database.execute_select(query, {'inst_cd': inst_cd, 'ta_cd': ta_cd,
                                                       'emp_chg_type_cd': emp_chg_type_cd, 'start_date': start_date})
        models = [MasterMenteCountModel(**r) for r in result]
        if len(models) == 0:
            return 0
        return models[0].count
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e
# def fetch_count(self, inst_cd, ta_cd, emp_chg_type_cd, start_date, table_name) -> MasterMenteCountModel:
# try:
# query = self.FETCH_COUNT_SQL.format(table_name=table_name)
# result = self._database.execute_select(query, {'inst_cd': inst_cd, 'ta_cd': ta_cd,
# 'emp_chg_type_cd': emp_chg_type_cd, 'start_date': start_date})
# models = [MasterMenteCountModel(**r) for r in result]
# if len(models) == 0:
# return 0
# return models[0].count
# except Exception as e:
# logger.exception(f'DB Error : Exception={e.args}')
# raise e
# CSV-download query: assignments joined to facility (mst_inst), employee
# (emp) and business-unit (bu) masters for display names. Both {table_name}
# and {where_clause} are substituted via str.format; the where clause is
# built internally by __build_condition (bound parameters, not raw input).
FETCH_SQL = """\
SELECT DISTINCT
eci.inst_cd AS inst_cd,
mi.inst_name AS inst_name,
eci.ta_cd AS ta_cd,
eci.emp_chg_type_cd AS emp_chg_type_cd,
eci.emp_cd AS emp_cd,
CONCAT(emp.emp_name_family, " ", emp.emp_name_first) AS emp_name_full,
eci.bu_cd AS bu_cd,
bu.bu_name AS bu_name,
eci.start_date AS start_date,
eci.end_date AS end_date,
eci.creater AS creater,
eci.create_date AS create_date,
eci.updater AS updater,
eci.update_date AS update_date
FROM
{table_name} AS eci
LEFT JOIN mst_inst AS mi
ON eci.inst_cd = mi.inst_cd
LEFT JOIN emp
ON eci.emp_cd = emp.emp_cd
LEFT JOIN bu
ON eci.bu_cd = bu.bu_cd
WHERE
{where_clause}
"""
# FETCH_SQL = """\
# SELECT DISTINCT
# eci.inst_cd AS inst_cd,
# mi.inst_name AS inst_name,
# eci.ta_cd AS ta_cd,
# eci.emp_chg_type_cd AS emp_chg_type_cd,
# eci.emp_cd AS emp_cd,
# CONCAT(emp.emp_name_family, " ", emp.emp_name_first) AS emp_name_full,
# eci.bu_cd AS bu_cd,
# bu.bu_name AS bu_name,
# eci.start_date AS start_date,
# eci.end_date AS end_date,
# eci.creater AS creater,
# eci.create_date AS create_date,
# eci.updater AS updater,
# eci.update_date AS update_date
# FROM
# {table_name} AS eci
# LEFT JOIN mst_inst AS mi
# ON eci.inst_cd = mi.inst_cd
# LEFT JOIN emp
# ON eci.emp_cd = emp.emp_cd
# LEFT JOIN bu
# ON eci.bu_cd = bu.bu_cd
# WHERE
# {where_clause}
# """
def fetch_as_data_frame(self, table_name: str, parameter: MasterMainteCsvDlModel):
"""Fetch assignment rows from ``table_name`` matching the form's filters, as a pandas DataFrame."""
try:
# Build WHERE from the non-empty form fields, then splice table/where into FETCH_SQL.
where_clause = self.__build_condition(parameter)
query = self.FETCH_SQL.format(table_name=table_name, where_clause=where_clause)
logger.debug(f'SQL: {query}')
# `parameter` supplies the named bind values (adapt_* attributes) for the query.
df = self._to_data_frame(query, parameter)
logger.debug(f'count= {df.shape[0]}')
return df
except Exception as e:
logger.exception(f'DB Error : Exception={e.args}')
raise e
# def fetch_as_data_frame(self, table_name: str, parameter: MasterMainteCsvDlModel):
# try:
# where_clause = self.__build_condition(parameter)
# query = self.FETCH_SQL.format(table_name=table_name, where_clause=where_clause)
# logger.debug(f'SQL: {query}')
# df = self._to_data_frame(query, parameter)
# logger.debug(f'count= {df.shape[0]}')
# return df
# except Exception as e:
# logger.exception(f'DB Error : Exception={e.args}')
# raise e
def __build_condition(self, parameter: MasterMainteCsvDlModel):
    """Assemble the WHERE clause for the assignment search from the download form.

    Each non-empty form field contributes one condition. LIKE patterns are
    written back onto ``parameter`` as adapt_* attributes so they can later be
    bound as named SQL parameters by the caller.

    Returns:
        str: all conditions joined with ' AND '.
    """
    where_clauses: list[SQLCondition] = []
    # Always-true guard so the generated WHERE clause stays valid even when
    # no search condition is supplied.
    where_clauses.append(SQLCondition('', '', '1 = 1', literal=True))
    # Territory code (partial match)
    if is_not_empty(parameter.ta_cd):
        parameter.adapt_ta_cd = f'%{parameter.ta_cd}%'
        where_clauses.append(SQLCondition('eci.ta_cd', condition.LIKE, 'adapt_ta_cd'))
    # Facility code (partial match)
    if is_not_empty(parameter.inst_cd):
        parameter.adapt_inst_cd = f'%{parameter.inst_cd}%'
        where_clauses.append(SQLCondition('eci.inst_cd', condition.LIKE, 'adapt_inst_cd'))
    # MUID / employee code (partial match)
    if is_not_empty(parameter.emp_cd):
        parameter.adapt_emp_cd = f'%{parameter.emp_cd}%'
        where_clauses.append(SQLCondition('eci.emp_cd', condition.LIKE, 'adapt_emp_cd'))
    # Representative type code (partial match)
    if is_not_empty(parameter.emp_chg_type_cd):
        parameter.adapt_emp_chg_type_cd = f'%{parameter.emp_chg_type_cd}%'
        where_clauses.append(SQLCondition('eci.emp_chg_type_cd', condition.LIKE, 'adapt_emp_chg_type_cd'))
    # "Within validity period" date: start_date <= date <= end_date
    if is_not_empty(parameter.adapt_apply_date_from):
        where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_apply_date_from'))
        where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_apply_date_from'))
    # Validity start date FROM
    if is_not_empty(parameter.adapt_start_date_from):
        where_clauses.append(SQLCondition('eci.start_date', condition.GE, 'adapt_start_date_from'))
    # Validity start date TO
    if is_not_empty(parameter.adapt_start_date_to):
        where_clauses.append(SQLCondition('eci.start_date', condition.LE, 'adapt_start_date_to'))
    # Validity end date FROM
    if is_not_empty(parameter.adapt_end_date_from):
        where_clauses.append(SQLCondition('eci.end_date', condition.GE, 'adapt_end_date_from'))
    # Validity end date TO
    if is_not_empty(parameter.adapt_end_date_to):
        where_clauses.append(SQLCondition('eci.end_date', condition.LE, 'adapt_end_date_to'))
    # Record creation date FROM
    if is_not_empty(parameter.adapt_create_date_from):
        where_clauses.append(SQLCondition('eci.create_date', condition.GE, 'adapt_create_date_from'))
    # Record creation date TO
    if is_not_empty(parameter.adapt_create_date_to):
        where_clauses.append(SQLCondition('eci.create_date', condition.LE, 'adapt_create_date_to'))
    # Record update date FROM
    if is_not_empty(parameter.adapt_update_date_from):
        where_clauses.append(SQLCondition('eci.update_date', condition.GE, 'adapt_update_date_from'))
    # Record update date TO
    if is_not_empty(parameter.adapt_update_date_to):
        where_clauses.append(SQLCondition('eci.update_date', condition.LE, 'adapt_update_date_to'))
    # Fix: use `clause` as the comprehension variable. The original reused the
    # name `condition`, shadowing the imported `condition` module referenced
    # above (condition.LIKE / condition.GE / condition.LE).
    where_clauses_str = ' AND '.join([clause.apply() for clause in where_clauses])
    logger.debug(f'条件設定終了:{where_clauses_str}')
    return where_clauses_str
DELETE_SQL = "DELETE FROM emp_chg_inst_wrk"
# DELETE_SQL = "DELETE FROM emp_chg_inst_wrk"
def delete_dummy_table(self):
    """Delete every row in the work (dummy) table emp_chg_inst_wrk."""
    try:
        self._database.execute(self.DELETE_SQL)
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e
COPY_TABLE_SQL = "INSERT INTO emp_chg_inst_wrk SELECT * FROM emp_chg_inst"
# COPY_TABLE_SQL = "INSERT INTO emp_chg_inst_wrk SELECT * FROM emp_chg_inst"
def copy_real_to_dummy(self):
    """Copy all rows from the real table (emp_chg_inst) into the work table."""
    try:
        self._database.execute(self.COPY_TABLE_SQL)
    except Exception as e:
        logger.exception(f'DB Error : Exception={e.args}')
        raise e

View File

@ -1,31 +1,32 @@
from src.repositories.base_repository import BaseRepository
from src.model.db.master_mente_count import MasterMenteCountModel
from src.logging.get_logger import get_logger
# TODO: ファイル削除予定
# from src.repositories.base_repository import BaseRepository
# from src.model.db.master_mente_count import MasterMenteCountModel
# from src.logging.get_logger import get_logger
logger = get_logger('従業員マスタ')
# logger = get_logger('従業員マスタ')
class EmpMasterRepository(BaseRepository):
    """Repository for the employee master table (src05.emp)."""

    # Counts employee rows matching :emp_cd whose validity period
    # (start_date .. end_date, stored as YYYYMMDD strings) contains
    # :start_work_date.
    FETCH_COUNT_SQL = """\
SELECT
COUNT(*) AS count
FROM
src05.emp
WHERE
emp.emp_cd = :emp_cd
AND str_to_date(emp.start_date, '%Y%m%d') <= str_to_date(:start_work_date, '%Y%m%d')
AND str_to_date(:start_work_date, '%Y%m%d') <= str_to_date(emp.end_date ,'%Y%m%d')
"""

    def fetch_count(self, emp_cd, start_work_date) -> int:
        """Return the number of employee rows valid on ``start_work_date``.

        Fix: the return annotation was ``MasterMenteCountModel`` but the method
        actually returns an int (0 when no row matches, else the count column).

        Raises:
            Exception: re-raised after logging any database error.
        """
        try:
            query = self.FETCH_COUNT_SQL
            result = self._database.execute_select(query, {'emp_cd': emp_cd, 'start_work_date': start_work_date})
            models = [MasterMenteCountModel(**r) for r in result]
            if len(models) == 0:
                return 0
            return models[0].count
        except Exception as e:
            logger.exception(f"DB Error : Exception={e.args}")
            raise e

View File

@ -1,33 +1,34 @@
from src.repositories.base_repository import BaseRepository
from src.model.db.master_mente_count import MasterMenteCountModel
from src.logging.get_logger import get_logger
# TODO: ファイル削除予定
# from src.repositories.base_repository import BaseRepository
# from src.model.db.master_mente_count import MasterMenteCountModel
# from src.logging.get_logger import get_logger
logger = get_logger('汎用区分マスタ')
# logger = get_logger('汎用区分マスタ')
class GenericKbnMstRepository(BaseRepository):
    """Repository for the generic classification master (src05.generic_kbn_mst)."""

    # Counts classification rows matching :generic_kbn_cd and :kbn_cd whose
    # validity period contains :start_date (YYYYMMDD string).
    FETCH_SQL = """\
SELECT
COUNT(*) AS count
FROM
src05.generic_kbn_mst
WHERE
generic_kbn_mst.generic_kbn_cd = :generic_kbn_cd
AND
generic_kbn_mst.kbn_cd = :kbn_cd
AND
STR_TO_DATE( :start_date , '%Y%m%d') BETWEEN generic_kbn_mst.start_date AND generic_kbn_mst.end_date\
"""

    def fetch_count(self, generic_kbn_cd: str, kbn_cd: str, start_date: str) -> int:
        """Return how many generic_kbn_mst rows are valid on ``start_date``.

        Fix: the return annotation was ``MasterMenteCountModel`` but the method
        returns an int (0 when no row matches, else the count column).

        Raises:
            Exception: re-raised after logging any database error.
        """
        try:
            query = self.FETCH_SQL
            result = self._database.execute_select(query, {'generic_kbn_cd': generic_kbn_cd, 'kbn_cd': kbn_cd, 'start_date' : start_date})
            models = [MasterMenteCountModel(**r) for r in result]
            if len(models) == 0:
                return 0
            return models[0].count
        except Exception as e:
            # Use exception() (not error()) so the stack trace is logged,
            # consistent with the sibling repository classes.
            logger.exception(f"DB Error : Exception={e.args}")
            raise e

View File

@ -1,29 +1,30 @@
from src.repositories.base_repository import BaseRepository
from src.model.db.master_mente_count import MasterMenteCountModel
from src.logging.get_logger import get_logger
# TODO: ファイル削除予定
# from src.repositories.base_repository import BaseRepository
# from src.model.db.master_mente_count import MasterMenteCountModel
# from src.logging.get_logger import get_logger
logger = get_logger('メルク施設マスタ')
# logger = get_logger('メルク施設マスタ')
class MstInstRepository(BaseRepository):
    """Repository for the facility master (src05.mst_inst)."""

    # Counts facility rows matching :inst_cd (existence check).
    FETCH_COUNT_SQL = """\
SELECT
COUNT(*) AS count
FROM
src05.mst_inst
WHERE
mst_inst.inst_cd = :inst_cd
"""

    def fetch_count(self, inst_cd) -> int:
        """Return the number of facility rows with the given code.

        Fix: the return annotation was ``MasterMenteCountModel`` but the method
        returns an int (0 when no row matches, else the count column).

        Raises:
            Exception: re-raised after logging any database error.
        """
        try:
            query = self.FETCH_COUNT_SQL
            result = self._database.execute_select(query, {'inst_cd': inst_cd})
            models = [MasterMenteCountModel(**r) for r in result]
            if len(models) == 0:
                return 0
            return models[0].count
        except Exception as e:
            logger.exception(f"DB Error : Exception={e.args}")
            raise e

View File

@ -40,12 +40,13 @@ class BatchStatusService(BaseService):
self.__assert_record_exists()
return self.hdke_table_record.bch_actf == constants.BATCH_STATUS_PROCESSING
def is_dump_processing(self):
"""Return True when the dump status is anything other than UNPROCESSED."""
# TODO: scheduled for removal
# def is_dump_processing(self):
# """dump処理処理中かどうかを判定する"""
# Ensure the date-master record exists before reading its status.
self.__assert_record_exists()
return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED
# # 日付マスタのレコードがあることを確認
# self.__assert_record_exists()
# return self.hdke_table_record.dump_sts_kbn != constants.DUMP_STATUS_UNPROCESSED
def __assert_record_exists(self):
"""日付テーブルが有ることを保証する"""

View File

@ -1,265 +1,266 @@
import os
import json
import html
import csv
# TODO: ファイル削除予定
# import os
# import json
# import html
# import csv
import pandas as pd
# import pandas as pd
from fastapi import HTTPException
from io import TextIOWrapper
from src.aws.aws_api_client import AWSAPIClient
from src.aws.s3 import S3Client
from src.error.exceptions import DBException
from starlette import status
from datetime import datetime
from src.services.base_service import BaseService
from src.system_var import constants, environment
from src.repositories.base_repository import BaseRepository
from src.repositories.mst_inst_repository import MstInstRepository
from src.repositories.bu_master_cd_repository import BuMasterRepository
from src.repositories.emp_master_repository import EmpMasterRepository
from src.repositories.emp_chg_inst_repository import EmpChgInstRepository
from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository
from src.model.internal.master_mainte_csv import MasterMainteCSVItems
from src.model.internal.master_mainte_emp_chg_inst_function import NewEmpChgInstFunction
from src.model.internal.master_mainte_emp_chg_inst_function import ChangeEmpChgInstFunction
from src.model.view.inst_emp_csv_upload_view_model import InstEmpCsvUploadViewModel
from src.model.view.table_override_view_model import TableOverrideViewModel
from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel
from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
from src.logging.get_logger import get_logger
# from fastapi import HTTPException
# from io import TextIOWrapper
# from src.aws.aws_api_client import AWSAPIClient
# from src.aws.s3 import S3Client
# from src.error.exceptions import DBException
# from starlette import status
# from datetime import datetime
# from src.services.base_service import BaseService
# from src.system_var import constants, environment
# from src.repositories.base_repository import BaseRepository
# from src.repositories.mst_inst_repository import MstInstRepository
# from src.repositories.bu_master_cd_repository import BuMasterRepository
# from src.repositories.emp_master_repository import EmpMasterRepository
# from src.repositories.emp_chg_inst_repository import EmpChgInstRepository
# from src.repositories.generic_kbn_mst_repository import GenericKbnMstRepository
# from src.model.internal.master_mainte_csv import MasterMainteCSVItems
# from src.model.internal.master_mainte_emp_chg_inst_function import NewEmpChgInstFunction
# from src.model.internal.master_mainte_emp_chg_inst_function import ChangeEmpChgInstFunction
# from src.model.view.inst_emp_csv_upload_view_model import InstEmpCsvUploadViewModel
# from src.model.view.table_override_view_model import TableOverrideViewModel
# from src.model.request.master_mainte_csvup import MasterMainteCsvUpModel
# from src.model.request.master_mainte_csvdl import MasterMainteCsvDlModel
# from src.logging.get_logger import get_logger
logger = get_logger('マスターメンテ')
# logger = get_logger('マスターメンテ')
class MasterMainteService(BaseService):
"""Service for master maintenance: CSV upload/validation, registration, real-to-dummy table copy, and CSV download of facility-representative assignments."""
# Repository classes wired in via BaseService dependency injection.
REPOSITORIES = {
'mst_inst_repository': MstInstRepository,
'emp_master_repository': EmpMasterRepository,
'bu_master_repository': BuMasterRepository,
'emp_chginst_repository': EmpChgInstRepository,
'generic_kbn_mst_repository': GenericKbnMstRepository,
}
# class MasterMainteService(BaseService):
# REPOSITORIES = {
# 'mst_inst_repository': MstInstRepository,
# 'emp_master_repository': EmpMasterRepository,
# 'bu_master_repository': BuMasterRepository,
# 'emp_chginst_repository': EmpChgInstRepository,
# 'generic_kbn_mst_repository': GenericKbnMstRepository,
# }
# AWS client classes wired in via BaseService dependency injection.
CLIENTS = {
's3_client': S3Client
}
# CLIENTS = {
# 's3_client': S3Client
# }
# Type declarations for the injected instances bound in __init__.
mst_inst_repository: MstInstRepository
emp_master_repository: EmpMasterRepository
bu_master_repository: BuMasterRepository
emp_chginst_repository: EmpChgInstRepository
generic_kbn_mst_repository: GenericKbnMstRepository
s3_client: S3Client
# mst_inst_repository: MstInstRepository
# emp_master_repository: EmpMasterRepository
# bu_master_repository: BuMasterRepository
# emp_chginst_repository: EmpChgInstRepository
# generic_kbn_mst_repository: GenericKbnMstRepository
# s3_client: S3Client
def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
"""Bind the injected repositories and AWS clients to instance attributes."""
super().__init__(repositories, clients)
self.mst_inst_repository = repositories['mst_inst_repository']
self.emp_master_repository = repositories['emp_master_repository']
self.bu_master_repository = repositories['bu_master_repository']
self.emp_chginst_repository = repositories['emp_chginst_repository']
self.generic_kbn_mst_repository = repositories['generic_kbn_mst_repository']
self.s3_client = clients['s3_client']
# def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
# super().__init__(repositories, clients)
# self.mst_inst_repository = repositories['mst_inst_repository']
# self.emp_master_repository = repositories['emp_master_repository']
# self.bu_master_repository = repositories['bu_master_repository']
# self.emp_chginst_repository = repositories['emp_chginst_repository']
# self.generic_kbn_mst_repository = repositories['generic_kbn_mst_repository']
# self.s3_client = clients['s3_client']
def prepare_mainte_csv_up_view(self,
file: TextIOWrapper,
csv_file_name: str,
csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel:
"""Validate an uploaded assignment CSV and build the confirmation view model (no DB writes here)."""
# def prepare_mainte_csv_up_view(self,
# file: TextIOWrapper,
# csv_file_name: str,
# csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel:
# Reject unknown function / target-table selections up front.
if csv_upload_form.select_function != 'new' and csv_upload_form.select_function != 'change':
raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}')
if csv_upload_form.select_table != 'dummy' and csv_upload_form.select_table != 'real':
raise Exception(f'登録テーブルの選択値が不正です: {csv_upload_form.select_table}')
# if csv_upload_form.select_function != 'new' and csv_upload_form.select_function != 'change':
# raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}')
# if csv_upload_form.select_table != 'dummy' and csv_upload_form.select_table != 'real':
# raise Exception(f'登録テーブルの選択値が不正です: {csv_upload_form.select_table}')
(table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table)
# (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table)
# Parse the CSV; repositories are passed in for per-row validation lookups.
csv_items = MasterMainteCSVItems(
file,
csv_upload_form.select_function,
table_name,
self.mst_inst_repository,
self.emp_master_repository,
self.bu_master_repository,
self.emp_chginst_repository,
self.generic_kbn_mst_repository
)
# csv_items = MasterMainteCSVItems(
# file,
# csv_upload_form.select_function,
# table_name,
# self.mst_inst_repository,
# self.emp_master_repository,
# self.bu_master_repository,
# self.emp_chginst_repository,
# self.generic_kbn_mst_repository
# )
error_message_list = []
# CSV empty check (header row only, no data rows).
if len(csv_items.lines) == 0:
error_message_list.append('選択されたCSVファイルの2行目以降に値が記入されておりません。')
else:
for row_item in csv_items:
error_message_list.extend([data for data in row_item.validate()])
# error_message_list = []
# # CSVファイル0件(ヘッダ行のみ)チェック
# if len(csv_items.lines) == 0:
# error_message_list.append('選択されたCSVファイルの2行目以降に値が記入されておりません。')
# else:
# for row_item in csv_items:
# error_message_list.extend([data for data in row_item.validate()])
csv_upload_list = []
json_upload_data = ''
# Only build the preview list / JSON payload when validation passed.
if len(error_message_list) == 0:
csv_upload_list: list[dict] = csv_items.to_dict()
# Build the JSON payload carried through the confirmation form.
json_upload_data = csv_items.to_json()
# csv_upload_list = []
# json_upload_data = ''
# if len(error_message_list) == 0:
# csv_upload_list: list[dict] = csv_items.to_dict()
# # json作成
# json_upload_data = csv_items.to_json()
mainte_csv_up = InstEmpCsvUploadViewModel(
is_verified=True,
error_message_list=error_message_list,
select_function=csv_upload_form.select_function,
select_table=csv_upload_form.select_table,
csv_upload_list=csv_upload_list,
json_upload_data=json_upload_data,
csv_file_name=csv_file_name,
select_function_message=self.__make_dialog_confirm_message(
csv_upload_form.select_function,
selected_table_msg)
)
return mainte_csv_up
# mainte_csv_up = InstEmpCsvUploadViewModel(
# is_verified=True,
# error_message_list=error_message_list,
# select_function=csv_upload_form.select_function,
# select_table=csv_upload_form.select_table,
# csv_upload_list=csv_upload_list,
# json_upload_data=json_upload_data,
# csv_file_name=csv_file_name,
# select_function_message=self.__make_dialog_confirm_message(
# csv_upload_form.select_function,
# selected_table_msg)
# )
# return mainte_csv_up
def prepare_mainte_new_inst_view(self,
user_name: str,
csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel:
"""Register the confirmed CSV data (new facility or representative change) and build the result view model."""
# def prepare_mainte_new_inst_view(self,
# user_name: str,
# csv_upload_form: MasterMainteCsvUpModel) -> InstEmpCsvUploadViewModel:
(table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table)
# (table_name, selected_table_msg) = self.__choose_target_table(csv_upload_form.select_table)
# The JSON payload was HTML-escaped for the round trip through the form; undo it.
csv_data_list = json.loads(html.unescape(csv_upload_form.unescape().json_upload_data))
# csv_data_list = json.loads(html.unescape(csv_upload_form.unescape().json_upload_data))
# Pick the registration strategy by the selected function.
if csv_upload_form.select_function == 'new':
emp_chg_inst = NewEmpChgInstFunction(
csv_data_list,
table_name,
selected_table_msg,
user_name,
self.emp_chginst_repository)
elif csv_upload_form.select_function == 'change':
emp_chg_inst = ChangeEmpChgInstFunction(
csv_data_list,
table_name,
selected_table_msg,
user_name,
self.emp_chginst_repository)
else:
raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}')
# if csv_upload_form.select_function == 'new':
# emp_chg_inst = NewEmpChgInstFunction(
# csv_data_list,
# table_name,
# selected_table_msg,
# user_name,
# self.emp_chginst_repository)
# elif csv_upload_form.select_function == 'change':
# emp_chg_inst = ChangeEmpChgInstFunction(
# csv_data_list,
# table_name,
# selected_table_msg,
# user_name,
# self.emp_chginst_repository)
# else:
# raise Exception(f'機能の選択値が不正です: {csv_upload_form.select_function}')
(result_message_list, raw_error_list) = emp_chg_inst.save()
# (result_message_list, raw_error_list) = emp_chg_inst.save()
error_message_list = []
error_message_list.extend(raw_error_list)
# error_message_list = []
# error_message_list.extend(raw_error_list)
mainte_csv_up = InstEmpCsvUploadViewModel(
is_insert=True,
result_message_list=result_message_list,
error_message_list=error_message_list
)
return mainte_csv_up
# mainte_csv_up = InstEmpCsvUploadViewModel(
# is_insert=True,
# result_message_list=result_message_list,
# error_message_list=error_message_list
# )
# return mainte_csv_up
def copy_data_real_to_dummy(self) -> TableOverrideViewModel:
"""Replace the work (dummy) table contents with a fresh copy of the real table, inside one transaction."""
try:
self.emp_chginst_repository.begin()
self.emp_chginst_repository.to_jst()
self.emp_chginst_repository.delete_dummy_table()
self.emp_chginst_repository.copy_real_to_dummy()
self.emp_chginst_repository.commit()
except Exception as e:
# Roll back so a partial delete/copy never survives.
self.emp_chginst_repository.rollback()
raise e
# def copy_data_real_to_dummy(self) -> TableOverrideViewModel:
# try:
# self.emp_chginst_repository.begin()
# self.emp_chginst_repository.to_jst()
# self.emp_chginst_repository.delete_dummy_table()
# self.emp_chginst_repository.copy_real_to_dummy()
# self.emp_chginst_repository.commit()
# except Exception as e:
# self.emp_chginst_repository.rollback()
# raise e
# Mark the copy as complete and return the result to the screen.
table_override = TableOverrideViewModel(
is_override=True
)
return table_override
# # コピー完了をマークして画面に返却
# table_override = TableOverrideViewModel(
# is_override=True
# )
# return table_override
def search_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel) -> pd.DataFrame:
"""Search assignment data for CSV download; translates DB errors into HTTP 500 responses."""
try:
csv_download_form.unescape()
# Search the facility-representative data.
search_result_df = self.search_download_emp_chg_inst_data(csv_download_form)
except DBException as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail={'error': 'db_error', 'message': e.args}
)
# def search_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel) -> pd.DataFrame:
# try:
# csv_download_form.unescape()
# # 施設担当者データを検索
# search_result_df = self.search_download_emp_chg_inst_data(csv_download_form)
# except DBException as e:
# raise HTTPException(
# status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
# detail={'error': 'db_error', 'message': e.args}
# )
return search_result_df
# return search_result_df
def search_download_emp_chg_inst_data(self, csv_download_form: MasterMainteCsvDlModel):
    """Resolve the target table for the form and fetch the matching rows as a DataFrame."""
    table_name, _ = self.__choose_target_table(csv_download_form.select_table)
    return self.emp_chginst_repository.fetch_as_data_frame(table_name, csv_download_form)
def write_csv_file(self, data_frame: pd.DataFrame, header: list[str], download_file_name: str):
"""Write ``data_frame`` to a temporary CSV file, emitting ``header`` as the first record; returns the file path."""
# Write to CSV under the temporary-file directory.
output_file_path = os.path.join(constants.MENTE_CSV_TEMPORARY_FILE_DIR_PATH, download_file_name)
# Map DataFrame column names to the display header names (wide-format row).
header_data = {}
for df_column, header_column in zip(data_frame.columns, header):
header_data[df_column] = header_column
# def write_csv_file(self, data_frame: pd.DataFrame, header: list[str], download_file_name: str):
# # csvに書き込み
# output_file_path = os.path.join(constants.MENTE_CSV_TEMPORARY_FILE_DIR_PATH, download_file_name)
# # 横長のDataFrameとするため、ヘッダーの加工処理
# header_data = {}
# for df_column, header_column in zip(data_frame.columns, header):
# header_data[df_column] = header_column
header_df = pd.DataFrame([header_data], index=None)
output_df = pd.concat([header_df, data_frame])
# Emit the header as an ordinary first record, not as a pandas header row.
output_df.to_csv(output_file_path, encoding="utf-8_sig", quoting=csv.QUOTE_ALL, index=False, header=False)
# header_df = pd.DataFrame([header_data], index=None)
# output_df = pd.concat([header_df, data_frame])
# # ヘッダー行としてではなく、1レコードとして出力する
# output_df.to_csv(output_file_path, encoding="utf-8_sig", quoting=csv.QUOTE_ALL, index=False, header=False)
return output_file_path
# return output_file_path
def upload_emp_chg_inst_data_file(self, df: pd.DataFrame, user_id: str, select_table: str) -> tuple[str, str]:
"""Write ``df`` to a CSV, upload it to S3, and return (result message, presigned download URL)."""
if df.shape[0] == 0:
return '該当データが存在しないためCSVファイルを出力しませんでした', ''
# def upload_emp_chg_inst_data_file(self, df: pd.DataFrame, user_id: str, select_table: str) -> tuple[str, str]:
# if df.shape[0] == 0:
# return '該当データが存在しないためCSVファイルを出力しませんでした', ''
# Timestamp used to make the generated file name unique.
current_timestamp = datetime.now()
download_file_name = f'Result_{user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.csv'
# # ファイル名に使用するタイムスタンプを初期化しておく
# current_timestamp = datetime.now()
# download_file_name = f'Result_{user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.csv'
# Write the file out locally (CSV).
local_file_path = self.__write_emp_chg_inst_data_to_file(df, download_file_name)
# # ファイルを書き出し(CSV)
# local_file_path = self.__write_emp_chg_inst_data_to_file(df, download_file_name)
# Upload the local file to S3 and obtain a presigned download URL.
download_file_url = ''
try:
bucket_name = environment.MASTER_MAINTENANCE_BUCKET
file_key = f'data/{os.path.basename(local_file_path)}'
self.s3_client.upload_file(local_file_path, bucket_name, file_key)
# Remove the local copy once uploaded; the URL below only needs the basename.
self.delete_local_file(local_file_path)
download_file_url = self.generate_download_file_url(local_file_path)
except Exception as e:
logger.exception(f'S3 アクセスエラー{e}')
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail={'error': 'aws_error', 'message': e.args}
)
if select_table == 'dummy':
result_msg = f'ダミーテーブルのデータ{df.shape[0]}件をCSVファイルに出力しました'
else:
result_msg = f'本番テーブルのデータ{df.shape[0]}件をCSVファイルに出力しました'
# # ローカルファイルからS3にアップロードし、ダウンロード用URLを取得する
# download_file_url = ''
# try:
# bucket_name = environment.MASTER_MAINTENANCE_BUCKET
# file_key = f'data/{os.path.basename(local_file_path)}'
# self.s3_client.upload_file(local_file_path, bucket_name, file_key)
# # アップロード後、ローカルからは削除する
# self.delete_local_file(local_file_path)
# download_file_url = self.generate_download_file_url(local_file_path)
# except Exception as e:
# logger.exception(f'S3 アクセスエラー{e}')
# raise HTTPException(
# status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
# detail={'error': 'aws_error', 'message': e.args}
# )
# if select_table == 'dummy':
# result_msg = f'ダミーテーブルのデータ{df.shape[0]}件をCSVファイルに出力しました'
# else:
# result_msg = f'本番テーブルのデータ{df.shape[0]}件をCSVファイルに出力しました'
return result_msg, download_file_url
# return result_msg, download_file_url
def generate_download_file_url(self, local_file_path: str) -> str:
    """Create a presigned S3 download URL for the uploaded CSV file."""
    key = f'data/{os.path.basename(local_file_path)}'
    return self.s3_client.generate_presigned_url(
        environment.MASTER_MAINTENANCE_BUCKET, key, constants.MENTE_CSV_DOWNLOAD_FILE_NAME)
def __write_emp_chg_inst_data_to_file(self, df: pd.DataFrame, download_file_name: str) -> str:
"""Write the DataFrame to a local CSV with the standard download header; returns the local path."""
logger.info('CSVファイルを出力する')
local_file_path = self.write_csv_file(
df, header=constants.MENTE_CSV_DOWNLOAD_HEADER, download_file_name=download_file_name)
# def __write_emp_chg_inst_data_to_file(self, df: pd.DataFrame, download_file_name: str) -> str:
# logger.info('CSVファイルを出力する')
# local_file_path = self.write_csv_file(
# df, header=constants.MENTE_CSV_DOWNLOAD_HEADER, download_file_name=download_file_name)
return local_file_path
# return local_file_path
def __choose_target_table(self, select_table: str):
    """Map the form's table selection to (schema-qualified table name, display label).

    Raises:
        Exception: when ``select_table`` is neither 'dummy' nor 'real'.
    """
    if select_table == 'dummy':
        return ('src05.emp_chg_inst_wrk', constants.CSV_CHANGE_TABLE_NAME)
    if select_table == 'real':
        return ('src05.emp_chg_inst', constants.CSV_REAL_TABLE_NAME)
    raise Exception(f'登録テーブルの選択値が不正です: {select_table}')
def __make_dialog_confirm_message(self, select_function: str, selected_table_msg: str) -> str:
    """Build the confirmation-dialog text for the chosen function and table.

    'new' maps to the new-facility-registration label; anything else maps to
    the facility-staff-change label.
    """
    if select_function == 'new':
        operation_label = '新規施設登録'
    else:
        operation_label = '施設担当者変更'
    return f'{selected_table_msg}に{operation_label}を行いますか?'
# Remove a temporary local file after it has been served/uploaded.
# Propagates FileNotFoundError if the path does not exist (os.remove semantics).
def delete_local_file(self, local_file_path: str):
os.remove(local_file_path)
# Dead commented-out duplicate below — scheduled for deletion.
# def delete_local_file(self, local_file_path: str):
# os.remove(local_file_path)

View File

@ -1,3 +1,4 @@
/* TODO: ファイル削除予定 */
/* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */
/* @see https://bootstrap-guide.com/content/reboot#page-defaults */
table {

View File

@ -289,31 +289,32 @@ function checkNumberOnlyForm($this)
$this.value=str;
}
// TODO: scheduled for deletion
// "Back to menu" button handler.
// Clears session storage, shows the loading overlay identified by
// loadingElemId, then navigates to the master-maintenance menu page.
function backToMainteMenu(loadingElemId = '_loading'){
sessionStorage.clear();
// Show the loading indicator while navigating.
showLoading(loadingElemId);
location.href = "/masterMainte/masterMainteMenu";
}
// function backToMainteMenu(loadingElemId = '_loading'){
// sessionStorage.clear();
// // ローディング表示
// showLoading(loadingElemId);
// location.href = "/masterMainte/masterMainteMenu";
// }
// 確認ダイアログ
// Show a blocking confirmation dialog and report the user's choice
// (true = OK, false = Cancel).
function confirmDialog(strMesssage) {
    return confirm(strMesssage);
}
// // 確認ダイアログ
// function confirmDialog(strMesssage) {
// var result = confirm(strMesssage);
// return result;
// }
// Enable/disable the "confirm" button based on the CSV file input:
// disabled while the #csvFile input is empty, enabled once a file is chosen.
function formInsertBtDisabled(){
var validFlg = false;
if(document.getElementById("csvFile").value === ""){
validFlg = true;
}
// TODO: 削除予定 function formInsertBtDisabled(){
// var validFlg = false;
// if(document.getElementById("csvFile").value === ""){
// validFlg = true;
// }
if (validFlg == true) {
document.getElementById("confirm").disabled = true;
}
else {
document.getElementById("confirm").disabled = false;
}
}
// if (validFlg == true) {
// document.getElementById("confirm").disabled = true;
// }
// else {
// document.getElementById("confirm").disabled = false;
// }
// }

View File

@ -7,8 +7,9 @@ PERMISSION_DISABLED = 0
# 日付テーブル.バッチ処理ステータス:未処理
BATCH_STATUS_PROCESSING = '1'
# TODO: 削除予定
# 日付テーブル.dump取得状態区分未処理
DUMP_STATUS_UNPROCESSED = '0'
# DUMP_STATUS_UNPROCESSED = '0'
# 生物由来照会
@ -76,134 +77,136 @@ LOGOUT_REASON_MESSAGE_MAP = {
LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。<br>再度Loginするか、<br>管理者に問い合わせてください。',
LOGOUT_REASON_LOGIN_FAILED_LIMIT_EXCEEDED: 'ログイン失敗回数の上限を超えましたので<br>アカウントをロックしました。<br>管理者に連絡してください'
}
# TODO: 削除予定
# 新規施設担当者登録CSV(マスターメンテ)
NEW_INST_EMP_CSV_LOGICAL_NAMES = [
'施設コード',
'施設名',
'領域コード',
'担当者種別コード',
'MUID',
'担当者名(姓)',
'担当者名(名)',
'ビジネスユニットコード',
'適用開始日',
'適用終了日'
]
# NEW_INST_EMP_CSV_LOGICAL_NAMES = [
# '施設コード',
# '施設名',
# '領域コード',
# '担当者種別コード',
# 'MUID',
# '担当者名(姓)',
# '担当者名(名)',
# 'ビジネスユニットコード',
# '適用開始日',
# '適用終了日'
# ]
# 施設コードの列No
CSV_NEW_INST_CD_COL_NO = 0
# CSV_NEW_INST_CD_COL_NO = 0
# 施設名の列No
CSV_NEW_INST_NAME_COL_NO = 1
# CSV_NEW_INST_NAME_COL_NO = 1
# 領域コードの列No
CSV_NEW_TA_CD_COL_NO = 2
# CSV_NEW_TA_CD_COL_NO = 2
# 担当者種別コードの列No
CSV_NEW_EMP_CHG_TYPE_CD_COL_NO = 3
# CSV_NEW_EMP_CHG_TYPE_CD_COL_NO = 3
# MUIDの列No
CSV_NEW_EMP_CD_COL_NO = 4
# CSV_NEW_EMP_CD_COL_NO = 4
# 担当者名の列No
CSV_NEW_EMP_NAME_FAMILY_COL_NO = 5
# CSV_NEW_EMP_NAME_FAMILY_COL_NO = 5
# 担当者名の列No
CSV_NEW_EMP_NAME_FIRST_COL_NO = 6
# CSV_NEW_EMP_NAME_FIRST_COL_NO = 6
# ビジネスユニットコードの列No
CSV_NEW_BU_CD_COL_NO = 7
# CSV_NEW_BU_CD_COL_NO = 7
# 適用開始日の列No
CSV_NEW_START_DATE = 8
# CSV_NEW_START_DATE = 8
# 適用終了日の列No
CSV_NEW_END_DATE = 9
# CSV_NEW_END_DATE = 9
# TODO: 削除予定
# 施設担当者変更登録CSV(マスターメンテ)
CHANGE_INST_CSV_LOGICAL_NAMES = [
'ビジネスユニットコード',
'ビジネスユニット名',
'組織コード',
'組織名略称',
'施設コード',
'施設名',
'領域コード',
'説明',
'担当者種別コード',
'MUID',
'担当者名',
'施設担当_開始日',
'施設担当_終了日',
'終了日の変更',
'コメント'
]
# CHANGE_INST_CSV_LOGICAL_NAMES = [
# 'ビジネスユニットコード',
# 'ビジネスユニット名',
# '組織コード',
# '組織名略称',
# '施設コード',
# '施設名',
# '領域コード',
# '説明',
# '担当者種別コード',
# 'MUID',
# '担当者名',
# '施設担当_開始日',
# '施設担当_終了日',
# '終了日の変更',
# 'コメント'
# ]
# ビジネスユニットコードの列No
CSV_CHANGE_BU_CD_COL_NO = 0
# ビジネスユニット名の列No
CSV_CHANGE_BU_NAME_COL_NO = 1
# 組織コードの列No
CSV_CHANGE_ORG_CD_COL_NO = 2
# 組織名略称の列No
CSV_CHANGE_ORG_SHORT_NAME_COL_NO = 3
# 施設コードの列No
CSV_CHANGE_INST_CD_COL_NO = 4
# 施設名の列No
CSV_CHANGE_INST_NAME_COL_NO = 5
# 領域コードの列No
CSV_CHANGE_TA_CD_COL_NO = 6
# 説明の列No
CSV_CHANGE_EXPLAIN_COL_NO = 7
# 担当者種別コード
CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO = 8
# MUIDの列No
CSV_CHANGE_EMP_CD_COL_NO = 9
# 担当者名の列No
CSV_CHANGE_EMP_FULL_NAME_COL_NO = 10
# 施設担当_開始日の列No
CSV_CHANGE_INST_EMP_START_DATE_COL_NO = 11
# 施設担当_終了日の列No
CSV_CHANGE_INST_EMP_END_DATE_COL_NO = 12
# 終了日の変更の列No
CSV_CHANGE_CHANGE_END_DATE_COL_NO = 13
# コメントの列No
CSV_CHANGE_COMMENT = 14
# CSV_CHANGE_BU_CD_COL_NO = 0
# # ビジネスユニット名の列No
# CSV_CHANGE_BU_NAME_COL_NO = 1
# # 組織コードの列No
# CSV_CHANGE_ORG_CD_COL_NO = 2
# # 組織名略称の列No
# CSV_CHANGE_ORG_SHORT_NAME_COL_NO = 3
# # 施設コードの列No
# CSV_CHANGE_INST_CD_COL_NO = 4
# # 施設名の列No
# CSV_CHANGE_INST_NAME_COL_NO = 5
# # 領域コードの列No
# CSV_CHANGE_TA_CD_COL_NO = 6
# # 説明の列No
# CSV_CHANGE_EXPLAIN_COL_NO = 7
# # 担当者種別コード
# CSV_CHANGE_EMP_CHG_TYPE_CD_COL_NO = 8
# # MUIDの列No
# CSV_CHANGE_EMP_CD_COL_NO = 9
# # 担当者名の列No
# CSV_CHANGE_EMP_FULL_NAME_COL_NO = 10
# # 施設担当_開始日の列No
# CSV_CHANGE_INST_EMP_START_DATE_COL_NO = 11
# # 施設担当_終了日の列No
# CSV_CHANGE_INST_EMP_END_DATE_COL_NO = 12
# # 終了日の変更の列No
# CSV_CHANGE_CHANGE_END_DATE_COL_NO = 13
# # コメントの列No
# CSV_CHANGE_COMMENT = 14
# TODO: 削除予定
# CSVアップロードテーブル名(マスターメンテ)
CSV_REAL_TABLE_NAME = '本番テーブル'
CSV_CHANGE_TABLE_NAME = 'ダミーテーブル'
# CSV_REAL_TABLE_NAME = '本番テーブル'
# CSV_CHANGE_TABLE_NAME = 'ダミーテーブル'
MENTE_CSV_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data')
# MENTE_CSV_TEMPORARY_FILE_DIR_PATH = path.join(path.curdir, 'src', 'data')
MENTE_CSV_DOWNLOAD_EXTRACT_COLUMNS = [
'inst_cd',
'inst_name',
'ta_cd',
'emp_chg_type_cd',
'emp_cd',
'emp_name_full',
'bu_cd',
'bu_name',
'start_date',
'end_date',
'creater',
'create_date',
'updater',
'update_date'
]
# MENTE_CSV_DOWNLOAD_EXTRACT_COLUMNS = [
# 'inst_cd',
# 'inst_name',
# 'ta_cd',
# 'emp_chg_type_cd',
# 'emp_cd',
# 'emp_name_full',
# 'bu_cd',
# 'bu_name',
# 'start_date',
# 'end_date',
# 'creater',
# 'create_date',
# 'updater',
# 'update_date'
# ]
MENTE_CSV_DOWNLOAD_HEADER = [
'施設コード',
'施設名',
'領域コード',
'担当者種別コード',
'MUID',
'担当者名',
'ビジネスユニットコード',
'ビジネスユニット名',
'適用開始日',
'適用終了日',
'作成者',
'作成日',
'更新者',
'更新日'
]
# MENTE_CSV_DOWNLOAD_HEADER = [
# '施設コード',
# '施設名',
# '領域コード',
# '担当者種別コード',
# 'MUID',
# '担当者名',
# 'ビジネスユニットコード',
# 'ビジネスユニット名',
# '適用開始日',
# '適用終了日',
# '作成者',
# '作成日',
# '更新者',
# '更新日'
# ]
MENTE_CSV_DOWNLOAD_FILE_NAME = 'instEmpData.csv'
# MENTE_CSV_DOWNLOAD_FILE_NAME = 'instEmpData.csv'
# CSVアップロードの制限サイズ20MB
MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE = 20971520
# MENTE_CSV_UPLOAD_MAX_FILE_SIZE_BYTE = 20971520
# 利用停止区分
DISPLAY_USER_STOP_DIV = {

View File

@ -11,7 +11,7 @@ COGNITO_CLIENT_SECRET = os.environ['COGNITO_CLIENT_SECRET']
AWS_REGION = os.environ['AWS_REGION']
SESSION_TABLE_NAME = os.environ['SESSION_TABLE_NAME']
BIO_ACCESS_LOG_BUCKET = os.environ['BIO_ACCESS_LOG_BUCKET']
MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET']
# TODO: 削除予定 MASTER_MAINTENANCE_BUCKET = os.environ['MASTER_MAINTENANCE_BUCKET']
DB_HOST = os.environ['DB_HOST']
DB_PORT = int(os.environ['DB_PORT'])

View File

@ -15,5 +15,5 @@
<script src="https://cdn.jsdelivr.net/npm/paginationjs@2.5.0/dist/pagination.min.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/flatpickr@4.6.13/dist/flatpickr.min.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/flatpickr/dist/l10n/ja.min.js" crossorigin="anonymous"></script>
<script src="/static/function/businessLogicScript.js" integrity="sha384-ytd1o7Rx4BPzjO3RpzR9fW/Z4avGzS7+BRPZVUsQp5X4zXB6xdZpR47/En1mNl7s" crossorigin="anonymous"></script>
<script src="/static/function/businessLogicScript.js" integrity="sha384-DHLRdpDU8f8zbMCbMMvqoIgr7Je9s5Dgydd5R0j5c0nNAkjTScl2kV8wvx2cE6Ud" crossorigin="anonymous"></script>
<script src="/static/lib/fixed_midashi.js" integrity="sha384-mCd6L3DNaLgUWyH051BywJfzlVavCkK6F0wbMqG+j7jAq174Uf7HJdq3H4wxCJKs" crossorigin="anonymous"></script>

View File

@ -1,3 +1,4 @@
<!-- TODO: ファイル削除予定 -->
<!DOCTYPE html>
<html lang="ja">
<head>

View File

@ -1,3 +1,4 @@
<!-- TODO: ファイル削除予定 -->
<!DOCTYPE html>
<html lang="ja">
<head>
@ -210,4 +211,4 @@
{% include '_loading.html' %}
{% endwith %}
</body>
</html>
</html>

View File

@ -1,3 +1,4 @@
<!-- TODO: ファイル削除予定 -->
<!DOCTYPE html>
<html lang="ja">
<head>

View File

@ -23,7 +23,7 @@
<div class="notUseBioMsg">生物由来データ参照は <br> 日次バッチ処理中のため利用出来ません</div>
{% endif %}
{% endif %}
{% if menu.is_available_master_maintenance_menu() %}
{# TODO: 削除予定 {% if menu.is_available_master_maintenance_menu() %}
{% if menu.is_batch_processing() %}
<div class="notUseMainteMsg"> マスターメンテメニューは <br> 日次バッチ処理中のため利用出来ません </div>
{% elif menu.is_backup_processing() %}
@ -32,6 +32,8 @@
<a href="javascript:void(0);" onclick="transitionTo('/masterMainte/masterMainteMenu')" class="btn btn-primary btn-lg btn_width">マスターメンテメニュー</a><br><br>
{% endif %}
{% endif %}
#}
<br><br><a href="javascript:void(0);" onclick="transitionTo('/logout/?reason=do_logout')" class="btn btn-info btn-lg btn_width">Logout</a>
</div>
<!-- ローディング -->

View File

@ -1,3 +1,4 @@
<!-- TODO: ファイル削除予定 -->
<!DOCTYPE html>
<html lang="ja">
<head>
@ -62,4 +63,4 @@
{% include '_loading.html' %}
{% endwith %}
</body>
</html>
</html>

View File

@ -0,0 +1,11 @@
# task environment file.
LOG_LEVEL=INFO
ARCHIVE_TRN_RESULT_FOLDER=/jsk/trn_result_data
ARCHIVE_TRN_RESULT_BIO_FOLDER=/jsk/trn_result_data_bio
ARCHIVE_TRN_RESULT_BIO_LOT_FOLDER=/jsk/trn_result_data_bio_lot
ARCHIVE_TRN_RESULT_INVENTORY_FOLDER=/jsk/trn_recive_inventry
DB_CONNECTION_MAX_RETRY_ATTEMPT=4
DB_CONNECTION_RETRY_INTERVAL_INIT=5
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50

View File

@ -0,0 +1,13 @@
# task environment file.
LOG_LEVEL=INFO
PROCESS_NAME=jskult-batch-dcf-inst-merge-io
JSK_DATA_SEND_FOLDER=send
JSK_BACKUP_FOLDER=jsk/send
TRANSFER_RESULT_FOLDER=transfer_result
TRANSFER_RESULT_FILE_NAME=transfer_result.json
DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv
JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config
DB_CONNECTION_MAX_RETRY_ATTEMPT=1
DB_CONNECTION_RETRY_INTERVAL_INIT=1
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1

View File

@ -0,0 +1,9 @@
# task environment file.
LOG_LEVEL=INFO
ENTRYPOINT_MODULE_NAME=jskult-batch-mst-inst-all
TRANSFER_RESULT_FOLDER=transfer_result
TRANSFER_RESULT_FILE_NAME=transfer_result.json
DB_CONNECTION_MAX_RETRY_ATTEMPT=4
DB_CONNECTION_RETRY_INTERVAL_INIT=5
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50

View File

@ -0,0 +1,9 @@
# task environment file.
LOG_LEVEL=INFO
PROCESS_NAME=jskult-batch-trn-result-data-bio-lot
TRANSFER_RESULT_FOLDER=transfer_result
TRANSFER_RESULT_FILE_NAME=transfer_result.json
DB_CONNECTION_MAX_RETRY_ATTEMPT=4
DB_CONNECTION_RETRY_INTERVAL_INIT=5
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50

View File

@ -0,0 +1,11 @@
# task environment file.
LOG_LEVEL=INFO
PROCESS_NAME=jskult-batch-update-business-day
JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar
JSKULT_CONFIG_CALENDAR_RUN_ARCHIVE_DAY_FILE_NAME=jsk_archive_run_day.txt
JSKULT_CONFIG_EXPECTED_DATA_LIST_FOLDER=jskult/expected_data_list
JSKULT_CONFIG_EXPECTED_DATA_LIST_FILE_NAME=jsk_expected_data_list.json
DB_CONNECTION_MAX_RETRY_ATTEMPT=4
DB_CONNECTION_RETRY_INTERVAL_INIT=5
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=5
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=50

View File

@ -32,6 +32,17 @@ resource:
- &STG_SG_JSKULT_TRANSFER_RECEIVE_FILE "sg-08d43e8e118178d39"
# セキュリティグループ(ecs-jskult-batch-ultmarc-io)
- &STG_SG_JSKULT_ULTMARC_IO "sg-014caf29e738d106a"
# セキュリティグループ(ecs-jskult-batch-dcf-inst-merge-io)
- &STG_SG_DCF_INST_MERGE_IO "sg-0b5d1639a83d28f66"
# セキュリティグループ(ecs-jskult-batch-mst-inst-all)
- &STG_SG_MST_INST_ALL "sg-0483fd75c76fa5808"
# セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot)
- &STG_SG_TRN_RESULT_DATA_BIO_LOT "sg-07ca545e440dd83b7"
# セキュリティグループ(ecs-jskult-batch-update-business-day)
- &STG_SG_UPDATE_BIS_DAY "sg-0b58046abfa017e0e"
# セキュリティグループ(ecs-jskult-batch-archive-jsk-data)
- &STG_SG_JSK_ARCHIVE "sg-0bbdd7d4ebb5ba222"
# 本番環境
product:
# サブネット(PrivateSubnet1)
@ -56,6 +67,21 @@ resource:
# セキュリティグループ(ecs-jskult-batch-ultmarc-io)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_JSKULT_ULTMARC_IO "sg-xxxxxxxxxxxxx"
# セキュリティグループ(ecs-jskult-batch-dcf-inst-merge-io)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_DCF_INST_MERGE_IO "sg-xxxxxxxxxxxxx"
# セキュリティグループ(ecs-jskult-batch-mst-inst-all)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_MST_INST_ALL "sg-xxxxxxxxxxxxx"
# セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_TRN_RESULT_DATA_BIO_LOT "sg-xxxxxxxxxxxxx"
# セキュリティグループ(ecs-jskult-batch-update-business-day)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_UPDATE_BIS_DAY "sg-xxxxxxxxxxxxx"
# セキュリティグループ(ecs-jskult-batch-archive-jsk-data)
# TODO: 本番リリース時にIDを正式版にする
- &PRD_SG_JSK_ARCHIVE "sg-xxxxxxxxxxxxx"
config:
# CRMデータ取得
r-crm-datafetch-state:
@ -281,3 +307,148 @@ config:
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-ultmarc-io)
SG_JSKULT_ULTMARC_IO: *PRD_SG_JSKULT_ULTMARC_IO
r-jskult-batch-dcf-inst-merge-io-state:
# ステージング環境
staging:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *STG_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *STG_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *STG_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-dcf-inst-merge-io)
SG_DCF_INST_MERGE_IO: *STG_SG_DCF_INST_MERGE_IO
# 本番環境
product:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-dcf-inst-merge-io)
SG_DCF_INST_MERGE_IO: *PRD_SG_DCF_INST_MERGE_IO
r-jskult-batch-mst-inst-all-state:
# ステージング環境
staging:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *STG_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *STG_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *STG_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-mst-inst-all)
SG_MST_INST_ALL: *STG_SG_MST_INST_ALL
# 本番環境
product:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-mst-inst-all)
SG_MST_INST_ALL: *PRD_SG_MST_INST_ALL
r-jskult-batch-trn-result-data-bio-lot-state:
# ステージング環境
staging:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *STG_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *STG_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *STG_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot)
SG_TRN_RESULT_DATA_BIO_LOT: *STG_SG_TRN_RESULT_DATA_BIO_LOT
# 本番環境
product:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-trn-result-data-bio-lot)
SG_TRN_RESULT_DATA_BIO_LOT: *PRD_SG_TRN_RESULT_DATA_BIO_LOT
r-jskult-batch-update-business-day-state:
# ステージング環境
staging:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *STG_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *STG_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *STG_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-update-business-day)
SG_UPDATE_BIS_DAY: *STG_SG_UPDATE_BIS_DAY
# 本番環境
product:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-update-business-day)
SG_UPDATE_BIS_DAY: *PRD_SG_UPDATE_BIS_DAY
r-jskult-batch-archive-jsk-data-state:
# ステージング環境
staging:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *STG_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *STG_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *STG_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-archive-jsk-data)
SG_JSK_ARCHIVE: *STG_SG_JSK_ARCHIVE
# 本番環境
product:
# AWSアカウントID
AWS_ACCOUNT_ID: *AWS_ACCOUNT_ID
# 東京リージョン
REGION_AP_NORTHEAST_1: *REGION_AP_NORTHEAST_1
# サブネット(PrivateSubnet1)
SUBNET_PRI_1A: *PRD_SUBNET_PRI_1A
# サブネット(PrivateSubnet2)
SUBNET_PRI_1D: *PRD_SUBNET_PRI_1D
# セキュリティグループ(ecs-all)
SG_ECS_ALL: *PRD_SG_ECS_ALL
# セキュリティグループ(ecs-jskult-batch-archive-jsk-data)
SG_JSK_ARCHIVE: *PRD_SG_JSK_ARCHIVE

View File

@ -0,0 +1,72 @@
{
"Comment": "実消化_過去データアーカイブ処理ステートマシン",
"StartAt": "params",
"States": {
"params": {
"Comment": "パラメータ設定",
"Type": "Pass",
"Parameters": {
"sns": {
"TopicArn": "arn:aws:sns:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:nds-notice-#{ENV_NAME}"
},
"ecs": {
"Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-archive-jsk-data-ecs",
"LaunchType": "FARGATE",
"NetworkConfiguration": {
"AwsvpcConfiguration": {
"Subnets": [
"#{SUBNET_PRI_1A}",
"#{SUBNET_PRI_1D}"
],
"SecurityGroups": [
"#{SG_ECS_ALL}",
"#{SG_JSK_ARCHIVE}"
],
"AssignPublicIp": "DISABLED"
}
}
}
},
"ResultPath": "$.params",
"Next": "exec-ecs-task"
},
"exec-ecs-task": {
"Comment": "ECSタスク起動",
"Type": "Task",
"Resource": "arn:aws:states:::ecs:runTask.sync",
"Parameters": {
"Cluster.$": "$.params.ecs.Cluster",
"LaunchType.$": "$.params.ecs.LaunchType",
"TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-archive-jsk-data",
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration"
},
"Retry": [
{
"ErrorEquals": ["States.ALL"],
"BackoffRate": 2,
"IntervalSeconds": 5,
"MaxAttempts": 3
}
],
"Catch": [
{
"ErrorEquals": ["States.ALL"],
"ResultPath": "$.result",
"Next": "ErrorEnd"
}
],
"ResultPath": "$.result",
"Next": "NormalEnd"
},
"NormalEnd": {
"Comment": "正常終了",
"Type": "Succeed"
},
"ErrorEnd": {
"Comment": "異常終了",
"Type": "Fail",
"Error": "StatesError",
"Cause": "StepFunctions ErrorEnd"
}
}
}

View File

@ -0,0 +1,129 @@
{
"Comment": "実消化&アルトマーク DCF施設削除新規マスタ作成ステートマシン",
"StartAt": "params",
"States": {
"params": {
"Comment": "パラメータ設定",
"Type": "Pass",
"Parameters": {
"ecs": {
"LaunchType": "FARGATE",
"Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-dcf-inst-merge-io-ecs",
"TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-dcf-inst-merge-io",
"NetworkConfiguration": {
"AwsvpcConfiguration": {
"Subnets": [
"#{SUBNET_PRI_1A}",
"#{SUBNET_PRI_1D}"
],
"SecurityGroups": [
"#{SG_ECS_ALL}",
"#{SG_DCF_INST_MERGE_IO}"
],
"AssignPublicIp": "DISABLED"
}
},
"Overrides": {
"ContainerOverrides": [
{
"Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-dcf-inst-merge-io",
"Environment": [
{
"Name": "BATCH_EXECUTION_ID",
"Value.$": "$$.Execution.Id"
},
{
"Name": "MAX_RUN_COUNT",
"Value.$": "$.maxRunCount"
}
]
}
]
}
}
},
"ResultPath": "$.params",
"Next": "exec-ecs-task"
},
"exec-ecs-task": {
"Type": "Task",
"Resource": "arn:aws:states:::ecs:runTask.sync",
"Parameters": {
"LaunchType.$": "$.params.ecs.LaunchType",
"Cluster.$": "$.params.ecs.Cluster",
"TaskDefinition.$": "$.params.ecs.TaskDefinition",
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration",
"Overrides.$": "$.params.ecs.Overrides"
},
"ResultPath": "$.result",
"Retry": [
{
"ErrorEquals": [
"States.ALL"
],
"BackoffRate": 2,
"IntervalSeconds": 3,
"MaxAttempts": 3
}
],
"Catch": [
{
"ErrorEquals": [
"States.ALL"
],
"Next": "ErrorEnd",
"ResultPath": "$.result"
}
],
"Next": "scan-jskult-batch-run-manage",
"Comment": "ECSタスク起動"
},
"scan-jskult-batch-run-manage": {
"Type": "Task",
"Resource": "arn:aws:states:::dynamodb:getItem",
"Parameters": {
"TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage",
"Key": {
"execution_id": {
"S.$": "$$.Execution.Id"
}
}
},
"Next": "Choice",
"ResultPath": "$.scan"
},
"Choice": {
"Type": "Choice",
"Choices": [
{
"Variable": "$.scan.Item",
"IsPresent": false,
"Next": "ErrorEnd",
"Comment": "バッチ実行管理テーブルにデータが存在しない場合"
},
{
"Variable": "$.scan.Item.batch_run_status.S",
"StringEquals": "retry",
"Next": "wait-for-retry",
"Comment": "バッチ実行管理テーブルのステータスがリトライの場合"
}
],
"Default": "NormalEnd"
},
"wait-for-retry": {
"Type": "Wait",
"SecondsPath": "$.retryIntervalSecond",
"Next": "exec-ecs-task"
},
"NormalEnd": {
"Comment": "正常終了",
"Type": "Succeed"
},
"ErrorEnd": {
"Comment": "異常終了",
"Type": "Fail",
"Error": "StatesError",
"Cause": "StepFunctions ErrorEnd"
}
}
}

View File

@ -0,0 +1,129 @@
{
"Comment": "実消化&アルトマーク メルク施設マスタステートマシン",
"StartAt": "params",
"States": {
"params": {
"Comment": "パラメータ設定",
"Type": "Pass",
"Parameters": {
"ecs": {
"LaunchType": "FARGATE",
"Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-mst-inst-all-ecs",
"TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-mst-inst-all",
"NetworkConfiguration": {
"AwsvpcConfiguration": {
"Subnets": [
"#{SUBNET_PRI_1A}",
"#{SUBNET_PRI_1D}"
],
"SecurityGroups": [
"#{SG_ECS_ALL}",
"#{SG_MST_INST_ALL}"
],
"AssignPublicIp": "DISABLED"
}
},
"Overrides": {
"ContainerOverrides": [
{
"Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-mst-inst-all",
"Environment": [
{
"Name": "BATCH_EXECUTION_ID",
"Value.$": "$$.Execution.Id"
},
{
"Name": "MAX_RUN_COUNT",
"Value.$": "$.maxRunCount"
}
]
}
]
}
}
},
"ResultPath": "$.params",
"Next": "exec-ecs-task"
},
"exec-ecs-task": {
"Type": "Task",
"Resource": "arn:aws:states:::ecs:runTask.sync",
"Parameters": {
"LaunchType.$": "$.params.ecs.LaunchType",
"Cluster.$": "$.params.ecs.Cluster",
"TaskDefinition.$": "$.params.ecs.TaskDefinition",
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration",
"Overrides.$": "$.params.ecs.Overrides"
},
"ResultPath": "$.result",
"Retry": [
{
"ErrorEquals": [
"States.ALL"
],
"BackoffRate": 2,
"IntervalSeconds": 3,
"MaxAttempts": 3
}
],
"Catch": [
{
"ErrorEquals": [
"States.ALL"
],
"Next": "ErrorEnd",
"ResultPath": "$.result"
}
],
"Next": "scan-jskult-batch-run-manage",
"Comment": "ECSタスク起動"
},
"scan-jskult-batch-run-manage": {
"Type": "Task",
"Resource": "arn:aws:states:::dynamodb:getItem",
"Parameters": {
"TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage",
"Key": {
"execution_id": {
"S.$": "$$.Execution.Id"
}
}
},
"Next": "Choice",
"ResultPath": "$.scan"
},
"Choice": {
"Type": "Choice",
"Choices": [
{
"Variable": "$.scan.Item",
"IsPresent": false,
"Next": "ErrorEnd",
"Comment": "バッチ実行管理テーブルにデータが存在しない場合"
},
{
"Variable": "$.scan.Item.batch_run_status.S",
"StringEquals": "retry",
"Next": "wait-for-retry",
"Comment": "バッチ実行管理テーブルのステータスがリトライの場合"
}
],
"Default": "NormalEnd"
},
"wait-for-retry": {
"Type": "Wait",
"SecondsPath": "$.retryIntervalSecond",
"Next": "exec-ecs-task"
},
"NormalEnd": {
"Comment": "正常終了",
"Type": "Succeed"
},
"ErrorEnd": {
"Comment": "異常終了",
"Type": "Fail",
"Error": "StatesError",
"Cause": "StepFunctions ErrorEnd"
}
}
}

View File

@ -0,0 +1,129 @@
{
"Comment": "実消化&アルトマーク 生物由来ロット分解ステートマシン",
"StartAt": "params",
"States": {
"params": {
"Comment": "パラメータ設定",
"Type": "Pass",
"Parameters": {
"ecs": {
"LaunchType": "FARGATE",
"Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-trn-result-data-bio-lot-ecs",
"TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-trn-result-data-bio-lot",
"NetworkConfiguration": {
"AwsvpcConfiguration": {
"Subnets": [
"#{SUBNET_PRI_1A}",
"#{SUBNET_PRI_1D}"
],
"SecurityGroups": [
"#{SG_ECS_ALL}",
"#{SG_TRN_RESULT_DATA_BIO_LOT}"
],
"AssignPublicIp": "DISABLED"
}
},
"Overrides": {
"ContainerOverrides": [
{
"Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-trn-result-data-bio-lot",
"Environment": [
{
"Name": "BATCH_EXECUTION_ID",
"Value.$": "$$.Execution.Id"
},
{
"Name": "MAX_RUN_COUNT",
"Value.$": "$.maxRunCount"
}
]
}
]
}
}
},
"ResultPath": "$.params",
"Next": "exec-ecs-task"
},
"exec-ecs-task": {
"Type": "Task",
"Resource": "arn:aws:states:::ecs:runTask.sync",
"Parameters": {
"LaunchType.$": "$.params.ecs.LaunchType",
"Cluster.$": "$.params.ecs.Cluster",
"TaskDefinition.$": "$.params.ecs.TaskDefinition",
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration",
"Overrides.$": "$.params.ecs.Overrides"
},
"ResultPath": "$.result",
"Retry": [
{
"ErrorEquals": [
"States.ALL"
],
"BackoffRate": 2,
"IntervalSeconds": 3,
"MaxAttempts": 3
}
],
"Catch": [
{
"ErrorEquals": [
"States.ALL"
],
"Next": "ErrorEnd",
"ResultPath": "$.result"
}
],
"Next": "scan-jskult-batch-run-manage",
"Comment": "ECSタスク起動"
},
"scan-jskult-batch-run-manage": {
"Type": "Task",
"Resource": "arn:aws:states:::dynamodb:getItem",
"Parameters": {
"TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage",
"Key": {
"execution_id": {
"S.$": "$$.Execution.Id"
}
}
},
"Next": "Choice",
"ResultPath": "$.scan"
},
"Choice": {
"Type": "Choice",
"Choices": [
{
"Variable": "$.scan.Item",
"IsPresent": false,
"Next": "ErrorEnd",
"Comment": "バッチ実行管理テーブルにデータが存在しない場合"
},
{
"Variable": "$.scan.Item.batch_run_status.S",
"StringEquals": "retry",
"Next": "wait-for-retry",
"Comment": "バッチ実行管理テーブルのステータスがリトライの場合"
}
],
"Default": "NormalEnd"
},
"wait-for-retry": {
"Type": "Wait",
"SecondsPath": "$.retryIntervalSecond",
"Next": "exec-ecs-task"
},
"NormalEnd": {
"Comment": "正常終了",
"Type": "Succeed"
},
"ErrorEnd": {
"Comment": "異常終了",
"Type": "Fail",
"Error": "StatesError",
"Cause": "StepFunctions ErrorEnd"
}
}
}

View File

@ -0,0 +1,129 @@
{
"Comment": "実消化&アルトマーク 日付テーブル更新ステートマシン",
"StartAt": "params",
"States": {
"params": {
"Comment": "パラメータ設定",
"Type": "Pass",
"Parameters": {
"ecs": {
"LaunchType": "FARGATE",
"Cluster": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:cluster/mbj-newdwh2021-#{ENV_NAME}-jskult-batch-update-business-day-ecs",
"TaskDefinition": "arn:aws:ecs:#{REGION_AP_NORTHEAST_1}:#{AWS_ACCOUNT_ID}:task-definition/mbj-newdwh2021-#{ENV_NAME}-task-jskult-batch-update-business-day",
"NetworkConfiguration": {
"AwsvpcConfiguration": {
"Subnets": [
"#{SUBNET_PRI_1A}",
"#{SUBNET_PRI_1D}"
],
"SecurityGroups": [
"#{SG_ECS_ALL}",
"#{SG_UPDATE_BIS_DAY}"
],
"AssignPublicIp": "DISABLED"
}
},
"Overrides": {
"ContainerOverrides": [
{
"Name": "mbj-newdwh2021-#{ENV_NAME}-container-jskult-batch-update-business-day",
"Environment": [
{
"Name": "BATCH_EXECUTION_ID",
"Value.$": "$$.Execution.Id"
},
{
"Name": "MAX_RUN_COUNT",
"Value.$": "$.maxRunCount"
}
]
}
]
}
}
},
"ResultPath": "$.params",
"Next": "exec-ecs-task"
},
"exec-ecs-task": {
"Type": "Task",
"Resource": "arn:aws:states:::ecs:runTask.sync",
"Parameters": {
"LaunchType.$": "$.params.ecs.LaunchType",
"Cluster.$": "$.params.ecs.Cluster",
"TaskDefinition.$": "$.params.ecs.TaskDefinition",
"NetworkConfiguration.$": "$.params.ecs.NetworkConfiguration",
"Overrides.$": "$.params.ecs.Overrides"
},
"ResultPath": "$.result",
"Retry": [
{
"ErrorEquals": [
"States.ALL"
],
"BackoffRate": 2,
"IntervalSeconds": 3,
"MaxAttempts": 3
}
],
"Catch": [
{
"ErrorEquals": [
"States.ALL"
],
"Next": "ErrorEnd",
"ResultPath": "$.result"
}
],
"Next": "scan-jskult-batch-run-manage",
"Comment": "ECSタスク起動"
},
"scan-jskult-batch-run-manage": {
"Type": "Task",
"Resource": "arn:aws:states:::dynamodb:getItem",
"Parameters": {
"TableName": "mbj-newdwh2021-#{ENV_NAME}-jskult-batch-run-manage",
"Key": {
"execution_id": {
"S.$": "$$.Execution.Id"
}
}
},
"Next": "Choice",
"ResultPath": "$.scan"
},
"Choice": {
"Type": "Choice",
"Choices": [
{
"Variable": "$.scan.Item",
"IsPresent": false,
"Next": "ErrorEnd",
"Comment": "バッチ実行管理テーブルにデータが存在しない場合"
},
{
"Variable": "$.scan.Item.batch_run_status.S",
"StringEquals": "retry",
"Next": "wait-for-retry",
"Comment": "バッチ実行管理テーブルのステータスがリトライの場合"
}
],
"Default": "NormalEnd"
},
"wait-for-retry": {
"Type": "Wait",
"SecondsPath": "$.retryIntervalSecond",
"Next": "exec-ecs-task"
},
"NormalEnd": {
"Comment": "正常終了",
"Type": "Succeed"
},
"ErrorEnd": {
"Comment": "異常終了",
"Type": "Fail",
"Error": "StatesError",
"Cause": "StepFunctions ErrorEnd"
}
}
}