feat: 日次バッチの試作品をコミット

This commit is contained in:
shimoda.m@nds-tyo.co.jp 2023-03-30 14:14:38 +09:00
parent 67a6644a5f
commit 314342af6f
23 changed files with 1007 additions and 0 deletions

View File

@ -0,0 +1,12 @@
tests/*
.coverage
.env
.env.example
.report/*
.vscode/*
.pytest_cache/*
*/__pycache__/*
Dockerfile
pytest.ini
README.md
*.sql

View File

@ -0,0 +1,6 @@
DB_HOST=************
DB_PORT=************
DB_USERNAME=************
DB_PASSWORD=************
DB_SCHEMA=src05
LOG_LEVEL=INFO

4
ecs/jskult-batch-daily/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
__pycache__
.vscode/settings.json
.env

View File

@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Batch Sample",
"type": "python",
"request": "launch",
"program": "entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true
}
]
}

View File

@ -0,0 +1,18 @@
{
"[python]": {
"editor.defaultFormatter": null,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
// Path to the Python interpreter used for this workspace
"python.defaultInterpreterPath": "<pythonインタプリターのパス>",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Args": ["--max-line-length=120"],
"python.formatting.provider": "autopep8",
"python.formatting.autopep8Args": ["--max-line-length", "120"]
}

View File

@ -0,0 +1,20 @@
FROM python:3.9
ENV TZ="Asia/Tokyo"
WORKDIR /usr/src/app
COPY Pipfile Pipfile.lock ./
RUN \
apt update -y && \
# パッケージのセキュリティアップデートのみを適用するコマンド
apt install -y unattended-upgrades && \
unattended-upgrades && \
pip install --upgrade pip wheel setuptools && \
pip install pipenv --no-cache-dir && \
pipenv install --system --deploy && \
pip uninstall -y pipenv virtualenv-clone virtualenv
COPY src ./src
COPY entrypoint.py entrypoint.py
CMD ["python", "entrypoint.py"]

View File

@ -0,0 +1,21 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
boto3 = "*"
PyMySQL = "*"
sqlalchemy = "*"
tenacity = "*"
prettytable = "*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
[requires]
python_version = "3.9"
[pipenv]
allow_prereleases = true

278
ecs/jskult-batch-daily/Pipfile.lock generated Normal file
View File

@ -0,0 +1,278 @@
{
"_meta": {
"hash": {
"sha256": "38a488e60226a959c48dea23146954688e68eee8280a868998a2f659c2a46217"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"boto3": {
"hashes": [
"sha256:7017102c58b9984749bef3b9f476940593c311504354b9ee9dd7bb0b4657a77d",
"sha256:f961aa704bd7aeefc186ede52cabc3ef4c336979bb4098d3aad7ca922d55fc27"
],
"index": "pypi",
"version": "==1.26.96"
},
"botocore": {
"hashes": [
"sha256:b9781108810e33f8406942c3e3aab748650c59d5cddb7c9d323f4e2682e7b0b6",
"sha256:c449d7050e9bc4a8b8a62ae492cbdc931b786bf5752b792867f1276967fadaed"
],
"markers": "python_version >= '3.7'",
"version": "==1.29.96"
},
"greenlet": {
"hashes": [
"sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a",
"sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a",
"sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43",
"sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33",
"sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8",
"sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088",
"sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca",
"sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343",
"sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645",
"sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db",
"sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df",
"sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3",
"sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86",
"sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2",
"sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a",
"sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf",
"sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7",
"sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394",
"sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40",
"sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3",
"sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6",
"sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74",
"sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0",
"sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3",
"sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91",
"sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5",
"sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9",
"sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8",
"sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b",
"sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6",
"sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb",
"sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73",
"sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b",
"sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df",
"sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9",
"sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f",
"sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0",
"sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857",
"sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a",
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249",
"sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30",
"sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292",
"sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b",
"sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d",
"sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b",
"sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c",
"sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca",
"sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7",
"sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75",
"sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae",
"sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b",
"sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470",
"sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564",
"sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9",
"sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099",
"sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0",
"sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5",
"sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19",
"sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1",
"sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"
],
"markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))",
"version": "==2.0.2"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"prettytable": {
"hashes": [
"sha256:2e0026af955b4ea67b22122f310b90eae890738c08cb0458693a49b6221530ac",
"sha256:3b767129491767a3a5108e6f305cbaa650f8020a7db5dfe994a2df7ef7bad0fe"
],
"index": "pypi",
"version": "==3.6.0"
},
"pymysql": {
"hashes": [
"sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641",
"sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"
],
"index": "pypi",
"version": "==1.0.2"
},
"python-dateutil": {
"hashes": [
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.2"
},
"s3transfer": {
"hashes": [
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
],
"markers": "python_version >= '3.7'",
"version": "==0.6.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sqlalchemy": {
"hashes": [
"sha256:013f4f330001e84a2b0ef1f2c9bd73169c79d582e54e1a144be1be1dbc911711",
"sha256:0789e199fbce8cb1775337afc631ed12bcc5463dd77d7a06b8dafd758cde51f8",
"sha256:0b698440c477c00bdedff87348b19a79630a235864a8f4378098d61079c16ce9",
"sha256:0eac488be90dd3f7a655d2e34fa59e1305fccabc4abfbd002e3a72ae10bd2f89",
"sha256:14854bdb2a35af536d14f77dfa8dbc20e1bb1972996d64c4147e0d3165c9aaf5",
"sha256:18795e87601b4244fd08b542cd6bff9ef674b17bcd34e4a3c9935398e2cc762c",
"sha256:32f508fef9c5a7d19411d94ef64cf5405e42c4689e51ddbb81ac9a7be045cce8",
"sha256:33f73cc45ffa050f5c3b60ff4490e0ae9e02701461c1600d5ede1b008076b1b9",
"sha256:38e26cf6b9b4c6c37846f7e31b42e4d664b35f055691265f07e06aeb6167c494",
"sha256:3da3dff8d9833a7d7f66a3c45a79a3955f775c79f47bb7eea266d0b4c267b17a",
"sha256:432cfd77642771ee7ea0dd0f3fb664f18506a3625eab6e6d5d1d771569171270",
"sha256:4339110be209fea37a2bb4f35f1127c7562a0393e9e6df5d9a65cc4f5c167cb6",
"sha256:486015a58c9a67f65a15b4f19468b35b97cee074ae55386a9c240f1da308fbfe",
"sha256:494db0026918e3f707466a1200a5dedbf254a4bce01a3115fd95f04ba8258f09",
"sha256:57b80e877eb6ec63295835f8a3b86ca3a44829f80c4748e1b019e03adea550fc",
"sha256:5f7c40ec2e3b31293184020daba95850832bea523a08496ac89b27a5276ec804",
"sha256:6d44ff7573016fc26311b5a5c54d5656fb9e0c39e138bc8b81cb7c8667485203",
"sha256:774965c41b71c8ebe3c5728bf5b9a948231fc3a0422d9fdace0686f5bb689ad6",
"sha256:7917632606fc5d4be661dcde45cc415df835e594e2c50cc999a44f24b6bf6d92",
"sha256:9020125e3be677c64d4dda7048e247343f1663089cf268a4cc98c957adb7dbe0",
"sha256:921485d1f69ed016e1f756de67d02ad4f143eb6b92b9776bfff78786d8978ab5",
"sha256:94556a2a7fc3de094ea056b62845e2e6e271e26d1e1b2540a1cd2d2506257a10",
"sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649",
"sha256:a65a8fd09bdffd63fa23b39cd902e6a4ca23d86ecfe129513e43767a1f3e91fb",
"sha256:a6f7d1debb233f1567d700ebcdde0781a0b63db0ef266246dfbf75ae41bfdf85",
"sha256:b0995b92612979d208189245bf87349ad9243b97b49652347a28ddee0803225a",
"sha256:b8ab8f90f4a13c979e6c41c9f011b655c1b9ae2df6cffa8fa2c7c4d740f3512e",
"sha256:bc370d53fee7408330099c4bcc2573a107757b203bc61f114467dfe586a0c7bd",
"sha256:c38641f5c3714505d65dbbd8fb1350408b9ad8461769ec8e440e1177f9c92d1d",
"sha256:cc337b96ec59ef29907eeadc2ac11188739281568f14c719e61550ca6d201a41",
"sha256:ce076e25f1170000b4ecdc57a1ff8a70dbe4a5648ec3da0563ef3064e8db4f15",
"sha256:cebd161f964af58290596523c65e41a5a161a99f7212b1ae675e288a4b5e0a7c",
"sha256:d2e7411d5ea164c6f4d003f5d4f5e72e202956aaa7496b95bb4a4c39669e001c",
"sha256:e735a635126b2338dfd3a0863b675437cb53d85885a7602b8cffb24345df33ed",
"sha256:e7e61e2e4dfe175dc3510889e44eda1c32f55870d6950ef40519640cb266704d",
"sha256:e90f0be674e0845c5c1ccfa5e31c9ee28fd406546a61afc734355cc7ea1f8f8b",
"sha256:ea1c63e61b5c13161c8468305f0a5837c80aae2070e33654c68dd12572b638eb",
"sha256:ea9461f6955f3cf9eff6eeec271686caed7792c76f5b966886a36a42ea46e6b2",
"sha256:f15c54713a8dd57a01c974c9f96476688f6f6374d348819ed7e459535844b614",
"sha256:fb649c5473f79c9a7b6133f53a31f4d87de14755c79224007eb7ec76e628551e",
"sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"
],
"index": "pypi",
"version": "==2.0.7"
},
"tenacity": {
"hashes": [
"sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0",
"sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"
],
"index": "pypi",
"version": "==8.2.2"
},
"typing-extensions": {
"hashes": [
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
],
"markers": "python_version >= '3.7'",
"version": "==4.5.0"
},
"urllib3": {
"hashes": [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.15"
},
"wcwidth": {
"hashes": [
"sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e",
"sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"
],
"version": "==0.2.6"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1",
"sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c"
],
"index": "pypi",
"version": "==2.0.2"
},
"flake8": {
"hashes": [
"sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7",
"sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"
],
"index": "pypi",
"version": "==6.0.0"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"pycodestyle": {
"hashes": [
"sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053",
"sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"
],
"markers": "python_version >= '3.6'",
"version": "==2.10.0"
},
"pyflakes": {
"hashes": [
"sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf",
"sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.1"
},
"tomli": {
"hashes": [
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
],
"markers": "python_version < '3.11'",
"version": "==2.0.1"
}
}
}

View File

@ -0,0 +1,10 @@
"""実消化&アルトマーク 日次バッチのエントリーポイント"""
from src.jobctrl_daily import batch_process
if __name__ == '__main__':
try:
exit(batch_process())
except Exception:
# エラーが起きても、正常系のコードで返す。
# エラーが起きた事実はbatch_process内でログを出す。
exit(0)

View File

View File

@ -0,0 +1,56 @@
"""バッチ処理の共通関数"""
import textwrap
from datetime import datetime
from src.db.database import Database
from src.error.exceptions import BatchOperationException, DBException
def get_syor_date() -> str:
    """Fetch the processing date from the DB.

    Raises:
        BatchOperationException: when the date table cannot be read or
            the query fails.

    Returns:
        str: hdke_tbl.syor_date
    """
    db = Database.get_instance()
    db.connect()
    sql = 'SELECT syor_date FROM src05.hdke_tbl'
    try:
        syor_date_result = db.execute_select(sql)
    except DBException as e:
        raise BatchOperationException(e) from e
    finally:
        # BUG FIX: previously the connection was only closed on success,
        # leaking it whenever the query raised.
        db.disconnect()
    if len(syor_date_result) == 0:
        raise BatchOperationException('日付テーブルが取得できませんでした')
    # The table is expected to hold exactly one row.
    syor_date_record = syor_date_result[0]
    return syor_date_record['syor_date']
def get_syor_date_as_date_format() -> str:
    """Fetch the processing date from the DB, formatted as yyyy/mm/dd.

    Raises:
        BatchOperationException: when the date table cannot be read or
            the query fails.

    Returns:
        str: hdke_tbl.syor_date reformatted as yyyy/mm/dd.
    """
    raw_date = get_syor_date()
    # Stored as yyyymmdd; re-emit with slash separators.
    return datetime.strptime(raw_date, '%Y%m%d').strftime('%Y/%m/%d')
def logging_sql(logger, sql):
    """Emit an SQL statement to the debug log, framed by horizontal rules.

    Args:
        logger (logging.Logger): logger to write to
        sql (str): SQL statement (leading/trailing newline are trimmed)
    """
    rule = '-' * 15
    # Dedent the triple-quoted SQL and drop the surrounding newlines.
    body = textwrap.dedent(sql)[1:-1]
    logger.debug(f'\n{rule}\n{body}\n{rule}')

View File

@ -0,0 +1,210 @@
from prettytable import PrettyTable
from src.batch.batch_functions import get_syor_date_as_date_format, logging_sql
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.time.elapsed_time import ElapsedTime
logger = get_logger('48-施設担当者マスタ洗替')
def batch_process():
    """Rebuild (refresh) the institution staff-in-charge work table.

    Truncates `emp_chg_inst_lau`, reloads it from `emp_chg_inst`, then
    applies institution merges from `vop_hco_merge_v` and `dcf_inst_merge`.

    Raises:
        BatchOperationException: when any of the steps fails.
    """
    db = Database.get_instance()
    db.connect()
    logger.info('##########################')
    logger.info('START Changing Employee in charge of institution PGM.')
    try:
        # Fetch the processing date.
        # BUG FIX: get_syor_date_as_date_format() takes no arguments — the
        # previous call passed `db` and raised TypeError at runtime.
        syor_date = get_syor_date_as_date_format()
        # Empty the work table.
        truncate_emp_chg_inst_lau(db)
        # Reload `emp_chg_inst_lau` from `emp_chg_inst`.
        insert_into_emp_chg_inst_lau_from_emp_chg_inst(db)
        # Apply merges from `vop_hco_merge_v`.
        update_emp_chg_inst_lau_from_vop_hco_merge_v(db, syor_date)
        # Apply merges from `dcf_inst_merge`.
        update_dcf_inst_merge_from_emp_chg_inst_lau(db, syor_date)
    finally:
        # Always release the connection, even when a step fails.
        db.disconnect()
    logger.info('##########################')
    logger.info('End All Processing PGM.')
def truncate_emp_chg_inst_lau(db: Database):
    """Empty the `emp_chg_inst_lau` work table before reloading it.

    Args:
        db (Database): connected DB access helper

    Raises:
        BatchOperationException: when the TRUNCATE fails
    """
    logger.info("##########################")
    try:
        db.execute("TRUNCATE TABLE src05.emp_chg_inst_lau")
    except Exception as e:
        # BUG FIX: failures are now logged at ERROR level (was INFO).
        logger.error("Error! Truncate Table `emp_chg_inst_lau` is Failed!!!")
        raise BatchOperationException(e) from e
    logger.info("Table `emp_chg_inst_lau` was truncated!")
    return
def insert_into_emp_chg_inst_lau_from_emp_chg_inst(db: Database):
    """Reload `emp_chg_inst_lau` with the enabled rows of `emp_chg_inst`.

    Args:
        db (Database): connected DB access helper

    Raises:
        BatchOperationException: when the INSERT fails
    """
    logger.info("##########################")
    try:
        elapsed_time = ElapsedTime()
        sql = """
        INSERT INTO
            src05.emp_chg_inst_lau
        SELECT
            inst_cd,
            ta_cd,emp_cd,
            bu_cd,
            start_date,
            end_date,
            main_chg_flg,
            enabled_flg,
            creater,
            create_date,
            updater,
            update_date,
            NULL -- lua_ope_dt
        FROM
            src05.emp_chg_inst
        WHERE
            enabled_flg = 'Y'
        """
        res = db.execute(sql)
        logging_sql(logger, sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        # BUG FIX: failures are now logged at ERROR level (was INFO).
        logger.error("Error! Insert into `emp_chg_inst_lau` from `emp_chg_inst` was failed!!!")
        raise BatchOperationException(e) from e
    logger.info("Success! Insert into `emp_chg_inst_lau` from `emp_chg_inst` was inserted!")
    return
def update_emp_chg_inst_lau_from_vop_hco_merge_v(db: Database, syor_date: str):
    """Apply institution merges from `vop_hco_merge_v` to `emp_chg_inst_lau`.

    Rows merged on or before the processing date have their inst_cd
    rewritten to the merged institution code, oldest merge first.

    Args:
        db (Database): connected DB access helper
        syor_date (str): processing date formatted yyyy/mm/dd

    Raises:
        BatchOperationException: when any query fails
    """
    # vop_hco_merge_v never receives data at present, so this refresh is
    # normally a no-op.
    logger.info("##########################")
    try:
        select_result = db.execute_select(
            """
            SELECT
                COUNT(v_inst_cd) AS row_count
            FROM
                src05.vop_hco_merge_v
            WHERE
                STR_TO_DATE(apply_dt, '%Y-%m-%d') <= :syor_date
            """,
            {'syor_date': syor_date}
        )
    except Exception as e:
        # BUG FIX: failures are now logged at ERROR level (was INFO).
        logger.error("Error! `vop_hco_merge_v` Table count error!")
        raise BatchOperationException(e) from e
    # execute_select already returns a list; index it directly.
    count = select_result[0]['row_count']
    if count == 0:
        logger.info('vop_hco_merge_v Table Data is not exists!')
        return
    logger.info('vop_hco_merge_v Table Data is exists!')
    # Update emp_chg_inst_lau row by row, oldest merge applied first.
    result = db.execute_select(
        """
        SELECT
            v_inst_cd,
            v_inst_cd_merg
        FROM
            src05.vop_hco_merge_v
        WHERE
            STR_TO_DATE(apply_dt, '%Y-%m-%d') <= :syor_date
        ORDER BY
            STR_TO_DATE(apply_dt, '%Y-%m-%d') ASC
        """,
        {'syor_date': syor_date}
    )
    for row in result:
        v_inst_cd = row['v_inst_cd']
        v_inst_cd_merge = row['v_inst_cd_merg']
        try:
            elapsed_time = ElapsedTime()
            # BUG FIX: the SET clause previously referenced :v_inst_merge,
            # which was never supplied in the bind parameters, so every
            # UPDATE failed with a missing-parameter error.
            update_sql = """
            UPDATE
                emp_chg_inst_lau
            SET
                inst_cd = :v_inst_cd_merg,
                lua_ope_dt = SYSDATE()
            WHERE
                inst_cd = :v_inst_cd;
            """
            update_result = db.execute(
                update_sql,
                {'v_inst_cd': v_inst_cd, 'v_inst_cd_merg': v_inst_cd_merge}
            )
            logging_sql(logger, update_sql)
            logger.info(f'Query OK, {update_result.rowcount} rows affected ({elapsed_time.of})')
        except Exception as e:
            # BUG FIX: the old message printed the merge target twice
            # instead of "from <old> to <new>", and used INFO level.
            logger.error(f"emp_chg_inst_lau v_inst_cd could not set from {v_inst_cd} to {v_inst_cd_merge}!")
            raise BatchOperationException(e) from e
        # BUG FIX: this success log previously sat after the loop, where it
        # referenced the loop variables (NameError on an empty result) and
        # only reported the final row.
        logger.info(f"Success! emp_chg_inst_lau v_inst_cd was set from {v_inst_cd} to {v_inst_cd_merge}!")
    return
def update_dcf_inst_merge_from_emp_chg_inst_lau(db: Database, syor_date: str):
    """Apply institution merges from `dcf_inst_merge` to `emp_chg_inst_lau`.

    Args:
        db (Database): connected DB access helper
        syor_date (str): processing date formatted yyyy/mm/dd

    Raises:
        BatchOperationException: when any query fails
    """
    # Count the applicable rows in DCF_INST_MERGE first; skip when empty.
    logger.info("##########################")
    try:
        select_result = db.execute_select(
            """
            SELECT
                COUNT(dcf_inst_cd) AS row_count
            FROM
                src05.dcf_inst_merge
            WHERE
                muko_flg = '0'
                AND dcf_inst_cd_new IS NOT NULL
                AND enabled_flg = 'Y'
                AND STR_TO_DATE(CONCAT(tekiyo_month, '01'), '%Y%m%d') <= :syor_date -- TODO: tekiyo_monthはいっぴにする
            """,
            {'syor_date': syor_date}
        )
    except Exception as e:
        # BUG FIX: failures are now logged at ERROR level (was INFO).
        logger.error("Error! Getting Count of dcf_inst_merge was failed!")
        raise BatchOperationException(e) from e
    # execute_select already returns a list; index it directly.
    count = select_result[0]['row_count']
    if count == 0:
        logger.info('dcf_inst_merge Table Data is not exists!')
        return
    logger.info('dcf_inst_merge Table Data is exists!')
    # Rewrite emp_chg_inst_lau.inst_cd using dcf_inst_merge in one UPDATE.
    logger.info("##########################")
    logger.info("#### UPDATE DATA #########")
    logger.info("##########################")
    try:
        elapsed_time = ElapsedTime()
        update_sql = """
        UPDATE
            src05.emp_chg_inst_lau el,
            (
                SELECT
                    dcf_inst_cd,
                    dcf_inst_cd_new
                FROM
                    src05.dcf_inst_merge
                WHERE
                    muko_flg = '0'
                    AND dcf_inst_cd_new IS NOT NULL
                    AND enabled_flg = 'Y'
                    AND STR_TO_DATE(CONCAT(tekiyo_month, '01'), '%Y%m%d') <= :syor_date
            ) dm
        SET
            el.inst_cd = dm.dcf_inst_cd_new,
            el.lua_ope_dt = SYSDATE()
        WHERE
            el.inst_cd = dm.dcf_inst_cd
        """
        res = db.execute(
            update_sql,
            {'syor_date': syor_date}
        )
        logging_sql(logger, update_sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        # BUG FIX: failures are now logged at ERROR level (was INFO).
        logger.error("emp_chg_inst_lau.v_inst_cd could not set!")
        raise BatchOperationException(e) from e
    logger.info("emp_chg_inst_lau.v_inst_cd was set!")
    return

View File

@ -0,0 +1,11 @@
from src.batch.datachange import emp_chg_inst_lau
from src.logging.get_logger import get_logger
logger = get_logger('実績洗替')
def batch_process():
    """Run the results-refresh (jisseki araigae) batch.

    Currently the only step is refreshing the institution
    staff-in-charge work table.
    """
    logger.info('Start Jisseki Araigae Batch PGM.')
    # Refresh institution staff-in-charge assignments.
    emp_chg_inst_lau.batch_process()

View File

@ -0,0 +1,150 @@
from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
                        create_engine, text)
# BUG FIX: removed the redundant `from sqlalchemy.engine.create import
# create_engine` — it re-imported the same callable already brought in
# above and silently shadowed it.
from sqlalchemy.engine.url import URL
from tenacity import retry, stop_after_attempt, wait_exponential

from src.error.exceptions import DBException
from src.logging.get_logger import get_logger
from src.system_var import environment

logger = get_logger(__name__)
class Database:
    """Thin DB access wrapper around a SQLAlchemy engine/connection."""

    # Class-level defaults; each is overwritten per instance in __init__.
    __connection: Connection = None
    __engine: Engine = None
    __host: str = None
    # NOTE(review): annotated str, but __init__ stores int(port) — confirm
    # the intended type.
    __port: str = None
    __username: str = None
    __password: str = None
    __schema: str = None
    __connection_string: str = None

    def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
        """Initialize a new instance of this class.

        Args:
            username (str): DB user name
            password (str): DB password
            host (str): DB host name
            port (int): DB port
            schema (str): DB schema name
        """
        self.__username = username
        self.__password = password
        self.__host = host
        self.__port = int(port)
        self.__schema = schema
        self.__connection_string = URL.create(
            drivername='mysql+pymysql',
            username=self.__username,
            password=self.__password,
            host=self.__host,
            port=self.__port,
            database=self.__schema,
            query={"charset": "utf8mb4"}
        )
        # AUTOCOMMIT isolation: statements run via execute() take effect
        # immediately unless begin() is called explicitly.
        self.__engine = create_engine(
            self.__connection_string,
            pool_timeout=5,
            poolclass=QueuePool,
            isolation_level="AUTOCOMMIT"
        )

    @classmethod
    def get_instance(cls):
        """Create an instance configured from environment variables.

        NOTE(review): despite the name, this builds a NEW instance (and a
        new engine) on every call — it is not a singleton. Confirm this is
        intended.

        Returns:
            Database: DB access helper instance
        """
        return cls(
            username=environment.DB_USERNAME,
            password=environment.DB_PASSWORD,
            host=environment.DB_HOST,
            port=environment.DB_PORT,
            schema=environment.DB_SCHEMA
        )

    @retry(
        wait=wait_exponential(
            multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT,
            min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS,
            max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS
        ),
        stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT))
    def connect(self):
        """
        Connect to the DB, retrying with exponential backoff on failure.

        Raises:
            DBException: connection failure
            NOTE(review): when every retry is exhausted, tenacity raises
            RetryError rather than DBException — docstring/behavior
            mismatch to confirm (consider reraise=True or wrapping).
        """
        self.__connection = self.__engine.connect()

    def execute_select(self, select_query: str, parameters=None) -> list[dict]:
        """Execute a SELECT query.

        Args:
            select_query (str): SELECT statement
            parameters (dict, optional): values bound to the query's
                placeholders. Defaults to None.

        Raises:
            DBException: not connected, or the query failed

        Returns:
            list[dict]: one {column name: value} mapping per row
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(select_query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        # Materialize all rows as dict-like mappings before returning.
        result_rows = result.mappings().all()
        return result_rows

    def execute(self, query: str, parameters=None) -> CursorResult:
        """Execute an arbitrary SQL statement.

        Args:
            query (str): SQL statement
            parameters (dict, optional): values bound to the query's
                placeholders. Defaults to None.

        Raises:
            DBException: not connected, or the query failed

        Returns:
            CursorResult: the raw execution result (e.g. for rowcount)
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        return result

    def begin(self):
        """Begin a transaction (no-op when one is already open)."""
        if not self.__connection.in_transaction():
            self.__connection.begin()

    def commit(self):
        """Commit the current transaction, if any."""
        if self.__connection.in_transaction():
            self.__connection.commit()

    def rollback(self):
        """Roll back the current transaction, if any."""
        if self.__connection.in_transaction():
            self.__connection.rollback()

    def disconnect(self):
        """Close the DB connection (safe to call when not connected)."""
        if self.__connection is not None:
            self.__connection.close()
            self.__connection = None

View File

@ -0,0 +1,14 @@
from tenacity import RetryError
class MeDaCaException(Exception):
    """Base class for all application-specific (MeDaCa) exceptions."""
    pass
class DBException(MeDaCaException):
    """Raised for database connection or query failures."""
    pass
class BatchOperationException(MeDaCaException):
    """Raised when a batch processing step fails."""
    pass
class MaxRetryExceededException(MeDaCaException, RetryError):
    """Raised when a retried operation exhausts its attempts.

    Also subclasses tenacity.RetryError so callers may catch either type.
    """
    pass

View File

@ -0,0 +1,117 @@
from src.batch import jissekiaraigae
from src.batch.batch_functions import get_syor_date
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
logger = get_logger('日次処理コントロール') # ここを処理IDとかにするといいかもしれない
def batch_process():
    """Control flow for the daily batch job.

    Orchestrates the daily steps (processing-date lookup, holiday check,
    wholesaler stock import, Ultmarc import, results refresh, flag
    updates). Most steps are still scaffolding carried over from the
    original batch: `if True:` placeholders and log-only sections.

    Returns:
        int: 0 on normal completion (used as the process exit code).

    Raises:
        Exception: unexpected errors are propagated to the entry point,
            which logs nothing further and exits with code 0.
    """
    try:
        logger.info('日次ジョブ:開始')
        # logger.info('S3マウント状況確認')
        # logger.error('S3マウントエラー:DWH異常終了')
        # logger.error('S3マウントエラー:BIO異常終了')
        # logger.info('データベース接続')  # the connection is not actually opened here, so this log is probably unnecessary
        # logger.error('データベース接続エラー(異常終了)')  # catch the checked exception and report it in one place
        try:
            logger.info('処理日取得')
            syor_date = get_syor_date()
        except BatchOperationException as e:
            logger.error(f'処理日取得エラー(異常終了){e}')
            # BUG FIX: without re-raising here, execution fell through to
            # the f-string below and crashed with NameError because
            # syor_date was never bound.
            raise
        logger.info(f'処理日={syor_date}')
        # Read the holiday-check file.
        logger.info('休日判定処理')
        if True:  # TODO: holiday check
            logger.info('非営業日かつ月、火、水以外です。')  # branch
            try:
                # In-progress flag check; the dump flag is also inspected here.
                logger.info('処理フラグ更新中')
                logger.info('処理フラグ更新終了')
            except BatchOperationException as e:
                logger.error(f'処理フラグ更新処理エラー(異常終了){e}')
            logger.info('日次ジョブ:終了(正常終了)')
        try:
            logger.info('日次ジョブ処理中判定')
            if True:  # TODO: in-progress check
                logger.error('処理フラグ処理中(異常終了)')
            logger.info('処理中フラグの更新:起動')
            logger.info('処理中フラグの更新:終了')
        except BatchOperationException as e:
            logger.error(f'日次ジョブ処理中エラー(異常終了){e}')
        # ↓ the following section is intentionally not implemented ↓
        # logger.info('処理前バックアップ実行')
        # logger.info('処理前バックアップ:起動')
        # logger.info('処理前バックアップ:終了')
        # logger.error('処理前バックアップ処理エラー(異常終了)', $ex->getMessage())
        # ↑ end of skipped section ↑
        logger.info('卸在庫データ取込判定')
        if True:  # TODO: wholesaler-stock day check
            logger.info('卸在庫データ取込日です')
            logger.debug('卸在庫データファイル名: {_PATH_OROSHI_ZAIKO}')
            if True:  # TODO: abort when the wholesaler stock file is missing
                logger.error('卸在庫データ存在確認エラー(異常終了)')
            logger.info('卸在庫データ存在確認:取込処理開始')
            logger.debug('卸在庫データファイル名作成: {read_filename}')
            logger.debug('ファイル移動OK{_MOVE_OROSHI_ZAIKO}')  # download from S3
            logger.debug('ファイル解凍OK{sprintf(_ZIP_OROSHI_ZAIKO, $read_filename)}')  # gunzip -f behavior, so gzip is used
            logger.debug('ファイル名変更OK {sprintf(_RENAME_OROSHI_ZAIKO, $read_filename)}')  # upload to S3
            try:
                logger.info('卸在庫データ取込:起動')
                logger.info('卸在庫データ取込:終了')
            except BatchOperationException as e:
                logger.error(f'卸在庫データ取込処理エラー(異常終了){e}')
        logger.info('日次処理(アルトマーク)')
        if True:  # TODO: skip when there is no Ultmarc data
            logger.info('日次処理(アルトマーク)実行対象日でない為未実行')
        try:
            logger.info('アルトマーク取込:起動')
            logger.info('アルトマーク取込:終了')
        except BatchOperationException as e:
            logger.error(f'アルトマーク取込処理エラー(異常終了){e}')
        if True:  # TODO: holiday check
            try:
                logger.info('メルク施設マスタ作成')
                logger.info('メルク施設マスタ作成終了')
            except BatchOperationException as e:
                logger.error(f'メルク施設マスタ作成エラー(異常終了){e}')
            try:
                logger.info('DCF施設統合マスタ作成')
                logger.info('DCF施設統合マスタ作成終了')
            except BatchOperationException as e:
                logger.error(f'DCF施設統合マスタ作成エラー異常終了{e}')
        # if False:  # ($holiday === FALSE) — run every day regardless
        logger.info('V実消化連携データ存在確認')
        if True:
            logger.error('V実消化連携データ存在確認異常終了')
        logger.info('日次処理V実消化')
        try:
            logger.info('V実消化取込起動')
            logger.info('V実消化取込終了')
        except BatchOperationException as e:
            logger.exception(f'V実消化取込処理エラー異常終了{e}')
        logger.info('日次処理(実績更新)')
        try:
            logger.info('実績更新:起動')
            jissekiaraigae.batch_process()
            logger.info('実績更新:終了')
        except BatchOperationException as e:
            logger.exception(f'実績更新処理エラー(異常終了){e}')
        # ↓ apart from the file backup, the following is not done ↓
        # logger.info('処理後バックアップ実行')
        # logger.info('処理後バックアップ:起動')
        # logger.info('処理後バックアップ:終了')
        # logger.error('処理後バックアップ処理エラー(異常終了)', $ex->getMessage())
        # ↑ end of skipped section ↑
        logger.info('処理中フラグの更新:非処理中')
        try:
            logger.info('処理中フラグの更新:起動')
            logger.info('処理中フラグの更新:終了')
        except BatchOperationException as e:
            logger.exception(f'処理中フラグ更新エラー(異常終了){e}')
        logger.info('ワークディレクトリクリーニング')
        logger.info('日次ジョブ:終了(正常終了)')
        return 0
    except Exception:
        # Bare re-raise preserves the original traceback; `raise e` would
        # add this frame as the apparent origin.
        raise

View File

@ -0,0 +1,29 @@
import logging
from src.system_var.environment import LOG_LEVEL
# Pre-set the log level of boto3-related modules to WARNING so that their
# DEBUG output is suppressed.
for name in ["boto3", "botocore", "s3transfer", "urllib3"]:
    logging.getLogger(name).setLevel(logging.WARNING)
# Shared logger factory
def get_logger(log_name):
    """Return a logger configured with the app-wide level and format.

    Args:
        log_name (str): name of the logger to create or look up.

    Returns:
        logging.Logger: logger with a stream handler and tab-separated
        format applied.
    """
    logger = logging.getLogger(log_name)
    configured = logging.getLevelName(LOG_LEVEL)
    # getLevelName returns a string for unknown names; fall back to INFO.
    logger.setLevel(configured if isinstance(configured, int) else logging.INFO)
    if not logger.hasHandlers():
        logger.addHandler(logging.StreamHandler())
    fmt = logging.Formatter(
        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
        '%Y-%m-%d %H:%M:%S'
    )
    # (Re)apply the shared format to every handler owned by this logger.
    for existing_handler in logger.handlers:
        existing_handler.setFormatter(fmt)
    return logger

View File

@ -0,0 +1,13 @@
import os

# Required settings — a missing variable raises KeyError at import time.
DB_HOST = os.environ['DB_HOST']
DB_PORT = int(os.environ['DB_PORT'])
DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ['DB_SCHEMA']
LOG_LEVEL = os.environ['LOG_LEVEL']
# Optional DB-connection retry tuning (consumed by tenacity in Database).
# NOTE(review): the env-var names below differ from the constant names
# (e.g. DB_CONNECTION_RETRY_INTERVAL vs ..._RETRY_INTERVAL_INIT) — confirm
# which spelling deployments actually set.
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))

View File

@ -0,0 +1,22 @@
import time
class ElapsedTime:
    """Stopwatch that formats elapsed wall-clock time as hours/min/sec."""

    def __init__(self) -> None:
        """Start the stopwatch at construction time."""
        self.__start = time.perf_counter()

    @property
    def of(self):
        """Elapsed time since construction, formatted for log output.

        Returns:
            str: e.g. '01 hour 02 min 03.40 sec'. Zero-valued leading
            units are omitted; seconds are always shown.
        """
        return self.__format(time.perf_counter() - self.__start)

    @staticmethod
    def __format(elapsed: float) -> str:
        """Format a duration in seconds as 'HH hour MM min SS.ss sec'."""
        h, rem = divmod(elapsed, 3600)
        m, s = divmod(rem, 60)
        parts = []
        if h > 0.0:
            parts.append(f'{h:02.0f} hour')
        # BUG FIX: minutes are now shown whenever hours are shown, so that
        # e.g. 3601s renders '01 hour 00 min 01.00 sec' instead of the
        # ambiguous '01 hour 01.00 sec'.
        if m > 0.0 or parts:
            parts.append(f'{m:02.0f} min')
        # BUG FIX: seconds are always emitted (the old code returned an
        # empty string for a zero duration) and use width 5 ('SS.ss')
        # instead of the old '{:06.02f}', which printed '001.20'.
        parts.append(f'{s:05.2f} sec')
        return ' '.join(parts)