feat: ARIS-J連携がなくなったことに伴い、月次バッチのソースを削除

This commit is contained in:
shimoda.m@nds-tyo.co.jp 2023-11-09 17:00:45 +09:00
parent ab7c6d89b4
commit 27134a9310
28 changed files with 0 additions and 1317 deletions

View File

@ -1,12 +0,0 @@
tests/*
.coverage
.env
.env.example
.report/*
.vscode/*
.pytest_cache/*
*/__pycache__/*
Dockerfile
pytest.ini
README.md
*.sql

View File

@ -1,19 +0,0 @@
DB_HOST=************
DB_PORT=3306
DB_USERNAME=************
DB_PASSWORD=************
DB_SCHEMA=src05
ARISJ_DATA_BUCKET=mbj-newdwh2021-staging-jskult-arisj
JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult
JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config
LOG_LEVEL=INFO
ARISJ_DATA_FOLDER=DATA
ARISJ_BACKUP_FOLDER=arisj
JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar
JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME=jskult_arisj_output_day_list.txt
DB_CONNECTION_MAX_RETRY_ATTEMPT=************
DB_CONNECTION_RETRY_INTERVAL_INIT=************
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=************
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=************

View File

@ -1,10 +0,0 @@
.vscode/settings.json
.env
# python
__pycache__
# python test
.pytest_cache
.coverage
.report/

View File

@ -1,16 +0,0 @@
{
// IntelliSense を使用して、利用可能な属性を学べます。
// 既存の属性の説明をホバーして表示します。
// 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "(DEBUG)jskult batch monthly",
"type": "python",
"request": "launch",
"program": "entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true
}
]
}

View File

@ -1,31 +0,0 @@
{
"[python]": {
"editor.defaultFormatter": null,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
//
"python.defaultInterpreterPath": "<pythonインタプリターのパス>",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Args": [
"--max-line-length=200",
"--ignore=F541"
],
"python.formatting.provider": "autopep8",
"python.formatting.autopep8Path": "autopep8",
"python.formatting.autopep8Args": [
"--max-line-length", "200",
"--ignore=F541"
],
"python.testing.pytestArgs": [
"tests/batch/ultmarc"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}

View File

@ -1,20 +0,0 @@
# Runtime image for the monthly batch (Python 3.9, JST time zone).
FROM python:3.9
ENV TZ="Asia/Tokyo"
WORKDIR /usr/src/app
# Copy only the lock files first so dependency install is layer-cached.
COPY Pipfile Pipfile.lock ./
RUN \
    apt update -y && \
    # Apply only security updates to OS packages.
    apt install -y unattended-upgrades && \
    unattended-upgrades && \
    pip install --upgrade pip wheel setuptools && \
    pip install pipenv --no-cache-dir && \
    # Install locked dependencies system-wide, then remove pipenv itself
    # to keep the final image small.
    pipenv install --system --deploy && \
    pip uninstall -y pipenv virtualenv-clone virtualenv
COPY src ./src
COPY entrypoint.py entrypoint.py
CMD ["python", "entrypoint.py"]

View File

@ -1,20 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
boto3 = "*"
sqlalchemy = "*"
tenacity = "*"
pymysql = "*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
[requires]
python_version = "3.9"
[pipenv]
allow_prereleases = true

View File

@ -1,262 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "a2be870e254760b62220c10400b05fa66d24b2cc1bcd6f21044735e320a62e53"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"boto3": {
"hashes": [
"sha256:908f9c277325d68963dfcfce963a05336f0eb19505fc239c0ab9d01f4cba0296",
"sha256:e1e535e9fb23977252f13652ed2fa9b4f2d59a53b04a5f2fad3ee415b6a3b2b0"
],
"index": "pypi",
"version": "==1.27.0"
},
"botocore": {
"hashes": [
"sha256:b9cb5b78a289f0615a48d85066f01869029aa41b95993f2c0c55003df037c23f",
"sha256:cac1333f41ec98e6f75bbba3f2c74b9e76aa3847469ecea6e7773a0af0049bee"
],
"markers": "python_version >= '3.7'",
"version": "==1.30.0"
},
"greenlet": {
"hashes": [
"sha256:0a9dfcadc1d79696e90ccb1275c30ad4ec5fd3d1ab3ae6671286fac78ef33435",
"sha256:0f313771cb8ee0a04dfdf586b7d4076180d80c94be09049daeea018089b5b957",
"sha256:17503397bf6cbb5e364217143b6150c540020c51a3f6b08f9a20cd67c25e2ca8",
"sha256:180ec55cb127bc745669eddc9793ffab6e0cf7311e67e1592f183d6ca00d88c1",
"sha256:1b3f3568478bc21b85968e8038c4f98f4bf0039a692791bc324b5e0d1522f4b1",
"sha256:1bd4ea36f0aeb14ca335e0c9594a5aaefa1ac4e2db7d86ba38f0be96166b3102",
"sha256:21ebcb570e0d8501457d6a2695a44c5af3b6c2143dc6644ec73574beba067c90",
"sha256:24071eee113d75fedebaeb86264d94f04b5a24e311c5ba3e8003c07d00112a7e",
"sha256:270432cfdd6a50016b8259b3bbf398a3f7c06a06f2c68c7b93e49f53bc193bcf",
"sha256:271ed380389d2f7e4c1545b6e0837986e62504ab561edbaff05da9c9f3f98f96",
"sha256:2840187a94e258445e62ff1545e34f0b1a14aef4d0078e5c88246688d2b6515e",
"sha256:2cda110faee67613fed221f90467003f477088ef1cc84c8fc88537785a5b4de9",
"sha256:2e160a65cc6023a237be870f2072513747d512a1d018efa083acce0b673cccc0",
"sha256:2fcf7af83516db35af3d0ed5d182dea8585eddd891977adff1b74212f4bfd2fd",
"sha256:36cebce1f30964d5672fd956860e7e7b69772da69658d5743cb676b442eeff36",
"sha256:42bfe67824a9b53e73f568f982f0d1d4c7ac0f587d2e702a23f8a7b505d7b7c2",
"sha256:450a7e52a515402fd110ba807f1a7d464424bfa703be4effbcb97e1dfbfcc621",
"sha256:463d63ca5d8c236788284a9a44b9715372a64d5318a6b5eee36815df1ea0ba3d",
"sha256:4d0c0ffd732466ff324ced144fad55ed5deca36f6036c1d8f04cec69b084c9d6",
"sha256:4ff2a765f4861fc018827eab4df1992f7508d06c62de5d2fe8a6ac2233d4f1d0",
"sha256:53abf19b7dc62795c67b8d0a3d8ef866db166b21017632fff2624cf8fbf3481c",
"sha256:5552d7be37d878e9b6359bbffa0512d857bb9703616a4c0656b49c10739d5971",
"sha256:585810056a8adacd3152945ebfcd25deb58335d41f16ae4e0f3d768918957f9a",
"sha256:5942b1d6ba447cff1ec23a21ec525dde2288f00464950bc647f4e0f03bd537d1",
"sha256:5c355c99be5bb23e85d899b059a4f22fdf8a0741c57e7029425ee63eb436f689",
"sha256:5f61df4fe07864561f49b45c8bd4d2c42e3f03d2872ed05c844902a58b875028",
"sha256:665942d3a954c3e4c976581715f57fb3b86f4cf6bae3ac30b133f8ff777ac6c7",
"sha256:68368e908f14887fb202a81960bfbe3a02d97e6d3fa62b821556463084ffb131",
"sha256:6aac94ff957b5dea0216af71ab59c602e1b947b394e4f5e878a5a65643090038",
"sha256:889934aa8d72b6bfc46babd1dc4b817a56c97ec0f4a10ae7551fb60ab1f96fae",
"sha256:a00550757fca1b9cbc479f8eb1cf3514dbc0103b3f76eae46341c26ddcca67a9",
"sha256:a4a2d6ed0515c05afd5cc435361ced0baabd9ba4536ddfe8ad9a95bcb702c8ce",
"sha256:a8dd92fd76a61af2abc8ccad0c6c6069b3c4ebd4727ecc9a7c33aae37651c8c7",
"sha256:ab81f9ff3e3c2ca65e824454214c10985a846cd9bee5f4d04e15cd875d9fe13b",
"sha256:ac10196b8cde7a082e4e371ff171407270d3337c8d57ed43030094eb01d9c95c",
"sha256:b767930af686551dc96a5eb70af3736709d547ffa275c11a5e820bfb3ae61d8d",
"sha256:b9a1f4d256b81f59ba87bb7a29b9b38b1c018e052dba60a543cb0ddb5062d159",
"sha256:ba94c08321b5d345100fc64eb1ab235f42faf9aabba805cface55ebe677f1c2c",
"sha256:bab71f73001cd15723c4e2ca398f2f48e0a3f584c619eefddb1525e8986e06eb",
"sha256:bce5cf2b0f0b29680396c5c98ab39a011bd70f2dfa8b8a6811a69ee6d920cf9f",
"sha256:c02e514c72e745e49a3ae7e672a1018ba9b68460c21e0361054e956e5d595bc6",
"sha256:c3fb459ced6c5e3b2a895f23f1400f93e9b24d85c30fbe2d637d4f7706a1116b",
"sha256:cd31ab223e43ac64fd23f8f5dad249addadac2a459f040546200acbf7e84e353",
"sha256:ce70aa089ec589b5d5fab388af9f8c9f9dfe8fe4ad844820a92eb240d8628ddf",
"sha256:d47b2e1ad1429da9aa459ef189fbcd8a74ec28a16bc4c3f5f3cf3f88e36535eb",
"sha256:d61bad421c1f496f9fb6114dbd7c30a1dac0e9ff90e9be06f4472cbd8f7a1704",
"sha256:d7ba2e5cb119eddbc10874b41047ad99525e39e397f7aef500e6da0d6f46ab91",
"sha256:dde0ab052c7a1deee8d13d72c37f2afecee30ebdf6eb139790157eaddf04dd61",
"sha256:df34b52aa50a38d7a79f3abc9fda7e400791447aa0400ed895f275f6d8b0bb1f",
"sha256:e0fc20e6e6b298861035a5fc5dcf9fbaa0546318e8bda81112591861a7dcc28f",
"sha256:e20d5e8dc76b73db9280464d6e81bea05e51a99f4d4dd29c5f78dc79f294a5d3",
"sha256:e31d1a33dc9006b278f72cb0aacfe397606c2693aa2fdc0c2f2dcddbad9e0b53",
"sha256:e3a99f890f2cc5535e1b3a90049c6ca9ff9da9ec251cc130c8d269997f9d32ee",
"sha256:e7b192c3df761d0fdd17c2d42d41c28460f124f5922e8bd524018f1d35610682",
"sha256:ed0f4fad4c3656e34d20323a789b6a2d210a6bb82647d9c86dded372f55c58a1",
"sha256:f34ec09702be907727fd479046193725441aaaf7ed4636ca042734f469bb7451",
"sha256:f3530c0ec1fc98c43d5b7061781a8c55bd0db44f789f8152e19d9526cbed6021",
"sha256:f5672082576d0e9f52fa0fa732ff57254d65faeb4a471bc339fe54b58b3e79d2",
"sha256:ffb9f8969789771e95d3c982a36be81f0adfaa7302a1d56e29f168ca15e284b8"
],
"markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))",
"version": "==3.0.0a1"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"pymysql": {
"hashes": [
"sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96",
"sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"
],
"index": "pypi",
"version": "==1.1.0"
},
"python-dateutil": {
"hashes": [
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.2"
},
"s3transfer": {
"hashes": [
"sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346",
"sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"
],
"markers": "python_version >= '3.7'",
"version": "==0.6.1"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sqlalchemy": {
"hashes": [
"sha256:04383f1e3452f6739084184e427e9d5cb4e68ddc765d52157bf5ef30d5eca14f",
"sha256:125f9f7e62ddf8b590c069729080ffe18b68a20d9882eb0947f72e06274601d7",
"sha256:1822620c89779b85f7c23d535c8e04b79c517739ae07aaed48c81e591ed5498e",
"sha256:21583808d37f126a647652c90332ac1d3a102edf3c94bcc3319edcc0ea2300cc",
"sha256:218fb20c01e95004f50a3062bf4c447dcb360cab8274232f31947e254f118298",
"sha256:2269b1f9b8be47e52b70936069a25a3771eff53367aa5cc59bb94f28a6412e13",
"sha256:234678ed6576531b8e4be255b980f20368bf07241a2e67b84e6b0fe679edb9c4",
"sha256:28da17059ecde53e2d10ba813d38db942b9f6344360b2958b25872d5cb729d35",
"sha256:2c6ff5767d954f6091113fedcaaf49cdec2197ae4c5301fe83d5ae4393c82f33",
"sha256:36a87e26fe8fa8c466fae461a8fcb780d0a1cbf8206900759fc6fe874475a3ce",
"sha256:394ac3adf3676fad76d4b8fcecddf747627f17f0738dc94bac15f303d05b03d4",
"sha256:40a3dc52b2b16f08b5c16b9ee7646329e4b3411e9280e5e8d57b19eaa51cbef4",
"sha256:48111d56afea5699bab72c38ec95561796b81befff9e13d1dd5ce251ab25f51d",
"sha256:48b40dc2895841ea89d89df9eb3ac69e2950a659db20a369acf4259f68e6dc1f",
"sha256:513411d73503a6fc5804f01fae3b3d44f267c1b3a06cfeac02e9286a7330e857",
"sha256:51736cfb607cf4e8fafb693906f9bc4e5ee55be0b096d44bd7f20cd8489b8571",
"sha256:5f40e3a7d0a464f1c8593f2991e5520b2f5b26da24e88000bbd4423f86103d4f",
"sha256:6150560fcffc6aee5ec9a97419ac768c7a9f56baf7a7eb59cb4b1b6a4d463ad9",
"sha256:724355973297bbe547f3eb98b46ade65a67a3d5a6303f17ab59a2dc6fb938943",
"sha256:74ddcafb6488f382854a7da851c404c394be3729bb3d91b02ad86c5458140eff",
"sha256:7830e01b02d440c27f2a5be68296e74ccb55e6a5b5962ffafd360b98930b2e5e",
"sha256:7f31d4e7ca1dd8ca5a27fd5eaa0f9e2732fe769ff7dd35bf7bba179597e4df07",
"sha256:8741d3d401383e54b2aada37cbd10f55c5d444b360eae3a82f74a2be568a7710",
"sha256:910d45bf3673f0e4ef13858674bd23cfdafdc8368b45b948bf511797dbbb401d",
"sha256:aa995b21f853864996e4056d9fde479bcecf8b7bff4beb3555eebbbba815f35d",
"sha256:af7e2ba75bf84b64adb331918188dda634689a2abb151bc1a583e488363fd2f8",
"sha256:b0eaf82cc844f6b46defe15ad243ea00d1e39ed3859df61130c263dc7204da6e",
"sha256:b114a16bc03dfe20b625062e456affd7b9938286e05a3f904a025b9aacc29dd4",
"sha256:b47be4c6281a86670ea5cfbbbe6c3a65366a8742f5bc8b986f790533c60b5ddb",
"sha256:ba03518e64d86f000dc24ab3d3a1aa876bcbaa8aa15662ac2df5e81537fa3394",
"sha256:cc9c2630c423ac4973492821b2969f5fe99d9736f3025da670095668fbfcd4d5",
"sha256:cf07ff9920cb3ca9d73525dfd4f36ddf9e1a83734ea8b4f724edfd9a2c6e82d9",
"sha256:cf175d26f6787cce30fe6c04303ca0aeeb0ad40eeb22e3391f24b32ec432a1e1",
"sha256:d0aeb3afaa19f187a70fa592fbe3c20a056b57662691fd3abf60f016aa5c1848",
"sha256:e186e9e95fb5d993b075c33fe4f38a22105f7ce11cecb5c17b5618181e356702",
"sha256:e2d5c3596254cf1a96474b98e7ce20041c74c008b0f101c1cb4f8261cb77c6d3",
"sha256:e3189432db2f5753b4fde1aa90a61c69976f4e7e31d1cf4611bfe3514ed07478",
"sha256:e3a6b2788f193756076061626679c5c5a6d600ddf8324f986bc72004c3e9d92e",
"sha256:ead58cae2a089eee1b0569060999cb5f2b2462109498a0937cc230a7556945a1",
"sha256:f2f389f77c68dc22cb51f026619291c4a38aeb4b7ecb5f998fd145b2d81ca513",
"sha256:f593170fc09c5abb1205a738290b39532f7380094dc151805009a07ae0e85330"
],
"index": "pypi",
"version": "==2.0.17"
},
"tenacity": {
"hashes": [
"sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0",
"sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"
],
"index": "pypi",
"version": "==8.2.2"
},
"typing-extensions": {
"hashes": [
"sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36",
"sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"
],
"markers": "python_version >= '3.7'",
"version": "==4.7.1"
},
"urllib3": {
"hashes": [
"sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f",
"sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.16"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1",
"sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c"
],
"index": "pypi",
"version": "==2.0.2"
},
"flake8": {
"hashes": [
"sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7",
"sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"
],
"index": "pypi",
"version": "==6.0.0"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"pycodestyle": {
"hashes": [
"sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053",
"sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"
],
"markers": "python_version >= '3.6'",
"version": "==2.10.0"
},
"pyflakes": {
"hashes": [
"sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf",
"sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.1"
},
"tomli": {
"hashes": [
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
],
"markers": "python_version < '3.11'",
"version": "==2.0.1"
}
}
}

View File

@ -1,48 +0,0 @@
# 実消化&アルトマーク 月次バッチ
## 概要
実消化&アルトマークの月次バッチ処理。
## 環境情報
- Python 3.9
- MySQL 8.0.23
- VSCode
## 環境構築
- Python の構築
- Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照
- 「Pipenv の導入」までを行っておくこと
- 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する
- `pipenv install --dev --python <pyenvでインストールしたpythonバージョン>`
- この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく
- MySQL の環境構築
- Windows の場合、以下のリンクからダウンロードする
- <https://dev.mysql.com/downloads/installer/>
- Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利
- 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行する
- `docker-compose up -d`
- Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと
- データを投入する
- 立ち上げたデータベースに「src05」スキーマを作成する
- [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する
- `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql`
- 環境変数の設定
- 「.env.example」ファイルをコピーし、「.env」ファイルを作成する
- 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください
- VSCode の設定
- 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する
- 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する
## 実行
- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。
- 「entrypoint.py」が、バッチ処理のエントリーポイント。
- 実際の処理は、「src/jobctrl_monthly.py」で行っている。
## フォルダ構成(工事中)

View File

@ -1,10 +0,0 @@
"""実消化&アルトマーク 月次バッチのエントリーポイント"""
from src import jobctrl_monthly
if __name__ == '__main__':
try:
exit(jobctrl_monthly.exec())
except Exception:
# エラーが起きても、正常系のコードで返す。
# エラーが起きた事実はbatch_process内でログを出す。
exit(0)

View File

@ -1,95 +0,0 @@
import os.path as path
import tempfile
import boto3
from src.system_var import environment
class S3Client:
    """Thin wrapper around the boto3 S3 client used by the batch."""

    # One shared low-level boto3 client for every S3Client instance.
    __s3_client = boto3.client('s3')
    _bucket_name: str

    def list_objects(self, bucket_name: str, folder_name: str):
        """List files under *folder_name*; keys ending in '/' are folders and skipped."""
        listing = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name)
        if listing['KeyCount'] == 0:
            return []
        return [
            {'filename': entry['Key'], 'size': entry['Size']}
            for entry in listing['Contents']
            if not entry['Key'].endswith('/')
        ]

    def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None:
        """Server-side copy of one object between buckets/keys."""
        self.__s3_client.copy({'Bucket': src_bucket, 'Key': src_key}, dest_bucket, dest_key)
        return

    def download_file(self, bucket_name: str, file_key: str, file):
        """Download *file_key* from *bucket_name* into the given open file object."""
        self.__s3_client.download_fileobj(Bucket=bucket_name, Key=file_key, Fileobj=file)
        return

    def upload_file(self, local_file_path: str, bucket_name: str, file_key: str):
        """Upload a local file to *bucket_name* under *file_key*."""
        self.__s3_client.upload_file(local_file_path, Bucket=bucket_name, Key=file_key)

    def delete_file(self, bucket_name: str, file_key: str):
        """Delete *file_key* from *bucket_name*."""
        self.__s3_client.delete_object(Bucket=bucket_name, Key=file_key)
class S3Bucket():
    """Base class for bucket-specific helpers; shares one S3Client."""
    # Shared client used by every bucket subclass.
    _s3_client = S3Client()
    # Overridden by subclasses with the concrete bucket name.
    _bucket_name: str = None
class ConfigBucket(S3Bucket):
    """Helper for the JSK/ULT configuration bucket."""

    _bucket_name = environment.JSKULT_CONFIG_BUCKET

    def download_arisj_output_day_list(self):
        """Download the ARIS-J output-day calendar into a temp file; return its path."""
        file_name = environment.JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME
        object_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{file_name}'
        # Save as a temporary file in a fresh directory.
        local_path = path.join(tempfile.mkdtemp(), file_name)
        with open(local_path, mode='wb') as f:
            self._s3_client.download_file(self._bucket_name, object_key, f)
            f.seek(0)
        return local_path
class ArisjBucket(S3Bucket):
    """Helper for the ARIS-J data bucket."""

    _bucket_name = environment.ARISJ_DATA_BUCKET
    # NOTE(review): this is the *backup* folder name; it is only used to strip
    # a prefix in backup_arisj_csv_file below — confirm that is intended.
    _folder = environment.ARISJ_BACKUP_FOLDER

    def upload_arisj_csv_file(self, arisj_create_csv: str, csv_file_path: str):
        """Upload the generated CSV to the ARIS-J data folder on S3.

        Args:
            arisj_create_csv (str): file name to create on S3.
            csv_file_path (str): local path of the CSV to upload.
        """
        # Consistency fix: use the shared client and bucket from S3Bucket
        # instead of constructing a new S3Client and re-reading the bucket
        # name from the environment (same values, one code path).
        csv_file_name = f'{environment.ARISJ_DATA_FOLDER}/{arisj_create_csv}'
        self._s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name)
        return

    def backup_arisj_csv_file(self, dat_file_key: str, datetime_key: str):
        """Copy a processed CSV into the backup bucket under *datetime_key*."""
        arisj_backup_bucket = ArisjBackupBucket()
        folder = environment.ARISJ_DATA_FOLDER
        dat_file_key = f'{folder}/{dat_file_key}'
        # Backup key: <backup folder>/<datetime>/<key minus this bucket's folder prefix>.
        backup_key = f'{arisj_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}'
        self._s3_client.copy(self._bucket_name, dat_file_key, arisj_backup_bucket._bucket_name, backup_key)
class JskUltBackupBucket(S3Bucket):
    """Helper for the JSK/ULT backup bucket."""
    _bucket_name = environment.JSKULT_BACKUP_BUCKET


class ArisjBackupBucket(JskUltBackupBucket):
    """Backup-bucket helper scoped to the ARIS-J backup folder."""
    _folder = environment.ARISJ_BACKUP_FOLDER

View File

@ -1,51 +0,0 @@
"""バッチ処理の共通関数"""
import logging
import textwrap
from datetime import datetime
from src.db.database import Database
from src.error.exceptions import BatchOperationException, DBException
def get_batch_statuses() -> tuple[str, str]:
    """Read the date table and return the batch status values.

    Returns:
        tuple[str, str]: [0] batch-processing flag, [1] processing date
        formatted as YYYY/MM/DD.

    Raises:
        BatchOperationException: when the date table cannot be read or a
        DB error occurs.
    """
    db = Database.get_instance()
    sql = 'SELECT bch_actf, src05.get_syor_date() AS syor_date FROM src05.hdke_tbl'
    try:
        db.connect()
        hdke_tbl_result = db.execute_select(sql)
    except DBException as e:
        # Chain the cause so the underlying DB error stays in the traceback.
        raise BatchOperationException(e) from e
    finally:
        # NOTE(review): disconnect() also runs when connect() itself failed —
        # assumes Database.disconnect() tolerates that state; confirm.
        db.disconnect()
    if len(hdke_tbl_result) == 0:
        raise BatchOperationException('日付テーブルが取得できませんでした')
    # Exactly one row is expected in the date table.
    hdke_tbl_record = hdke_tbl_result[0]
    batch_processing_flag = hdke_tbl_record['bch_actf']
    syor_date = hdke_tbl_record['syor_date']
    # Convert the processing date to a YYYY/MM/DD string.
    syor_date_str = datetime.strftime(syor_date, '%Y/%m/%d')
    return batch_processing_flag, syor_date_str
def logging_sql(logger: logging.Logger, sql: str) -> None:
    """Emit *sql* to the debug log, framed by dashed separator lines.

    Args:
        logger (logging.Logger): logger to write to.
        sql (str): SQL statement to log (leading newline and trailing
            newline are trimmed after dedenting).
    """
    separator = '-' * 15
    body = textwrap.dedent(sql)[1:-1]
    logger.debug(f'\n{separator}\n{body}\n{separator}')

View File

@ -1,29 +0,0 @@
class BatchContext:
    """Singleton carrying state shared across one monthly batch run."""

    __instance = None

    def __init__(self) -> None:
        # ARIS-J output-day flag defaults to False for a fresh context;
        # the processing date is intentionally left unset until assigned.
        self._arisj_output_day = False

    @classmethod
    def get_instance(cls):
        """Return the process-wide instance, creating it lazily."""
        if cls.__instance is None:
            cls.__instance = cls()
        return cls.__instance

    @property
    def syor_date(self):
        """Processing date as a 'yyyy/mm/dd' string."""
        return self._syor_date

    @syor_date.setter
    def syor_date(self, syor_date_str: str):
        self._syor_date = syor_date_str

    @property
    def is_arisj_output_day(self):
        """True when today is an ARIS-J output day."""
        return self._arisj_output_day

    @is_arisj_output_day.setter
    def is_arisj_output_day(self, flag: bool):
        self._arisj_output_day = flag

View File

@ -1,32 +0,0 @@
class CalendarFile:
    """Wrapper around a calendar text file listing dates, one per line."""

    def __init__(self, calendar_file_path):
        # Keep the raw lines; comment filtering happens per lookup.
        with open(calendar_file_path) as f:
            self.__calendar_file_lines: list[str] = f.readlines()

    def compare_date(self, date_str: str) -> bool:
        """Return True when *date_str* appears on a non-comment line.

        Calendar lines are assumed to hold dates in yyyy/mm/dd form; any
        line containing the configured comment symbol is ignored.

        Args:
            date_str (str): date string in yyyy/mm/dd form.

        Returns:
            bool: True when the date is listed in the calendar file.
        """
        return any(
            date_str in calendar_line
            for calendar_line in self.__calendar_file_lines
            if constants.CALENDAR_COMMENT_SYMBOL not in calendar_line
        )

View File

@ -1,307 +0,0 @@
from datetime import datetime
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.aws.s3 import ArisjBucket
from src.batch.common.batch_context import BatchContext
import tempfile
import os.path as path
import csv
logger = get_logger('ARIS-J連携データ出力')
sql_err_msg = "SQL実行エラーです。"
def exec():
    """Actual-consumption & Ultmarc monthly batch: build the ARIS-J CSV.

    Refreshes the facility IF work tables (normal and warning rows),
    writes the CSV, uploads it to the ARIS-J bucket and backs it up.

    Raises:
        BatchOperationException: wraps any error raised during the run.
    """
    create_date = datetime.now().strftime('%Y%m%d%H%M%S')
    arisj_csv_file_name = f'D0004_ARIS_M_DCF_{create_date}.csv'
    # Fix: initialise before the try so the finally block is safe even when
    # Database.get_instance() itself fails (previously `db` could be unbound
    # there, turning the original error into a NameError).
    db = None
    try:
        logger.info('バッチ処理を開始しました。')
        try:
            db = Database.get_instance()
            # Connect to the DB.
            db.connect()
        except Exception as e:
            logger.info('DB接続エラーです')
            raise e
        # Switch the session time zone to JST.
        db.to_jst()
        # Start the transaction.
        db.begin()
        # --- normal rows ---
        # Drop the facility IF work rows kept from the previous run.
        delete_previous_wk_inst_aris_if_record(db)
        # Select the normal rows and store them in the work table.
        insert_normal_record_into_wk_inst_aris_if(db)
        suc_count = count_wk_inst_aris_if_record(db)
        # --- warning rows ---
        # Drop the facility IF warning work rows kept from the previous run.
        delete_previous_wk_inst_aris_if_wrn_record(db)
        # Select the abnormal rows and store them in the warning work table.
        insert_abnormal_record_into_wk_inst_aris_if_wrn(db)
        wrn_count = count_wk_inst_aris_if_wrn_record(db)
        # Build the CSV from the work table.
        record_csv = csv_data_select(db)
        csv_file_path = make_csv_data(record_csv, arisj_csv_file_name)
        # Close the transaction.
        db.commit()
        # Log the processed record counts.
        sum_count = suc_count + wrn_count
        logger.info(f'(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})')
        arisj_bucket = ArisjBucket()
        # Move the CSV file to the ARIS-J bucket.
        try:
            arisj_bucket.upload_arisj_csv_file(arisj_csv_file_name, csv_file_path)
        except Exception as e:
            logger.info('S3バケットArisjへのCSVデータ、移動できませんでした。')
            raise e
        # Back up the processed file.
        try:
            batch_context = BatchContext.get_instance()
            arisj_bucket.backup_arisj_csv_file(arisj_csv_file_name, batch_context.syor_date)
        except Exception as e:
            logger.info('S3バケットArisjバックアップへCSVデータ、コピーできませんでした。')
            raise e
        logger.info('バッチ処理を終了しました。')
    except Exception as e:
        raise BatchOperationException(e)
    finally:
        # Always commit and disconnect on the way out (guarded: db stays
        # None when the connection could not even be constructed).
        if db is not None:
            db.commit()
            db.disconnect()
def delete_previous_wk_inst_aris_if_record(db):
    """Delete the facility IF work rows saved by the previous run."""
    sql = """\
DELETE FROM src05.wk_inst_aris_if
"""
    try:
        db.execute(sql)
        return
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def insert_normal_record_into_wk_inst_aris_if(db):
    """Select the normal (fully populated) facility rows and insert them
    into the work table src05.wk_inst_aris_if.

    Args:
        db: connected Database instance; runs inside the caller's transaction.

    Raises:
        Exception: re-raised after debug-logging when the SQL fails.
    """
    try:
        # INSERT-SELECT that builds the normal-row work table. The nested
        # TRIM(' ' FROM TRIM(' ' FROM ...)) presumably strips full-width
        # and ASCII spaces — the extraction shows both quoted characters as
        # plain spaces, so confirm against the original source.
        sql = """\
INSERT src05.wk_inst_aris_if
SELECT
TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd
,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form
,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name
,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form
,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd
,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name
,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd
,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name
,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address
,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name))
,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no
,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd))
,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd))
,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date
,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date
,sysdate()
FROM src05.com_inst ci
LEFT JOIN src05.mst_prefc cp
ON ci.prefc_cd = cp.prefc_cd
LEFT JOIN src05.mst_city cc
ON ci.prefc_cd = cc.prefc_cd
AND ci.city_cd = cc.city_cd
LEFT OUTER JOIN src05.com_inst_div cd
ON ci.inst_div_cd = cd.inst_div_cd
WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%'
AND ci.dcf_dsf_inst_cd IS NOT NULL
AND ci.form_inst_name_kanji IS NOT NULL
AND ci.prefc_cd IS NOT NULL
AND cp.prefc_name IS NOT NULL
AND cc.city_name IS NOT NULL
AND ci.inst_addr IS NOT NULL
ORDER BY ci.dcf_dsf_inst_cd
"""
        db.execute(sql)
        return
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def count_wk_inst_aris_if_record(db):
    """Return the number of normal rows now in src05.wk_inst_aris_if."""
    sql = """\
SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if
"""
    try:
        rows = db.execute_select(sql)
        return rows[0]['countNum']
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def delete_previous_wk_inst_aris_if_wrn_record(db):
    """Delete the facility IF *warning* work rows saved by the previous run."""
    sql = """\
DELETE FROM src05.wk_inst_aris_if_wrn
"""
    try:
        db.execute(sql)
        return
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def insert_abnormal_record_into_wk_inst_aris_if_wrn(db):
    """Select the abnormal facility rows (one or more required columns NULL)
    and insert them, with per-column warning IDs, into the warning work
    table src05.wk_inst_aris_if_wrn.

    Args:
        db: connected Database instance; runs inside the caller's transaction.

    Raises:
        Exception: re-raised after debug-logging when the SQL fails.
    """
    try:
        # INSERT-SELECT for the warning-row work table: the WHERE clause is
        # the negation of the normal-row filter, and each IF(...) column
        # tags which mandatory field was missing (wrnid_* codes).
        sql = """\
INSERT src05.wk_inst_aris_if_wrn
SELECT
TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd
,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form
,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name
,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form
,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd
,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name
,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd
,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name
,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address
,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name))
,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no
,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd))
,TRIM(' ' FROM TRIM(' ' from ci.manage_cd))
,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date
,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date
,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd
,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form
,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd
,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name
,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name
,IF(ci.inst_addr IS NULL,'bi0402000006', NULL) AS wrnid_address
,sysdate()
FROM src05.com_inst ci
LEFT JOIN src05.mst_prefc cp
ON ci.prefc_cd = cp.prefc_cd
LEFT JOIN src05.mst_city cc
ON ci.prefc_cd = cc.prefc_cd
AND ci.city_cd = cc.city_cd
LEFT OUTER JOIN src05.com_inst_div cd
ON ci.inst_div_cd = cd.inst_div_cd
WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%'
AND( ci.dcf_dsf_inst_cd IS NULL
OR ci.form_inst_name_kanji IS NULL
OR ci.prefc_cd IS NULL
OR cp.prefc_name IS NULL
OR cc.city_name IS NULL
OR ci.inst_addr IS NULL)
ORDER BY ci.dcf_dsf_inst_cd
"""
        db.execute(sql)
        return
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def count_wk_inst_aris_if_wrn_record(db):
    """Return the number of warning rows now in src05.wk_inst_aris_if_wrn."""
    sql = """\
SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn
"""
    try:
        rows = db.execute_select(sql)
        return rows[0]['countNum']
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def csv_data_select(db):
    """Fetch the work-table rows in CSV column order.

    Returns:
        list[dict]: one column-name -> value mapping per CSV data row.
    """
    sql = """\
SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name,
postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd,
'', inst_delete_date
FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd
"""
    try:
        return db.execute_select(sql)
    except Exception as e:
        logger.debug(f'{sql_err_msg}')
        raise e
def make_csv_data(record_csv: list, arisj_csv_file_name: str):
    """Write the fetched rows to a temporary CSV file and return its path.

    Args:
        record_csv (list): row mappings as returned by csv_data_select().
        arisj_csv_file_name (str): file name to use for the CSV.

    Returns:
        str: path of the CSV written inside a fresh temporary directory.
    """
    header = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TJ_HOSPITALSHORT', 'TK_HOSPITAL',
              'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT',
              'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', 'TD_UPDATE']
    try:
        out_path = path.join(tempfile.mkdtemp(), arisj_csv_file_name)
        with open(out_path, mode='w', encoding='cp932') as out:
            # Header line is written directly (not via csv.writer) so it
            # carries no quote characters.
            out.write(f"{','.join(header)}\n")
            # Data rows: cp932 ("Shift-JIS"), every field double-quoted.
            # NOTE(review): the original comment claims CRLF output, but
            # without newline='' the terminator is platform-dependent — confirm.
            writer = csv.writer(out, delimiter=',', lineterminator='\n',
                                quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL,
                                strict=True)
            for row in record_csv:
                # None becomes an empty field.
                writer.writerow(['' if value is None else value for value in row.values()])
    except Exception as e:
        logger.info('ワークデータの作成に失敗しました。')
        logger.info('バッチ処理を異常終了しました。')
        raise e
    return out_path

View File

@ -1,182 +0,0 @@
from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
create_engine, text)
from sqlalchemy.engine.url import URL
from src.error.exceptions import DBException
from src.logging.get_logger import get_logger
from src.system_var import environment
from tenacity import retry, stop_after_attempt, wait_exponential
logger = get_logger(__name__)
class Database:
"""データベース操作クラス"""
__connection: Connection = None
__engine: Engine = None
__host: str = None
__port: str = None
__username: str = None
__password: str = None
__schema: str = None
__connection_string: str = None
def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
    """Initialise a new instance of this class.

    Args:
        username (str): DB user name
        password (str): DB password
        host (str): DB host name
        port (int): DB port
        schema (str): DB schema name
    """
    self.__username = username
    self.__password = password
    self.__host = host
    # Coerce to int: the port may arrive as a string from the environment.
    self.__port = int(port)
    self.__schema = schema
    # SQLAlchemy URL for mysql+pymysql with the utf8mb4 charset.
    self.__connection_string = URL.create(
        drivername='mysql+pymysql',
        username=self.__username,
        password=self.__password,
        host=self.__host,
        port=self.__port,
        database=self.__schema,
        query={"charset": "utf8mb4"}
    )
    # Engine backed by a QueuePool with a short (5s) checkout timeout.
    self.__engine = create_engine(
        self.__connection_string,
        pool_timeout=5,
        poolclass=QueuePool
    )
@classmethod
def get_instance(cls):
    """Build a Database configured from the environment variables.

    Note: a new instance is created on every call (this is a factory,
    not a shared singleton).

    Returns:
        Database: DB access class instance.
    """
    settings = {
        'username': environment.DB_USERNAME,
        'password': environment.DB_PASSWORD,
        'host': environment.DB_HOST,
        'port': environment.DB_PORT,
        'schema': environment.DB_SCHEMA,
    }
    return cls(**settings)
@retry(
wait=wait_exponential(
multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT,
min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS,
max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS
),
stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT),
retry_error_cls=DBException
)
def connect(self):
"""
DBに接続します接続に失敗した場合リトライします
Raises:
DBException: 接続失敗
"""
try:
self.__connection = self.__engine.connect()
except Exception as e:
raise DBException(e)
def execute_select(self, select_query: str, parameters=None) -> list[dict]:
"""SELECTクエリを実行します。
Args:
select_query (str): SELECT文
parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None.
Raises:
DBException: DBエラー
Returns:
list[dict]: カラム名: 値の辞書リスト
"""
if self.__connection is None:
raise DBException('DBに接続していません')
result = None
try:
# トランザクションが開始している場合は、トランザクションを引き継ぐ
if self.__connection.in_transaction():
result = self.__connection.execute(text(select_query), parameters)
else:
# トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。
result = self.__execute_with_transaction(select_query, parameters)
except Exception as e:
raise DBException(f'SQL Error: {e}')
result_rows = result.mappings().all()
return result_rows
def execute(self, query: str, parameters=None) -> CursorResult:
"""SQLクエリを実行します。
Args:
query (str): SQL文
parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None.
Raises:
DBException: DBエラー
Returns:
CursorResult: 取得結果
"""
if self.__connection is None:
raise DBException('DBに接続していません')
result = None
try:
# トランザクションが開始している場合は、トランザクションを引き継ぐ
if self.__connection.in_transaction():
result = self.__connection.execute(text(query), parameters)
else:
# トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。
result = self.__execute_with_transaction(query, parameters)
except Exception as e:
raise DBException(f'SQL Error: {e}')
return result
def begin(self):
"""トランザクションを開始します。"""
if not self.__connection.in_transaction():
self.__connection.begin()
def commit(self):
"""トランザクションをコミットします"""
if self.__connection.in_transaction():
self.__connection.commit()
def rollback(self):
"""トランザクションをロールバックします"""
if self.__connection.in_transaction():
self.__connection.rollback()
def disconnect(self):
"""DB接続を切断します。"""
if self.__connection is not None:
self.__connection.close()
self.__connection = None
def to_jst(self):
self.execute('SET time_zone = "+9:00"')
def __execute_with_transaction(self, query: str, parameters: dict):
# トランザクションを開始してクエリを実行する
with self.__connection.begin():
try:
result = self.__connection.execute(text(query), parameters=parameters)
except Exception as e:
self.__connection.rollback()
raise e
# ここでコミットされる
return result

View File

@ -1,10 +0,0 @@
class MeDaCaException(Exception):
    """Base class for every application-specific exception in this batch."""
class DBException(MeDaCaException):
    """Raised for database connection failures and query errors."""
class BatchOperationException(MeDaCaException):
    """Raised when a step of a batch operation fails."""

View File

@ -1,72 +0,0 @@
"""実消化&アルトマーク 月次バッチ処理"""
from src.aws.s3 import ConfigBucket
from src.aws.s3 import ArisjBackupBucket
from src.batch.batch_functions import get_batch_statuses
from src.batch.common.batch_context import BatchContext
from src.batch.common.calendar_file import CalendarFile
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.system_var import constants
from src.batch import output_arisj_file_process
# Module logger; the (Japanese) name tags every log line from this monthly control.
logger = get_logger('月次処理コントロールARIS-J')
# Shared batch settings (processing date, output-day flag) used by exec() below.
batch_context = BatchContext.get_instance()
# Backup-bucket wrapper. NOTE(review): not referenced anywhere in this module — confirm it is needed.
arisj_bucket = ArisjBackupBucket()
def exec():
    """Monthly batch entry point.

    Fetches the daily-batch status, determines whether the processing date
    is an ARIS-J output day, and if so runs the ARIS-J file output process.

    NOTE(review): every handled failure path logs an "abnormal end" message
    but still returns BATCH_EXIT_CODE_SUCCESS — presumably deliberate so the
    scheduler does not retry and errors surface only through the logs;
    confirm.  Also note that `exec` shadows the Python builtin.
    """
    try:
        logger.info('月次バッチ:開始')
        try:
            logger.info('処理日取得')
            # Fetch the batch-in-progress flag and the processing date.
            batch_processing_flag, syor_date = get_batch_statuses()
        except BatchOperationException as e:
            logger.exception(f'日次ジョブ取得エラー(異常終了){e}')
            return constants.BATCH_EXIT_CODE_SUCCESS
        # While the daily batch is still running, do not continue.
        logger.info('日次ジョブ処理中判定')
        if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING:
            logger.error('日次ジョブ処理中エラー(異常終了)')
            return constants.BATCH_EXIT_CODE_SUCCESS
        # Store the processing date in the shared batch settings.
        batch_context.syor_date = syor_date
        logger.info(f'処理日取得={syor_date}')
        # Decide whether this is an output day by downloading the ARIS-J
        # monthly-batch working-day calendar file and comparing dates.
        try:
            arisj_output_day_list_file_path = ConfigBucket().download_arisj_output_day_list()
            arisj_output_day_calendar = CalendarFile(arisj_output_day_list_file_path)
            batch_context.is_arisj_output_day = arisj_output_day_calendar.compare_date(syor_date)
        except Exception as e:
            logger.exception(f'処理日取得エラー(異常終了){e}')
            return constants.BATCH_EXIT_CODE_SUCCESS
        # Log whether this is an ARIS-J output day (kept for investigation purposes).
        if not batch_context.is_arisj_output_day:
            logger.info('ARIS-J連携データ出力日でない為、処理終了')
            return constants.BATCH_EXIT_CODE_SUCCESS
        logger.info('ARIS-J連携データ出力日です')
        try:
            logger.info('ARIS-J連携データ出力起動')
            output_arisj_file_process.exec()
            logger.info('ARIS-J連携データ出力終了')
        except BatchOperationException as e:
            logger.exception(f'ARIS-J連携データ出力異常終了{e}')
            return constants.BATCH_EXIT_CODE_SUCCESS
        # Notify maintenance users of normal completion via the [NOTICE] log tag.
        logger.info('[NOTICE]月次バッチ:終了(正常終了)')
        return constants.BATCH_EXIT_CODE_SUCCESS
    except Exception as e:
        logger.exception(f'月次バッチ処理中に想定外のエラーが発生しました {e}')
        raise e

View File

@ -1,37 +0,0 @@
import logging
from src.system_var.environment import LOG_LEVEL
# Pre-set the AWS SDK module loggers to WARNING so their DEBUG output is suppressed
# before any project logger is created.
for _noisy_module in ("boto3", "botocore", "s3transfer", "urllib3"):
    logging.getLogger(_noisy_module).setLevel(logging.WARNING)
def get_logger(log_name: str) -> logging.Logger:
    """Return a named logger configured with the project level and line format.

    The level comes from the LOG_LEVEL environment setting; an unrecognized
    value falls back to INFO.  A StreamHandler is attached only when neither
    the logger nor its ancestors already have handlers, and the project
    formatter is (re)applied to all of the logger's own handlers.

    Args:
        log_name (str): logger name

    Returns:
        logging.Logger: configured logger instance
    """
    target = logging.getLogger(log_name)

    # getLevelName returns an int for known level names, a string otherwise.
    resolved_level = logging.getLevelName(LOG_LEVEL)
    target.setLevel(resolved_level if isinstance(resolved_level, int) else logging.INFO)

    if not target.hasHandlers():
        target.addHandler(logging.StreamHandler())

    line_format = logging.Formatter(
        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
        '%Y-%m-%d %H:%M:%S'
    )
    for attached in target.handlers:
        attached.setFormatter(line_format)

    return target

View File

@ -1,8 +0,0 @@
# Batch exit code: normal termination.
BATCH_EXIT_CODE_SUCCESS = 0
# Batch-in-progress flag value: currently processing.
BATCH_ACTF_BATCH_IN_PROCESSING = '1'
# Symbol that starts a comment line in calendar files.
CALENDAR_COMMENT_SYMBOL = '#'

View File

@ -1,24 +0,0 @@
import os

# --- Database (required) -------------------------------------------------
DB_HOST = os.environ['DB_HOST']
DB_PORT = int(os.environ['DB_PORT'])
DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ['DB_SCHEMA']
# --- AWS (required) ------------------------------------------------------
ARISJ_DATA_BUCKET = os.environ['ARISJ_DATA_BUCKET']
JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET']
JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET']
ARISJ_DATA_FOLDER = os.environ['ARISJ_DATA_FOLDER']
ARISJ_BACKUP_FOLDER = os.environ['ARISJ_BACKUP_FOLDER']
JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER']
JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME']
# --- Variables with defaults ---------------------------------------------
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))


def _int_env(name: str, legacy_name: str, default: int) -> int:
    """Read an int env var by its documented name, with a legacy fallback.

    Bug fix: the deployment template (.env.example) declares the retry
    settings as DB_CONNECTION_RETRY_INTERVAL_INIT /
    DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS /
    DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS, but this module previously read
    shorter names (DB_CONNECTION_RETRY_INTERVAL, DB_CONNECTION_RETRY_MIN_SECONDS,
    DB_CONNECTION_RETRY_MAX_SECONDS), so the configured values were silently
    ignored and the defaults used.  Reading the documented name first and the
    legacy name second keeps both spellings working.
    """
    return int(os.environ.get(name, os.environ.get(legacy_name, default)))


DB_CONNECTION_RETRY_INTERVAL_INIT = _int_env('DB_CONNECTION_RETRY_INTERVAL_INIT', 'DB_CONNECTION_RETRY_INTERVAL', 5)
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = _int_env('DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS', 'DB_CONNECTION_RETRY_MIN_SECONDS', 5)
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = _int_env('DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS', 'DB_CONNECTION_RETRY_MAX_SECONDS', 50)

View File

@ -1,22 +0,0 @@
import time
class ElapsedTime:
"""処理実行時間計測クラス"""
def __init__(self) -> None:
"""このクラスの新たなインスタンスを初期化します。"""
self.__start = time.perf_counter()
@property
def of(self):
"""インスタンス化してからの経過時間をhh:mm:ssの形式にフォーマットして返す
Returns:
str: 時分秒形式の経過時間
"""
elapsed_time = time.perf_counter() - self.__start
h, rem = divmod(elapsed_time, 3600)
m, s = divmod(rem, 60)
h_str = f'{h:02.0f} hour ' if h > 0.0 else ''
m_str = f'{m:02.0f} min ' if m > 0.0 else ''
s_str = f'{s:06.02f} sec' if s > 0.0 else ''
return f"{h_str}{m_str}{s_str}"