From ebaa71842d5a0e7d935a45b9c4c40016e2dc7036 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 10:20:41 +0900 Subject: [PATCH 1/8] first commit --- .../jskult-batch-daily/.dockerignore | 12 + .../jskult-batch-daily/.env.example | 26 ++ .../jskult-batch-daily/.gitignore | 10 + .../jskult-batch-daily/.vscode/launch.json | 16 + .../.vscode/recommended_settings.json | 31 ++ .../jskult-batch-daily/Dockerfile | 20 + .../jskult-batch-daily/Pipfile | 29 ++ .../jskult-batch-daily/Pipfile.lock | 397 ++++++++++++++++++ .../jskult-batch-daily/README.md | 292 +++++++++++++ .../jskult-batch-daily/entrypoint.py | 10 + .../jskult-batch-daily/pytest.ini | 3 + .../jskult-batch-daily/src/__init__.py | 0 .../jskult-batch-daily/src/aws/__init__.py | 0 .../jskult-batch-daily/src/aws/s3.py | 185 ++++++++ .../src/batch/common/__init__.py | 0 .../jskult-batch-daily/src/db/__init__.py | 0 .../jskult-batch-daily/src/db/database.py | 195 +++++++++ .../jskult-batch-daily/src/error/__init__.py | 0 .../src/error/exceptions.py | 10 + .../src/jobctrl_dcfInstMergeIo.py | 4 + .../src/logging/get_logger.py | 37 ++ .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 17 + .../src/system_var/environment.py | 38 ++ 24 files changed, 1332 insertions(+) create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock create mode 100644 
ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py create mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore new file mode 100644 index 00000000..8b9da402 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore @@ -0,0 +1,12 @@ +tests/* +.coverage +.env +.env.example +.report/* +.vscode/* +.pytest_cache/* +*/__pycache__/* +Dockerfile +pytest.ini +README.md +*.sql diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example new file mode 100644 index 00000000..500f843d --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example @@ -0,0 +1,26 @@ +DB_HOST=************ +DB_PORT=************ +DB_USERNAME=************ +DB_PASSWORD=************ +DB_SCHEMA=src05 +LOG_LEVEL=INFO +ULTMARC_DATA_BUCKET=**************** +ULTMARC_DATA_FOLDER=recv +JSKULT_BACKUP_BUCKET=**************** +ULTMARC_BACKUP_FOLDER=ultmarc +VJSK_BACKUP_FOLDER=vjsk +JSKULT_CONFIG_BUCKET=********************** +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +VJSK_DATA_SEND_FOLDER=send +VJSK_DATA_RECEIVE_FOLDER=recv +VJSK_DATA_BUCKET=************* +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt +JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert +JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore new file mode 100644 index 00000000..bd0b37f8 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore @@ -0,0 +1,10 @@ +.vscode/settings.json +.env + +# python +__pycache__ + +# python test +.pytest_cache +.coverage +.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json new file mode 100644 index 00000000..9dbaa9c6 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // IntelliSense 
を使用して利用可能な属性を学べます。 + // 既存の属性の説明をホバーして表示します。 + // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(DEBUG)jskult batch daily", + "type": "python", + "request": "launch", + "program": "entrypoint.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json new file mode 100644 index 00000000..2fde8732 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json @@ -0,0 +1,31 @@ +{ + "[python]": { + "editor.defaultFormatter": null, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + // 自身の環境に合わせて変えてください + "python.defaultInterpreterPath": "", + "python.linting.lintOnSave": true, + "python.linting.enabled": true, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": [ + "--max-line-length=200", + "--ignore=F541" + ], + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Path": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length", "200", + "--ignore=F541" + ], + "python.testing.pytestArgs": [ + "tests/batch/" + ], + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile new file mode 100644 index 00000000..fc0fde90 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.12-slim-bookworm + +ENV TZ="Asia/Tokyo" +# pythonの標準出力をバッファリングしないフラグ +ENV PYTHONUNBUFFERED=1 +# pythonのバイトコードを生成しないフラグ +ENV PYTHONDONTWRITEBYTECODE=1 + +WORKDIR /usr/src/app +COPY 
Pipfile Pipfile.lock ./ +RUN \ + apt update -y && \ + pip install pipenv --no-cache-dir && \ + pipenv install --system --deploy && \ + pip uninstall -y pipenv virtualenv-clone virtualenv + +COPY src ./src +COPY entrypoint.py entrypoint.py + +CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile new file mode 100644 index 00000000..a40e6c17 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile @@ -0,0 +1,29 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[scripts] +"test:ultmarc" = "pytest tests/batch/ultmarc/" +"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" +"test:vjsk" = "pytest tests/batch/vjsk/" +"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" + +[packages] +boto3 = "*" +PyMySQL = "*" +sqlalchemy = "*" +tenacity = "*" + +[dev-packages] +autopep8 = "*" +flake8 = "*" +pytest = "*" +pytest-cov = "*" +boto3 = "*" + +[requires] +python_version = "3.9" + +[pipenv] +allow_prereleases = true diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock new file mode 100644 index 00000000..60fdb517 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock @@ -0,0 +1,397 @@ +{ + "_meta": { + "hash": { + "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:6633bce2b73284acce1453ca85834c7c5a59e0dbcce1170be461cc079bdcdfcf", + "sha256:668400d13889d2d2fcd66ce785cc0b0fc040681f58a9c7f67daa9149a52b6c63" + ], + 
"index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==1.38.13" + }, + "botocore": { + "hashes": [ + "sha256:22feee15753cd3f9f7179d041604078a1024701497d27b22be7c6707e8d13ccb", + "sha256:de29fee43a1f02787fb5b3756ec09917d5661ed95b2b2d64797ab04196f69e14" + ], + "markers": "python_version >= '3.9'", + "version": "==1.38.13" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "pymysql": { + "hashes": [ + "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", + "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==1.1.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", + "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" + ], + "markers": "python_version >= '3.9'", + "version": "==0.12.0" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", + "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", + 
"sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", + "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", + "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", + "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", + "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", + "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", + "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", + "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", + "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", + "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", + "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", + "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", + "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", + "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", + "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", + "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", + "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", + "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", + "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", + "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", + "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", + "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", + "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", + "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", + "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", + "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", 
+ "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", + "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", + "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", + "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", + "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", + "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", + "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", + "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", + "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", + "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", + "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", + "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", + "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", + "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", + "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", + "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", + "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", + "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", + "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", + "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", + "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", + "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", + "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", + "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", + "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", + 
"sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", + "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", + "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", + "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==2.0.40" + }, + "tenacity": { + "hashes": [ + "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", + "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==9.1.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", + "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" + ], + "markers": "python_version >= '3.8'", + "version": "==4.13.2" + }, + "urllib3": { + "hashes": [ + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.20" + } + }, + "develop": { + "autopep8": { + "hashes": [ + "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda", + "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==2.3.1" + }, + "boto3": { + "hashes": [ + "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680", + "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.35.57" + }, + "botocore": { + "hashes": [ + "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34", + 
"sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327" + ], + "markers": "python_version >= '3.8'", + "version": "==1.35.57" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376", + "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", + "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111", + "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172", + "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491", + "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", + "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", + "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", + "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", + "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c", + "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", + "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", + "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", + "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0", + "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", + "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", + "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", + "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", + "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", + "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e", + "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", + "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", + "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", + 
"sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea", + "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", + "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", + "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07", + "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", + "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa", + "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901", + "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", + "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", + "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0", + "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", + "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", + "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", + "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51", + "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", + "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3", + "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", + "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076", + "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", + "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", + "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", + "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", + "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", + "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", + "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09", + "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", 
+ "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", + "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f", + "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72", + "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a", + "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", + "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b", + "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", + "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", + "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", + "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", + "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", + "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", + "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858" + ], + "markers": "python_version >= '3.9'", + "version": "==7.6.4" + }, + "exceptiongroup": { + "hashes": [ + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" + ], + "markers": "python_version < '3.11'", + "version": "==1.2.2" + }, + "flake8": { + "hashes": [ + "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", + "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1'", + "version": "==7.1.1" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + 
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "packaging": { + "hashes": [ + "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", + "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" + ], + "markers": "python_version >= '3.8'", + "version": "==24.2" + }, + "pluggy": { + "hashes": [ + "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" + ], + "markers": "python_version >= '3.8'", + "version": "==1.5.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", + "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" + ], + "markers": "python_version >= '3.8'", + "version": "==2.12.1" + }, + "pyflakes": { + "hashes": [ + "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", + "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" + ], + "markers": "python_version >= '3.8'", + "version": "==3.2.0" + }, + "pytest": { + "hashes": [ + "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", + "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==8.3.3" + }, + "pytest-cov": { + "hashes": [ + "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", + "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==6.0.0" + }, + "python-dateutil": { 
+ "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9.0.post0" + }, + "s3transfer": { + "hashes": [ + "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d", + "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c" + ], + "markers": "python_version >= '3.8'", + "version": "==0.10.3" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "tomli": { + "hashes": [ + "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", + "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.2" + }, + "urllib3": { + "hashes": [ + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + ], + "markers": "python_version < '3.10'", + "version": "==1.26.20" + } + } +} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md new file mode 100644 index 00000000..144cf9b8 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md @@ -0,0 +1,292 @@ +# 実消化&アルトマーク 日次バッチ + +## 概要 + +実消化&アルトマークの日次バッチ処理。 + +## 環境情報 + +- Python 3.9 +- MySQL 8.23 +- VSCode + +## 環境構築 + +- Python の構築 + + - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 + - 「Pipenv の導入」までを行っておくこと + - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する + - `pipenv install --dev --python ` 
+ - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく + +- MySQL の環境構築 + - Windows の場合、以下のリンクからダウンロードする + - + - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 + - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると + - `docker-compose up -d` + - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと + - データを投入する + - 立ち上げたデータベースに「src05」スキーマを作成する + - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する + - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` +- 環境変数の設定 + - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する + - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください +- VSCode の設定 + - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する + - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する + +## 実行 + +- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 +- 「entrypoint.py」が、バッチ処理のエントリーポイント。 +- 実際の処理は、「src/jobctrl_daily.py」で行っている。 + +## 単体テスト(アルトマーク取込処理) + +アルトマーク取込処理は、単体テストコードを使用してテスト自動化を行う + +### テスト準備 + +- VSCodeで以下の拡張機能をインストールする + - Python + - Python Test Explorer for Visual Studio Code + - Test Explorer UI +- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く +- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する +- 現在のワークスペースを選び、「pytest」を選択する +- 「tests」フォルダを選択する +- バックグランドで、pytest モジュールのインストールが始まれば成功 + +### テスト用のサブコマンド一覧 + +- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる + - `Pipfile`内の「scripts」セクションに宣言されている + +| コマンド | 概要 | +| ---------------- | -------------------------------------------------------------------------------------------- | +| test:ultmarc | tests/batch/ultmarc フォルダ配下のユニットテストを実行する | +| test:ultmarc:cov | tests/batch/ultmarc フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | + +### テスト共通関数の仕様 + +- 
tests/testing_utility.py内の共通関数の仕様について記載する + +#### create_ultmarc_test_data_from_csv + +- 引数 + - file_path: str +- 戻り値 + - src.batch.ultmarc.datfile.DatFileのインスタンス +- 処理概要 + - CSVファイルから、アルトマークのインプットデータを作成する + - データフォーマットは以下 + - 文字コード: UTF-8 + - 改行コード:LF + - ヘッダ: なし + - 値囲い: ダブルクォート + - アルトマークデータと文字コードを合わせるため、指定されたファイルを一時ディレクトリに、文字コード「cp932」で書き出してからテストデータとして読み込む + - テストデータそのものはUTF-8の文字コードで作成すること + +### create_db_data_from_csv + +- 引数 + - file_path: str +- 戻り値 + - テーブルのレコードに相当する辞書のリスト +- 処理概要 + - CSVファイルから、アルトマークテーブルに相当するテストデータを作成する + - テストの初期データ、期待値データを作成するのに利用する + - データフォーマットは以下 + - 文字コード: UTF-8 + - 改行コード:LF + - ヘッダ: なし + - 値囲い: ダブルクォート + - ファイル内の、以下の形式のデータを自動的に変換する + - `NULL` + - `None`に変換される + - `yyyy-mm-dd`もしくは、`yyyy/mm/dd`の文字 + - Date型に変換される + - `yyyy-mm-dd hh:mm:ss`もしくは、`yyyy/mm/dd hh:mm:ss`の文字 + - DateTime型に変換される + +### create_insert_sql_with_parameter + +- 引数 + - table_name: str テーブル名 + - column_names: list[str] カラム名のリスト + - test_data: list[str]: 値のリスト +- 戻り値 + - INSERT文とバインドパラメータ辞書 +- 処理概要 + - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でINSERT文、バインドパラメータを作成する + +### create_delete_sql_with_parameter + +- 引数 + - table_name: str テーブル名 + - column_names: list[str] カラム名のリスト + - test_data: list[str]: 値のリスト +- 戻り値 + - DELETE文とバインドパラメータ辞書 +- 処理概要 + - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でDELETE文、バインドパラメータを作成する + +### create_ultmarc_table_mapper_sut + +- 引数 + - line: src.batch.ultmarc.datfile.DatFileLine アルトマークデータファイルの1行 + - db: src.db.Database データベース操作クラス +- 戻り値 + - マッパークラス +- 処理概要 + - src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory.UltmarcTableMapperFactoryを通じて、テスト対象のマッパークラスを生成して返す + +### assert_table_results + +- 引数 + - actual_rows: list[dict] テスト結果の辞書リスト + - expect_rows: list[dict] 期待値の辞書リスト + - ignore_col_name: list 比較を無視するDBのカラム名. Default None. 
+- 戻り値 + - なし +- 処理概要 + - テスト結果データと期待値データを突き合わせ、期待値どおりとなっているかを確認する + - ignore_col_nameに指定したカラムは、呼び出し元のテストコード内で個別に突き合わせする + + +## 単体テスト(実消化データ取込処理) + +実消化データは、単体テストコードを使用してテスト自動化を行う + +### テスト準備 + +※単体テスト(アルトマーク取込処理)と同じ + +### テスト用のサブコマンド一覧 + +- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる + - `Pipfile`内の「scripts」セクションに宣言されている + +| コマンド | 概要 | +| ---------------- | -------------------------------------------------------------------------------------------- | +| test:vjsk | tests/batch/vjsk フォルダ配下のユニットテストを実行する | +| test:vjsk:cov | tests/batch/vjsk フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | + +### テスト共通関数の仕様 + +- tests/testing_vjsk_utility.py内の共通関数の仕様について記載する + +#### create_vjsk_assertion_list + +- 概要 + - DB登録期待値リストを作成する +- Args: + - file_path (str): DB登録期待値ファイル(tsvファイル)のパス + - memo: ※DB登録期待値ファイルの前提 + - memo: 受領データファイルと同じ + - memo: BOM付きtsv形式 + - memo: 一行目はカラム名になっているヘッダ行 + - Returns: + - List(dict) DB登録期待値辞書リスト + + +## フォルダ構成 + +```text +. +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Dockerfile -- Dockerイメージを作成するためのファイル +├── Pipfile -- Pythonモジュールの依存関係を管理するファイル +├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル +├── README.md -- 当ファイル +├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル +├── src -- ソースコードの保管場所 +│ ├── aws -- AWS関連処理 +│ │ └── s3.py -- S3クライアントとバケット処理 +│ ├── batch -- バッチ処理関連ソース置き場 +│ │ ├── batch_functions.py -- バッチ処理共通関数置き場 +│ │ ├── datachange -- 実績洗替関連ソース置き場 +│ │ │ └── emp_chg_inst_lau.py -- 施設担当者マスタ洗替 +│ │ └── jissekiaraigae.py -- 実績洗替処理のエントリーポイント +│ │ └── ultmarc -- アルトマーク関連処理 +│ │ ├── ultmarc_process.py -- アルトマーク関連処理のエントリーポイント +│ │ ├── datfile.py -- データファイル読込 +│ │ └── utmp_tables -- アルトマークテーブルへの登録関連 +│ │ ├── table_mapper -- テーブルへのデータマッピング処理 +│ │ │ ├── concrete -- テーブルマッパーのマッピング処理を行う具象クラス(全テーブル分) +│ │ │ │ ├── com_alma_mapper.py +│ │ │ │ ├── ... +│ │ │ │ └── null_mapper.py -- テスト用、空振りするマッパークラス +│ │ │ └── ultmarc_table_mapper.py -- テーブルへの登録処理を行う抽象クラス +│ │ ├── tables -- アルトマークデータのDTOクラス(全テーブル分) +│ │ │ ├── com_alma.py +│ │ │ ├── ... 
+│ │ │ └── ultmarc_table.py -- アルトマークテーブルの抽象クラス +│ │ └── ultmarc_table_mapper_factory.py -- テーブルマッパー生成クラス +│ ├── db +│ │ └── database.py -- データベース操作共通処理 +│ ├── error +│ │ └── exceptions.py -- カスタム例外 +│ ├── jobctrl_daily.py -- 日次バッチ処理のエントリーポイント。「entrypoint.py」 から呼ばれる。 +│ ├── logging +│ │ └── get_logger.py -- ログ出力の共通処理 +│ ├── system_var +│ │ └── environment.py -- 環境変数 +│ └── time +│ └── elapsed_time.py -- 実行時間計測用 +└── tests -- ユニットテストのルートディレクト + ├── batch + │ └── ultmarc -- アルトマーク関連のユニットテストを格納する + │ │ └── utmp_tables + │ │ └── table_mapper -- 以下、マッパークラス単位でフォルダを切る + │ │ └── com_alma + │ │ ├── test_com_alma_mapper.py -- テストコード本体 + │ │ ├── com_alma_insert.csv -- S3に配置される想定のテストCSVデータ。ケースごとに用意する。 + │ │ ... + │ │ ├── db_com_alma_before_update.csv -- テスト時に事前にDBに登録しておくデータ。CSVで用意する。 + │ │ ... + │ │ ├── expect_com_alma_insert.csv -- テストの期待値データ。CSVで用意する。 + │ │ ... + │ └─vjsk -- 実消化データ取込処理関連のユニットテストを格納する + │ │ + │ ├─vjsk_file_check -- 受領ファイルチェック処理関連のユニットテストを格納する + │ │ ├─conftest.py -- テスト内で共通利用できるフィクスチャの宣言 + │ │ └─test_vjsk_file_check.py -- テストクラス本体 + │ │ + │ └─vjsk_load -- 受領データ登録処理関連のユニットテストを格納する + │ │ conftest.py -- テスト内で共通利用できるフィクスチャの宣言 + │ │ test_vjsk_load.py -- テストクラス本体 + │ │ + │ └─testdata -- テストモジュールが使用するテストデータを格納する + │ │ bio_slip_data_202304280000.tsv -- 正常ケースの単体確認用 + │ │ ... -- *20230428* は新規4件の登録確認用 + │ │ whs_mst_202304290000.tsv -- *20230429* は更新2件+追加新規2件の登録確認用 + │ │ + │ ├─NoData -- 正常ケースの単体確認用 + │ │ bio_slip_data_nodatarecord.tsv -- ヘッダ行のみでデータが0件の動作確認用 + │ │ ... + │ │ whs_mst_nodatarecord.tsv + │ │ + │ ├─TestFormatErrorFile -- 異常ケースの単体確認用 + │ │ bio_slip_data_formaterror.tsv -- 末尾行のタブ数が想定と異なる(ファイル欠落がある)ときの動作確認用 + │ │ ... + │ │ whs_mst_formaterror.tsv + │ │ + │ ├─TestImportFileToDb -- 正常ケースの単体確認用 + │ │ bio_slip_data_202304270000.gz -- 対向元システムから送られてきた状態(gz圧縮)の受領データファイルの動作確認用 + │ │ ... + │ │ whs_mst_202304270000.gz + │ │ + │ └─UnzipError -- 異常ケースの単体確認用 + │ bio_slip_data_202304270000.gz -- gz圧縮ファイルが解凍できないときの動作確認用 + │ ... 
+ │ whs_mst_202304270000.gz + │ + ├── conftest.py -- テスト内で共通利用できるフィクスチャを宣言する(執筆時点ではDBのみ) + ├── testing_utility.py -- テストの共通関数 + └── testing_vjsk_utility.py -- テストの共通関数(実消化データ取込処理関連) +``` + diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py new file mode 100644 index 00000000..62891bf7 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py @@ -0,0 +1,10 @@ +"""実消化&アルトマーク DCF施設削除新規マスタ作成のエントリーポイント""" +from src import jobctrl_dcfInstMergeIo + +if __name__ == '__main__': + try: + exit(jobctrl_dcfInstMergeIo.exec()) + except Exception: + # エラーが起きても、正常系のコードで返す。 + # エラーが起きた事実はbatch_process内でログを出す。 + exit(0) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini new file mode 100644 index 00000000..5dbe2661 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +log_format = %(levelname)s %(asctime)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py new file mode 100644 index 00000000..6203868d --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py @@ -0,0 +1,185 @@ +import gzip +import os +import os.path as path +import shutil +import tempfile + +import boto3 + +from src.system_var import environment + 
+ +class S3Client: + __s3_client = boto3.client('s3') + _bucket_name: str + + def list_objects(self, bucket_name: str, folder_name: str): + response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + if response['KeyCount'] == 0: + return [] + contents = response['Contents'] + # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] + return objects + + def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: + copy_source = {'Bucket': src_bucket, 'Key': src_key} + self.__s3_client.copy(copy_source, dest_bucket, dest_key) + return + + def download_file(self, bucket_name: str, file_key: str, file): + self.__s3_client.download_fileobj( + Bucket=bucket_name, + Key=file_key, + Fileobj=file + ) + return + + def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): + self.__s3_client.upload_file( + local_file_path, + Bucket=bucket_name, + Key=file_key + ) + + def delete_file(self, bucket_name: str, file_key: str): + self.__s3_client.delete_object( + Bucket=bucket_name, + Key=file_key + ) + + +class S3Bucket(): + _s3_client = S3Client() + _bucket_name: str = None + + +class UltmarcBucket(S3Bucket): + _bucket_name = environment.ULTMARC_DATA_BUCKET + _folder = environment.ULTMARC_DATA_FOLDER + + def list_dat_file(self): + return self._s3_client.list_objects(self._bucket_name, self._folder) + + def download_dat_file(self, dat_filename: str): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, dat_filename, f) + f.seek(0) + return temporary_file_path + + def backup_dat_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + ultmarc_backup_bucket = UltmarcBackupBucket() + backup_key 
= f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) + # コピー元のファイルを削除 + self._s3_client.delete_file(self._bucket_name, dat_file_key) + + +class ConfigBucket(S3Bucket): + _bucket_name = environment.JSKULT_CONFIG_BUCKET + + def download_holiday_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) + holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + f.seek(0) + return temporary_file_path + + def download_wholesaler_stock_input_day_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) + f.seek(0) + return temporary_file_path + + def download_ultmarc_hex_convert_config(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) + hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) + f.seek(0) + return temporary_file_path + + +class JskUltBackupBucket(S3Bucket): + _bucket_name = 
environment.JSKULT_BACKUP_BUCKET + + +class UltmarcBackupBucket(JskUltBackupBucket): + _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER + + +class VjskReceiveBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER + + _s3_file_list = None + + def get_s3_file_list(self): + self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) + return self._s3_file_list + + def download_data_file(self, data_filename: str): + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, data_filename, f) + f.seek(0) + return temporary_file_path + + def unzip_data_file(self, filename: str): + temp_dir = os.path.dirname(filename) + decompress_filename = os.path.basename(filename).replace('.gz', '') + decompress_file_path = os.path.join(temp_dir, decompress_filename) + with gzip.open(filename, 'rb') as gz: + with open(decompress_file_path, 'wb') as decompressed_file: + shutil.copyfileobj(gz, decompressed_file) + + ret = [decompress_file_path] + return ret + + def backup_dat_file(self, target_files: list, datetime_key: str): + jskult_backup_bucket = VjskBackupBucket() + for target_file in target_files: + backup_from_file_path = target_file.get("filename") + backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") + backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' + self._s3_client.copy(self._bucket_name, backup_from_file_path, + jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.delete_file(self._bucket_name, backup_from_file_path) + + +class VjskSendBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _send_folder = 
environment.VJSK_DATA_SEND_FOLDER + + def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + # S3バケットにファイルを移動 + csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + return + + def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + vjsk_backup_bucket = VjskBackupBucket() + dat_key = f'{self._send_folder}/{dat_file_key}' + backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py new file mode 100644 index 00000000..5ddaba4e --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py @@ -0,0 +1,195 @@ +from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, + create_engine, text) +from sqlalchemy.engine.url import URL +from tenacity import retry, stop_after_attempt, wait_exponential + +from src.error.exceptions import DBException +from src.logging.get_logger import get_logger +from src.system_var import environment + +logger = get_logger(__name__) + + +class Database: + """データベース操作クラス""" + __connection: Connection = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None + 
__host: str = None + __port: str = None + __username: str = None + __password: str = None + __schema: str = None + __autocommit: bool = None + __connection_string: str = None + + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: + """このクラスの新たなインスタンスを初期化します + + Args: + username (str): DBユーザー名 + password (str): DBパスワード + host (str): DBホスト名 + port (int): DBポート + schema (str): DBスキーマ名 + autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. + """ + self.__username = username + self.__password = password + self.__host = host + self.__port = int(port) + self.__schema = schema + self.__autocommit = autocommit + + self.__connection_string = URL.create( + drivername='mysql+pymysql', + username=self.__username, + password=self.__password, + host=self.__host, + port=self.__port, + database=self.__schema, + query={"charset": "utf8mb4", "local_infile": "1"}, + ) + + self.__transactional_engine = create_engine( + self.__connection_string, + pool_timeout=5, + poolclass=QueuePool + ) + + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + + @classmethod + def get_instance(cls, autocommit=False): + """インスタンスを取得します + + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
+ Returns: + Database: DB操作クラスインスタンス + """ + return cls( + username=environment.DB_USERNAME, + password=environment.DB_PASSWORD, + host=environment.DB_HOST, + port=environment.DB_PORT, + schema=environment.DB_SCHEMA, + autocommit=autocommit + ) + + @retry( + wait=wait_exponential( + multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, + min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, + max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS + ), + stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT), + retry_error_cls=DBException + ) + def connect(self): + """ + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) + Raises: + DBException: 接続失敗 + """ + try: + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) + except Exception as e: + raise DBException(e) + + def execute_select(self, select_query: str, parameters=None) -> list[dict]: + """SELECTクエリを実行します。 + + Args: + select_query (str): SELECT文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. + + Raises: + DBException: DBエラー + + Returns: + list[dict]: カラム名: 値の辞書リスト + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(select_query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(select_query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + result_rows = result.mappings().all() + return result_rows + + def execute(self, query: str, parameters=None) -> CursorResult: + """SQLクエリを実行します。 + + Args: + query (str): SQL文 + parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
+ + Raises: + DBException: DBエラー + + Returns: + CursorResult: 取得結果 + """ + if self.__connection is None: + raise DBException('DBに接続していません') + + result = None + try: + # トランザクションが開始している場合は、トランザクションを引き継ぐ + if self.__connection.in_transaction(): + result = self.__connection.execute(text(query), parameters) + else: + # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 + result = self.__execute_with_transaction(query, parameters) + except Exception as e: + raise DBException(f'SQL Error: {e}') + + return result + + def begin(self): + """トランザクションを開始します。""" + if not self.__connection.in_transaction(): + self.__connection.begin() + + def commit(self): + """トランザクションをコミットします""" + if self.__connection.in_transaction(): + self.__connection.commit() + + def rollback(self): + """トランザクションをロールバックします""" + if self.__connection.in_transaction(): + self.__connection.rollback() + + def disconnect(self): + """DB接続を切断します。""" + if self.__connection is not None: + self.__connection.close() + self.__connection = None + + def to_jst(self): + self.execute('SET time_zone = "+9:00"') + + def __execute_with_transaction(self, query: str, parameters: dict): + # トランザクションを開始してクエリを実行する + with self.__connection.begin(): + try: + result = self.__connection.execute(text(query), parameters=parameters) + except Exception as e: + self.__connection.rollback() + raise e + # ここでコミットされる + return result diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py new file mode 100644 index 00000000..055c24f6 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py @@ -0,0 +1,10 @@ +class MeDaCaException(Exception): + pass + + +class 
DBException(MeDaCaException): + pass + + +class BatchOperationException(MeDaCaException): + pass diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py new file mode 100644 index 00000000..9c29840c --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py @@ -0,0 +1,4 @@ +"""実消化&アルトマーク DCF施設削除新規マスタ作成""" + +def exec(): + pass \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py new file mode 100644 index 00000000..f36f1199 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py @@ -0,0 +1,37 @@ +import logging + +from src.system_var.environment import LOG_LEVEL + +# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する +for name in ["boto3", "botocore", "s3transfer", "urllib3"]: + logging.getLogger(name).setLevel(logging.WARNING) + + +def get_logger(log_name: str) -> logging.Logger: + """一意のログ出力モジュールを取得します。 + + Args: + log_name (str): ロガー名 + + Returns: + logging.Logger: 設定済みのロガー + """ + logger = logging.getLogger(log_name) + level = logging.getLevelName(LOG_LEVEL) + if not isinstance(level, int): + level = logging.INFO + logger.setLevel(level) + + if not logger.hasHandlers(): + handler = logging.StreamHandler() + logger.addHandler(handler) + + formatter = logging.Formatter( + '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', + '%Y-%m-%d %H:%M:%S' + ) + + for handler in logger.handlers: + handler.setFormatter(formatter) + + return logger diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py new file mode 100644 index 00000000..8a0ccbb3 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py @@ -0,0 +1,17 @@ +# バッチ正常終了コード +BATCH_EXIT_CODE_SUCCESS = 0 + +# バッチ処理中フラグ:未処理 +BATCH_ACTF_BATCH_UNPROCESSED = '0' +# バッチ処理中フラグ:処理中 +BATCH_ACTF_BATCH_IN_PROCESSING = '1' +# dump取得状態区分:未処理 +DUMP_STATUS_KBN_UNPROCESSED = '0' +# dump取得状態区分:dump取得正常終了 +DUMP_STATUS_KBN_COMPLETE = '2' + +# カレンダーファイルのコメントシンボル +CALENDAR_COMMENT_SYMBOL = '#' + +# 月曜日(datetime.weekday()で月曜日を表す数字) +WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py new file mode 100644 index 00000000..0af7a118 --- /dev/null +++ b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py @@ -0,0 +1,38 @@ +import os + +# Database +DB_HOST = os.environ['DB_HOST'] +DB_PORT = int(os.environ['DB_PORT']) +DB_USERNAME = os.environ['DB_USERNAME'] +DB_PASSWORD = os.environ['DB_PASSWORD'] +DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS +ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] +ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] +JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] +ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] +JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] +JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] +VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = 
os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] +JSKULT_CONFIG_CONVERT_FOLDER = os.environ['JSKULT_CONFIG_CONVERT_FOLDER'] +JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME = os.environ['JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME'] +VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER'] + +# 初期値がある環境変数 +LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') +DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) + +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] From b24d1b0eb9e7f39ec3e50258bbe06433dbf147d9 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 19:52:09 +0900 Subject: [PATCH 2/8] =?UTF-8?q?=E8=AA=A4=E3=81=A3=E3=81=A6=E8=BF=BD?= =?UTF-8?q?=E5=8A=A0=E3=81=97=E3=81=9F=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=81=AE=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../jskult-batch-daily/.dockerignore | 12 - .../jskult-batch-daily/.env.example | 26 -- .../jskult-batch-daily/.gitignore | 10 - .../jskult-batch-daily/.vscode/launch.json | 16 - .../.vscode/recommended_settings.json | 31 -- .../jskult-batch-daily/Dockerfile | 20 - .../jskult-batch-daily/Pipfile | 29 -- .../jskult-batch-daily/Pipfile.lock | 397 ------------------ .../jskult-batch-daily/README.md | 292 ------------- .../jskult-batch-daily/entrypoint.py | 10 - .../jskult-batch-daily/pytest.ini | 3 - 
.../jskult-batch-daily/src/__init__.py | 0 .../jskult-batch-daily/src/aws/__init__.py | 0 .../jskult-batch-daily/src/aws/s3.py | 185 -------- .../src/batch/common/__init__.py | 0 .../jskult-batch-daily/src/db/__init__.py | 0 .../jskult-batch-daily/src/db/database.py | 195 --------- .../jskult-batch-daily/src/error/__init__.py | 0 .../src/error/exceptions.py | 10 - .../src/jobctrl_dcfInstMergeIo.py | 4 - .../src/logging/get_logger.py | 37 -- .../src/system_var/__init__.py | 0 .../src/system_var/constants.py | 17 - .../src/system_var/environment.py | 38 -- 24 files changed, 1332 deletions(-) delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py delete mode 100644 
ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py delete mode 100644 ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore deleted file mode 100644 index 8b9da402..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.dockerignore +++ /dev/null @@ -1,12 +0,0 @@ -tests/* -.coverage -.env -.env.example -.report/* -.vscode/* -.pytest_cache/* -*/__pychache__/* -Dockerfile -pytest.ini -README.md -*.sql diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example deleted file mode 100644 index 500f843d..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.env.example +++ /dev/null @@ -1,26 +0,0 @@ -DB_HOST=************ -DB_PORT=************ -DB_USERNAME=************ -DB_PASSWORD=************ -DB_SCHEMA=src05 -LOG_LEVEL=INFO -ULTMARC_DATA_BUCKET=**************** -ULTMARC_DATA_FOLDER=recv -JSKULT_BACKUP_BUCKET=**************** -ULTMARC_BACKUP_FOLDER=ultmarc -VJSK_BACKUP_FOLDER=vjsk -JSKULT_CONFIG_BUCKET=********************** -JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar 
-JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt -VJSK_DATA_SEND_FOLDER=send -VJSK_DATA_RECEIVE_FOLDER=recv -VJSK_DATA_BUCKET=************* -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt -JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau -# 卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore deleted file mode 100644 index bd0b37f8..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -.vscode/settings.json -.env - -# python -__pycache__ - -# python test -.pytest_cache -.coverage -.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json deleted file mode 100644 index 9dbaa9c6..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/launch.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - // IntelliSense を使用して利用可能な属性を学べます。 - // 既存の属性の説明をホバーして表示します。 - // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "(DEBUG)jskult batch daily", - "type": "python", - "request": "launch", - "program": "entrypoint.py", - "console": "integratedTerminal", - "justMyCode": true - } - ] -} \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json deleted file mode 100644 index 2fde8732..00000000 --- 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/.vscode/recommended_settings.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "[python]": { - "editor.defaultFormatter": null, - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true - } - }, - // 自身の環境に合わせて変えてください - "python.defaultInterpreterPath": "", - "python.linting.lintOnSave": true, - "python.linting.enabled": true, - "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": true, - "python.linting.flake8Args": [ - "--max-line-length=200", - "--ignore=F541" - ], - "python.formatting.provider": "autopep8", - "python.formatting.autopep8Path": "autopep8", - "python.formatting.autopep8Args": [ - "--max-line-length", "200", - "--ignore=F541" - ], - "python.testing.pytestArgs": [ - "tests/batch/" - ], - - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true -} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile deleted file mode 100644 index fc0fde90..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.12-slim-bookworm - -ENV TZ="Asia/Tokyo" -# pythonの標準出力をバッファリングしないフラグ -ENV PYTHONUNBUFFERED=1 -# pythonのバイトコードを生成しないフラグ -ENV PYTHONDONTWRITEBYTECODE=1 - -WORKDIR /usr/src/app -COPY Pipfile Pipfile.lock ./ -RUN \ - apt update -y && \ - pip install pipenv --no-cache-dir && \ - pipenv install --system --deploy && \ - pip uninstall -y pipenv virtualenv-clone virtualenv - -COPY src ./src -COPY entrypoint.py entrypoint.py - -CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile deleted file mode 100644 index a40e6c17..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile +++ /dev/null @@ -1,29 +0,0 @@ -[[source]] -url = 
"https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[scripts] -"test:ultmarc" = "pytest tests/batch/ultmarc/" -"test:ultmarc:cov" = "pytest --cov=src/batch/ultmarc/ --cov-branch --cov-report=term-missing tests/batch/ultmarc/" -"test:vjsk" = "pytest tests/batch/vjsk/" -"test:vjsk:cov" = "pytest --cov=src/batch/vjsk/ --cov-branch --cov-report=term-missing tests/batch/vjsk/" - -[packages] -boto3 = "*" -PyMySQL = "*" -sqlalchemy = "*" -tenacity = "*" - -[dev-packages] -autopep8 = "*" -flake8 = "*" -pytest = "*" -pytest-cov = "*" -boto3 = "*" - -[requires] -python_version = "3.9" - -[pipenv] -allow_prereleases = true diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock deleted file mode 100644 index 60fdb517..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/Pipfile.lock +++ /dev/null @@ -1,397 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.9" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "boto3": { - "hashes": [ - "sha256:6633bce2b73284acce1453ca85834c7c5a59e0dbcce1170be461cc079bdcdfcf", - "sha256:668400d13889d2d2fcd66ce785cc0b0fc040681f58a9c7f67daa9149a52b6c63" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==1.38.13" - }, - "botocore": { - "hashes": [ - "sha256:22feee15753cd3f9f7179d041604078a1024701497d27b22be7c6707e8d13ccb", - "sha256:de29fee43a1f02787fb5b3756ec09917d5661ed95b2b2d64797ab04196f69e14" - ], - "markers": "python_version >= '3.9'", - "version": "==1.38.13" - }, - "jmespath": { - "hashes": [ - "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", - "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" - ], - "markers": "python_version >= '3.7'", - 
"version": "==1.0.1" - }, - "pymysql": { - "hashes": [ - "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", - "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0" - ], - "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==1.1.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.9.0.post0" - }, - "s3transfer": { - "hashes": [ - "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18", - "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c" - ], - "markers": "python_version >= '3.9'", - "version": "==0.12.0" - }, - "six": { - "hashes": [ - "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", - "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.17.0" - }, - "sqlalchemy": { - "hashes": [ - "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", - "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", - "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", - "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", - "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", - "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", - "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", - "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", - "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", - "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", - 
"sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", - "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", - "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", - "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", - "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", - "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", - "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", - "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", - "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", - "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", - "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", - "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", - "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", - "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", - "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", - "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", - "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", - "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", - "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", - "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", - "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", - "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", - "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", - "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", - "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", - "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", 
- "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", - "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", - "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", - "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", - "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", - "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", - "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", - "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", - "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", - "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", - "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", - "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", - "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", - "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", - "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", - "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", - "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", - "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", - "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", - "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", - "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" - ], - "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==2.0.40" - }, - "tenacity": { - "hashes": [ - "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", - "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==9.1.2" - }, - 
"typing-extensions": { - "hashes": [ - "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", - "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" - ], - "markers": "python_version >= '3.8'", - "version": "==4.13.2" - }, - "urllib3": { - "hashes": [ - "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", - "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.20" - } - }, - "develop": { - "autopep8": { - "hashes": [ - "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda", - "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.3.1" - }, - "boto3": { - "hashes": [ - "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680", - "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.35.57" - }, - "botocore": { - "hashes": [ - "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34", - "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327" - ], - "markers": "python_version >= '3.8'", - "version": "==1.35.57" - }, - "coverage": { - "extras": [ - "toml" - ], - "hashes": [ - "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376", - "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", - "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111", - "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172", - "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491", - "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", - 
"sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", - "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", - "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", - "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c", - "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", - "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", - "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", - "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0", - "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", - "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", - "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", - "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", - "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", - "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e", - "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", - "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", - "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", - "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea", - "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", - "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", - "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07", - "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", - "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa", - "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901", - "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", - "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", 
- "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0", - "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", - "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", - "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", - "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51", - "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", - "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3", - "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", - "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076", - "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", - "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", - "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", - "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", - "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", - "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", - "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09", - "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", - "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", - "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f", - "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72", - "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a", - "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", - "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b", - "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", - "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", - 
"sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", - "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", - "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", - "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", - "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858" - ], - "markers": "python_version >= '3.9'", - "version": "==7.6.4" - }, - "exceptiongroup": { - "hashes": [ - "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", - "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" - ], - "markers": "python_version < '3.11'", - "version": "==1.2.2" - }, - "flake8": { - "hashes": [ - "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", - "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" - ], - "index": "pypi", - "markers": "python_full_version >= '3.8.1'", - "version": "==7.1.1" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "jmespath": { - "hashes": [ - "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", - "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" - ], - "markers": "python_version >= '3.7'", - "version": "==1.0.1" - }, - "mccabe": { - "hashes": [ - "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", - "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" - ], - "markers": "python_version >= '3.6'", - "version": "==0.7.0" - }, - "packaging": { - "hashes": [ - "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", - "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" - ], - "markers": "python_version >= '3.8'", - 
"version": "==24.2" - }, - "pluggy": { - "hashes": [ - "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", - "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" - ], - "markers": "python_version >= '3.8'", - "version": "==1.5.0" - }, - "pycodestyle": { - "hashes": [ - "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", - "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" - ], - "markers": "python_version >= '3.8'", - "version": "==2.12.1" - }, - "pyflakes": { - "hashes": [ - "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", - "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" - ], - "markers": "python_version >= '3.8'", - "version": "==3.2.0" - }, - "pytest": { - "hashes": [ - "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", - "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==8.3.3" - }, - "pytest-cov": { - "hashes": [ - "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", - "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0" - ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==6.0.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.0.post0" - }, - "s3transfer": { - "hashes": [ - "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d", - "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c" - ], - "markers": "python_version >= '3.8'", - "version": "==0.10.3" - }, - "six": { - "hashes": [ - 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "tomli": { - "hashes": [ - "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", - "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.2" - }, - "urllib3": { - "hashes": [ - "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", - "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" - ], - "markers": "python_version < '3.10'", - "version": "==1.26.20" - } - } -} diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md deleted file mode 100644 index 144cf9b8..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/README.md +++ /dev/null @@ -1,292 +0,0 @@ -# 実消化&アルトマーク 日次バッチ - -## 概要 - -実消化&アルトマークの日次バッチ処理。 - -## 環境情報 - -- Python 3.9 -- MySQL 8.23 -- VSCode - -## 環境構築 - -- Python の構築 - - - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 - - 「Pipenv の導入」までを行っておくこと - - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する - - `pipenv install --dev --python ` - - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく - -- MySQL の環境構築 - - Windows の場合、以下のリンクからダウンロードする - - - - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 - - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると - - `docker-compose up -d` - - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと - - データを投入する - - 立ち上げたデータベースに「src05」スキーマを作成する - - 
[ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する - - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` -- 環境変数の設定 - - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する - - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください -- VSCode の設定 - - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する - - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する - -## 実行 - -- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 -- 「entrypoint.py」が、バッチ処理のエントリーポイント。 -- 実際の処理は、「src/jobctrl_daily.py」で行っている。 - -## 単体テスト(アルトマーク取込処理) - -アルトマーク取込処理は、単体テストコードを使用してテスト自動化を行う - -### テスト準備 - -- VSCodeで以下の拡張機能をインストールする - - Python - - Python Test Explorer for Visual Studio Code - - Test Explorer UI -- VSCode 上でショートカット「ctrl」+「shift」+「P」でコマンドパレットを開く -- コマンドパレットの検索窓に「Python」と入力し、「Python: テストを構成する」を押下する -- 現在のワークスペースを選び、「pytest」を選択する -- 「tests」フォルダを選択する -- バックグランドで、pytest モジュールのインストールが始まれば成功 - -### テスト用のサブコマンド一覧 - -- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる - - `Pipfile`内の「scripts」セクションに宣言されている - -| コマンド | 概要 | -| ---------------- | -------------------------------------------------------------------------------------------- | -| test:ultmarc | tests/batch/ultmarc フォルダ配下のユニットテストを実行する | -| test:ultmarc:cov | tests/batch/ultmarc フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | - -### テスト共通関数の仕様 - -- tests/testing_utility.py内の共通関数の仕様について記載する - -#### create_ultmarc_test_data_from_csv - -- 引数 - - file_path: str -- 戻り値 - - src.batch.ultmarc.datfile.DatFileのインスタンス -- 処理概要 - - CSVファイルから、アルトマークのインプットデータを作成する - - データフォーマットは以下 - - 文字コード: UTF-8 - - 改行コード:LF - - ヘッダ: なし - - 値囲い: ダブルクォート - - アルトマークデータと文字コードを合わせるため、指定されたファイルを一時ディレクトリに、文字コード「cp932」で書き出してからテストデータとして読み込む - - テストデータそのものはUTF-8の文字コードで作成すること - -### 
create_db_data_from_csv - -- 引数 - - file_path: str -- 戻り値 - - テーブルのレコードに相当する辞書のリスト -- 処理概要 - - CSVファイルから、アルトマークテーブルに相当するテストデータを作成する - - テストの初期データ、期待値データを作成するのに利用する - - データフォーマットは以下 - - 文字コード: UTF-8 - - 改行コード:LF - - ヘッダ: なし - - 値囲い: ダブルクォート - - ファイル内の、以下の形式のデータを自動的に変換する - - `NULL` - - `None`に変換される - - `yyyy-mm-dd`もしくは、`yyyy/mm/dd`の文字 - - Date型に変換される - - `yyyy-mm-dd hh:mm:ss`もしくは、`yyyy/mm/dd hh:mm:ss`の文字 - - DateTime型に変換される - -### create_insert_sql_with_parameter - -- 引数 - - table_name: str テーブル名 - - column_names: list[str] カラム名のリスト - - test_data: list[str]: 値のリスト -- 戻り値 - - INSERT文とバインドパラメータ辞書 -- 処理概要 - - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でINSERT文、バインドパラメータを作成する - -### create_delete_sql_with_parameter - -- 引数 - - table_name: str テーブル名 - - column_names: list[str] カラム名のリスト - - test_data: list[str]: 値のリスト -- 戻り値 - - DELETE文とバインドパラメータ辞書 -- 処理概要 - - 引数を使用して、`src.db.Database#execute`メソッドで実行可能な形でDELETE文、バインドパラメータを作成する - -### create_ultmarc_table_mapper_sut - -- 引数 - - line: src.batch.ultmarc.datfile.DatFileLine アルトマークデータファイルの1行 - - db: src.db.Database データベース操作クラス -- 戻り値 - - マッパークラス -- 処理概要 - - src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory.UltmarcTableMapperFactoryを通じて、テスト対象のマッパークラスを生成して返す - -### assert_table_results - -- 引数 - - actual_rows: list[dict] テスト結果の辞書リスト - - expect_rows: list[dict] 期待値の辞書リスト - - ignore_col_name: list 比較を無視するDBのカラム名. Default None. 
-- 戻り値 - - なし -- 処理概要 - - テスト結果データと期待値データを突き合わせ、期待値どおりとなっているかを確認する - - ignore_col_nameに指定したカラムは、呼び出し元のテストコード内で個別に突き合わせする - - -## 単体テスト(実消化データ取込処理) - -実消化データは、単体テストコードを使用してテスト自動化を行う - -### テスト準備 - -※単体テスト(アルトマーク取込処理)と同じ - -### テスト用のサブコマンド一覧 - -- `pipenv run`のあとに、サブコマンドとしてユーザー定義スクリプトを実行できる - - `Pipfile`内の「scripts」セクションに宣言されている - -| コマンド | 概要 | -| ---------------- | -------------------------------------------------------------------------------------------- | -| test:vjsk | tests/batch/vjsk フォルダ配下のユニットテストを実行する | -| test:vjsk:cov | tests/batch/vjsk フォルダ配下のユニットテストを実行し、テストカバレッジを取得する(C0, C1) | - -### テスト共通関数の仕様 - -- tests/testing_vjsk_utility.py内の共通関数の仕様について記載する - -#### create_vjsk_assertion_list - -- 概要 - - DB登録期待値リストを作成する -- Args: - - file_path (str): DB登録期待値ファイル(tsvファイル)のパス - - memo: ※DB登録期待値ファイルの前提 - - memo: 受領データファイルと同じ - - memo: BOM付きtsv形式 - - memo: 一行目はカラム名になっているヘッダ行 - - Returns: - - List(dict) DB登録期待値辞書リスト - - -## フォルダ構成 - -```text -. -├── Pipfile -- Pythonモジュールの依存関係を管理するファイル -├── Dockerfile -- Dockerイメージを作成するためのファイル -├── Pipfile -- Pythonモジュールの依存関係を管理するファイル -├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル -├── README.md -- 当ファイル -├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル -├── src -- ソースコードの保管場所 -│ ├── aws -- AWS関連処理 -│ │ └── s3.py -- S3クライアントとバケット処理 -│ ├── batch -- バッチ処理関連ソース置き場 -│ │ ├── batch_functions.py -- バッチ処理共通関数置き場 -│ │ ├── datachange -- 実績洗替関連ソース置き場 -│ │ │ └── emp_chg_inst_lau.py -- 施設担当者マスタ洗替 -│ │ └── jissekiaraigae.py -- 実績洗替処理のエントリーポイント -│ │ └── ultmarc -- アルトマーク関連処理 -│ │ ├── ultmarc_process.py -- アルトマーク関連処理のエントリーポイント -│ │ ├── datfile.py -- データファイル読込 -│ │ └── utmp_tables -- アルトマークテーブルへの登録関連 -│ │ ├── table_mapper -- テーブルへのデータマッピング処理 -│ │ │ ├── concrete -- テーブルマッパーのマッピング処理を行う具象クラス(全テーブル分) -│ │ │ │ ├── com_alma_mapper.py -│ │ │ │ ├── ... -│ │ │ │ └── null_mapper.py -- テスト用、空振りするマッパークラス -│ │ │ └── ultmarc_table_mapper.py -- テーブルへの登録処理を行う抽象クラス -│ │ ├── tables -- アルトマークデータのDTOクラス(全テーブル分) -│ │ │ ├── com_alma.py -│ │ │ ├── ... 
-│ │ │ └── ultmarc_table.py -- アルトマークテーブルの抽象クラス -│ │ └── ultmarc_table_mapper_factory.py -- テーブルマッパー生成クラス -│ ├── db -│ │ └── database.py -- データベース操作共通処理 -│ ├── error -│ │ └── exceptions.py -- カスタム例外 -│ ├── jobctrl_daily.py -- 日次バッチ処理のエントリーポイント。「entrypoint.py」 から呼ばれる。 -│ ├── logging -│ │ └── get_logger.py -- ログ出力の共通処理 -│ ├── system_var -│ │ └── environment.py -- 環境変数 -│ └── time -│ └── elapsed_time.py -- 実行時間計測用 -└── tests -- ユニットテストのルートディレクト - ├── batch - │ └── ultmarc -- アルトマーク関連のユニットテストを格納する - │ │ └── utmp_tables - │ │ └── table_mapper -- 以下、マッパークラス単位でフォルダを切る - │ │ └── com_alma - │ │ ├── test_com_alma_mapper.py -- テストコード本体 - │ │ ├── com_alma_insert.csv -- S3に配置される想定のテストCSVデータ。ケースごとに用意する。 - │ │ ... - │ │ ├── db_com_alma_before_update.csv -- テスト時に事前にDBに登録しておくデータ。CSVで用意する。 - │ │ ... - │ │ ├── expect_com_alma_insert.csv -- テストの期待値データ。CSVで用意する。 - │ │ ... - │ └─vjsk -- 実消化データ取込処理関連のユニットテストを格納する - │ │ - │ ├─vjsk_file_check -- 受領ファイルチェック処理関連のユニットテストを格納する - │ │ ├─conftest.py -- テスト内で共通利用できるフィクスチャの宣言 - │ │ └─test_vjsk_file_check.py -- テストクラス本体 - │ │ - │ └─vjsk_load -- 受領データ登録処理関連のユニットテストを格納する - │ │ conftest.py -- テスト内で共通利用できるフィクスチャの宣言 - │ │ test_vjsk_load.py -- テストクラス本体 - │ │ - │ └─testdata -- テストモジュールが使用するテストデータを格納する - │ │ bio_slip_data_202304280000.tsv -- 正常ケースの単体確認用 - │ │ ... -- *20230428* は新規4件の登録確認用 - │ │ whs_mst_202304290000.tsv -- *20230429* は更新2件+追加新規2件の登録確認用 - │ │ - │ ├─NoData -- 正常ケースの単体確認用 - │ │ bio_slip_data_nodatarecord.tsv -- ヘッダ行のみでデータが0件の動作確認用 - │ │ ... - │ │ whs_mst_nodatarecord.tsv - │ │ - │ ├─TestFormatErrorFile -- 異常ケースの単体確認用 - │ │ bio_slip_data_formaterror.tsv -- 末尾行のタブ数が想定と異なる(ファイル欠落がある)ときの動作確認用 - │ │ ... - │ │ whs_mst_formaterror.tsv - │ │ - │ ├─TestImportFileToDb -- 正常ケースの単体確認用 - │ │ bio_slip_data_202304270000.gz -- 対向元システムから送られてきた状態(gz圧縮)の受領データファイルの動作確認用 - │ │ ... - │ │ whs_mst_202304270000.gz - │ │ - │ └─UnzipError -- 異常ケースの単体確認用 - │ bio_slip_data_202304270000.gz -- gz圧縮ファイルが解凍できないときの動作確認用 - │ ... 
- │ whs_mst_202304270000.gz - │ - ├── conftest.py -- テスト内で共通利用できるフィクスチャを宣言する(執筆時点ではDBのみ) - ├── testing_utility.py -- テストの共通関数 - └── testing_vjsk_utility.py -- テストの共通関数(実消化データ取込処理関連) -``` - diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py deleted file mode 100644 index 62891bf7..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/entrypoint.py +++ /dev/null @@ -1,10 +0,0 @@ -"""実消化&アルトマーク DCF施設削除新規マスタ作成のエントリーポイント""" -from src import jobctrl_jobctrl_dcfInstMergeIo - -if __name__ == '__main__': - try: - exit(jobctrl_jobctrl_dcfInstMergeIo.exec()) - except Exception: - # エラーが起きても、正常系のコードで返す。 - # エラーが起きた事実はbatch_process内でログを出す。 - exit(0) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini deleted file mode 100644 index 5dbe2661..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -log_format = %(levelname)s %(asctime)s %(message)s -log_date_format = %Y-%m-%d %H:%M:%S diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py deleted file mode 100644 index 6203868d..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/aws/s3.py +++ /dev/null @@ -1,185 +0,0 @@ -import gzip -import os -import os.path as path -import shutil -import tempfile - -import boto3 - -from src.system_var 
import environment - - -class S3Client: - __s3_client = boto3.client('s3') - _bucket_name: str - - def list_objects(self, bucket_name: str, folder_name: str): - response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) - if response['KeyCount'] == 0: - return [] - contents = response['Contents'] - # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする - objects = [{'filename': content['Key'], 'size': content['Size']} - for content in contents if not content['Key'].endswith('/')] - return objects - - def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: - copy_source = {'Bucket': src_bucket, 'Key': src_key} - self.__s3_client.copy(copy_source, dest_bucket, dest_key) - return - - def download_file(self, bucket_name: str, file_key: str, file): - self.__s3_client.download_fileobj( - Bucket=bucket_name, - Key=file_key, - Fileobj=file - ) - return - - def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): - self.__s3_client.upload_file( - local_file_path, - Bucket=bucket_name, - Key=file_key - ) - - def delete_file(self, bucket_name: str, file_key: str): - self.__s3_client.delete_object( - Bucket=bucket_name, - Key=file_key - ) - - -class S3Bucket(): - _s3_client = S3Client() - _bucket_name: str = None - - -class UltmarcBucket(S3Bucket): - _bucket_name = environment.ULTMARC_DATA_BUCKET - _folder = environment.ULTMARC_DATA_FOLDER - - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def download_dat_file(self, dat_filename: str): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, dat_filename, f) - f.seek(0) - return temporary_file_path - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - ultmarc_backup_bucket = 
UltmarcBackupBucket() - backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) - # コピー元のファイルを削除 - self._s3_client.delete_file(self._bucket_name, dat_file_key) - - -class ConfigBucket(S3Bucket): - _bucket_name = environment.JSKULT_CONFIG_BUCKET - - def download_holiday_list(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) - holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) - f.seek(0) - return temporary_file_path - - def download_wholesaler_stock_input_day_list(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) - f.seek(0) - return temporary_file_path - - def download_ultmarc_hex_convert_config(self): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) - hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) - f.seek(0) - return temporary_file_path - - -class 
JskUltBackupBucket(S3Bucket): - _bucket_name = environment.JSKULT_BACKUP_BUCKET - - -class UltmarcBackupBucket(JskUltBackupBucket): - _folder = environment.ULTMARC_BACKUP_FOLDER - - -class VjskBackupBucket(JskUltBackupBucket): - _folder = environment.VJSK_BACKUP_FOLDER - - -class VjskReceiveBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER - - _s3_file_list = None - - def get_s3_file_list(self): - self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) - return self._s3_file_list - - def download_data_file(self, data_filename: str): - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, data_filename, f) - f.seek(0) - return temporary_file_path - - def unzip_data_file(self, filename: str): - temp_dir = os.path.dirname(filename) - decompress_filename = os.path.basename(filename).replace('.gz', '') - decompress_file_path = os.path.join(temp_dir, decompress_filename) - with gzip.open(filename, 'rb') as gz: - with open(decompress_file_path, 'wb') as decompressed_file: - shutil.copyfileobj(gz, decompressed_file) - - ret = [decompress_file_path] - return ret - - def backup_dat_file(self, target_files: list, datetime_key: str): - jskult_backup_bucket = VjskBackupBucket() - for target_file in target_files: - backup_from_file_path = target_file.get("filename") - backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") - backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' - self._s3_client.copy(self._bucket_name, backup_from_file_path, - jskult_backup_bucket._bucket_name, backup_key) - self._s3_client.delete_file(self._bucket_name, backup_from_file_path) - - -class VjskSendBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - 
_send_folder = environment.VJSK_DATA_SEND_FOLDER - - def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): - # S3バケットにファイルを移動 - csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' - s3_client = S3Client() - s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) - return - - def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - vjsk_backup_bucket = VjskBackupBucket() - dat_key = f'{self._send_folder}/{dat_file_key}' - backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/batch/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py deleted file mode 100644 index 5ddaba4e..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/db/database.py +++ /dev/null @@ -1,195 +0,0 @@ -from sqlalchemy import (Connection, CursorResult, Engine, QueuePool, - create_engine, text) -from sqlalchemy.engine.url import URL -from tenacity import retry, stop_after_attempt, wait_exponential - -from src.error.exceptions import DBException -from src.logging.get_logger import get_logger -from src.system_var import environment - -logger = get_logger(__name__) - - -class Database: - """データベース操作クラス""" - __connection: Connection = None - __transactional_engine: Engine = None - 
__autocommit_engine: Engine = None - __host: str = None - __port: str = None - __username: str = None - __password: str = None - __schema: str = None - __autocommit: bool = None - __connection_string: str = None - - def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: - """このクラスの新たなインスタンスを初期化します - - Args: - username (str): DBユーザー名 - password (str): DBパスワード - host (str): DBホスト名 - port (int): DBポート - schema (str): DBスキーマ名 - autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. - """ - self.__username = username - self.__password = password - self.__host = host - self.__port = int(port) - self.__schema = schema - self.__autocommit = autocommit - - self.__connection_string = URL.create( - drivername='mysql+pymysql', - username=self.__username, - password=self.__password, - host=self.__host, - port=self.__port, - database=self.__schema, - query={"charset": "utf8mb4", "local_infile": "1"}, - ) - - self.__transactional_engine = create_engine( - self.__connection_string, - pool_timeout=5, - poolclass=QueuePool - ) - - self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') - - @classmethod - def get_instance(cls, autocommit=False): - """インスタンスを取得します - - Args: - autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
- Returns: - Database: DB操作クラスインスタンス - """ - return cls( - username=environment.DB_USERNAME, - password=environment.DB_PASSWORD, - host=environment.DB_HOST, - port=environment.DB_PORT, - schema=environment.DB_SCHEMA, - autocommit=autocommit - ) - - @retry( - wait=wait_exponential( - multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT, - min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS, - max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS - ), - stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT), - retry_error_cls=DBException - ) - def connect(self): - """ - DBに接続します。接続に失敗した場合、リトライします。\n - インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) - Raises: - DBException: 接続失敗 - """ - try: - self.__connection = ( - self.__autocommit_engine.connect() if self.__autocommit is True - else self.__transactional_engine.connect()) - except Exception as e: - raise DBException(e) - - def execute_select(self, select_query: str, parameters=None) -> list[dict]: - """SELECTクエリを実行します。 - - Args: - select_query (str): SELECT文 - parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. - - Raises: - DBException: DBエラー - - Returns: - list[dict]: カラム名: 値の辞書リスト - """ - if self.__connection is None: - raise DBException('DBに接続していません') - - result = None - try: - # トランザクションが開始している場合は、トランザクションを引き継ぐ - if self.__connection.in_transaction(): - result = self.__connection.execute(text(select_query), parameters) - else: - # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 - result = self.__execute_with_transaction(select_query, parameters) - except Exception as e: - raise DBException(f'SQL Error: {e}') - - result_rows = result.mappings().all() - return result_rows - - def execute(self, query: str, parameters=None) -> CursorResult: - """SQLクエリを実行します。 - - Args: - query (str): SQL文 - parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None. 
- - Raises: - DBException: DBエラー - - Returns: - CursorResult: 取得結果 - """ - if self.__connection is None: - raise DBException('DBに接続していません') - - result = None - try: - # トランザクションが開始している場合は、トランザクションを引き継ぐ - if self.__connection.in_transaction(): - result = self.__connection.execute(text(query), parameters) - else: - # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。 - result = self.__execute_with_transaction(query, parameters) - except Exception as e: - raise DBException(f'SQL Error: {e}') - - return result - - def begin(self): - """トランザクションを開始します。""" - if not self.__connection.in_transaction(): - self.__connection.begin() - - def commit(self): - """トランザクションをコミットします""" - if self.__connection.in_transaction(): - self.__connection.commit() - - def rollback(self): - """トランザクションをロールバックします""" - if self.__connection.in_transaction(): - self.__connection.rollback() - - def disconnect(self): - """DB接続を切断します。""" - if self.__connection is not None: - self.__connection.close() - self.__connection = None - - def to_jst(self): - self.execute('SET time_zone = "+9:00"') - - def __execute_with_transaction(self, query: str, parameters: dict): - # トランザクションを開始してクエリを実行する - with self.__connection.begin(): - try: - result = self.__connection.execute(text(query), parameters=parameters) - except Exception as e: - self.__connection.rollback() - raise e - # ここでコミットされる - return result diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py deleted file mode 100644 index 055c24f6..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/error/exceptions.py +++ /dev/null @@ -1,10 +0,0 @@ -class MeDaCaException(Exception): - pass - - -class 
DBException(MeDaCaException): - pass - - -class BatchOperationException(MeDaCaException): - pass diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py deleted file mode 100644 index 9c29840c..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/jobctrl_dcfInstMergeIo.py +++ /dev/null @@ -1,4 +0,0 @@ -"""実消化&アルトマーク DCF施設削除新規マスタ作成""" - -def exec(): - pass \ No newline at end of file diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py deleted file mode 100644 index f36f1199..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/logging/get_logger.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging - -from src.system_var.environment import LOG_LEVEL - -# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する -for name in ["boto3", "botocore", "s3transfer", "urllib3"]: - logging.getLogger(name).setLevel(logging.WARNING) - - -def get_logger(log_name: str) -> logging.Logger: - """一意のログ出力モジュールを取得します。 - - Args: - log_name (str): ロガー名 - - Returns: - _type_: _description_ - """ - logger = logging.getLogger(log_name) - level = logging.getLevelName(LOG_LEVEL) - if not isinstance(level, int): - level = logging.INFO - logger.setLevel(level) - - if not logger.hasHandlers(): - handler = logging.StreamHandler() - logger.addHandler(handler) - - formatter = logging.Formatter( - '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s', - '%Y-%m-%d %H:%M:%S' - ) - - for handler in logger.handlers: - handler.setFormatter(formatter) - - return logger diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py deleted file mode 100644 index 8a0ccbb3..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/constants.py +++ /dev/null @@ -1,17 +0,0 @@ -# バッチ正常終了コード -BATCH_EXIT_CODE_SUCCESS = 0 - -# バッチ処理中フラグ:未処理 -BATCH_ACTF_BATCH_UNPROCESSED = '0' -# バッチ処理中フラグ:処理中 -BATCH_ACTF_BATCH_IN_PROCESSING = '1' -# dump取得状態区分:未処理 -DUMP_STATUS_KBN_UNPROCESSED = '0' -# dump取得状態区分:dump取得正常終了 -DUMP_STATUS_KBN_COMPLETE = '2' - -# カレンダーファイルのコメントシンボル -CALENDAR_COMMENT_SYMBOL = '#' - -# 月曜日(datetime.weekday()で月曜日を表す数字) -WEEKDAY_MONDAY = 0 diff --git a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py deleted file mode 100644 index 0af7a118..00000000 --- a/ecs/jskult-batch-dcf-inst-merge-io/jskult-batch-daily/src/system_var/environment.py +++ /dev/null @@ -1,38 +0,0 @@ -import os - -# Database -DB_HOST = os.environ['DB_HOST'] -DB_PORT = int(os.environ['DB_PORT']) -DB_USERNAME = os.environ['DB_USERNAME'] -DB_PASSWORD = os.environ['DB_PASSWORD'] -DB_SCHEMA = os.environ['DB_SCHEMA'] - -# AWS -ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] -ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] -JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] -ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] -VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] -JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] -JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] -VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] -VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = 
os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] -JSKULT_CONFIG_CONVERT_FOLDER = os.environ['JSKULT_CONFIG_CONVERT_FOLDER'] -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME = os.environ['JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME'] -VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER'] - -# 初期値がある環境変数 -LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') -DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) -DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) - -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD']) -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME'] -# 卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET = os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'] From cd1e663b4a133daf948114efbd385ba4ad37d656 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Mon, 26 May 2025 19:54:23 +0900 Subject: [PATCH 3/8] =?UTF-8?q?DCF=E6=96=BD=E8=A8=AD=E5=89=8A=E9=99=A4?= =?UTF-8?q?=E6=96=B0=E8=A6=8F=E3=83=9E=E3=82=B9=E3=82=BF=E3=81=AE=E4=BD=9C?= =?UTF-8?q?=E6=88=90=E3=81=A8DCF=E6=96=BD=E8=A8=AD=E7=B5=B1=E5=90=88?= =?UTF-8?q?=E3=83=9E=E3=82=B9=E3=82=BF(DCF=5FINST=5FMERGE)=E3=81=AE?= =?UTF-8?q?=E5=8F=96=E3=82=8A=E8=BE=BC=E3=81=BF=E3=80=81=E3=83=AD=E3=82=B0?= =?UTF-8?q?=E3=81=AE=E5=87=BA=E5=8A=9B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/dcf_inst_merge_io.py | 191 +++++++++++++++++- 1 file changed, 188 insertions(+), 3 deletions(-) diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index 710380c9..fcb104bc 100644 --- 
a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,10 +1,195 @@ -from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +import os +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager +from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.logging.get_logger import get_logger + +logger = get_logger('DCF削除新規マスタ作成') + +LOG_LEVEL = os.environ["LOG_LEVEL"] +PROCESS_NAME = os.environ["PROCESS_NAME"] +POST_PROCESS = os.environ["POST_PROCESS"] +MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +JSK_DATA_SEND_FOLDER = os.environ["JSK_DATA_SEND_FOLDER"] +JSK_BACKUP_FOLDER = os.environ["JSK_BACKUP_FOLDER"] +TRANSFER_RESULT_FOLDER = os.environ["TRANSFER_RESULT_FOLDER"] +DCF_INST_MERGE_SEND_FILE_NAME = os.environ["DCF_INST_MERGE_SEND_FILE_NAME"] +DB_CONNECTION_MAX_RETRY_ATTEMPT = os.environ["DB_CONNECTION_MAX_RETRY_ATTEMPT"] +DB_CONNECTION_RETRY_INTERVAL_INIT = os.environ["DB_CONNECTION_RETRY_INTERVAL_INIT"] +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS"] +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS"] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): super().__init__() + def execute(self): - # TODO: ここでDCF削除新規マスタ作成/データ出力処理を実行する - pass + jskultHdkeTblManager = JskultHdkeTblManager() + + if not jskultHdkeTblManager.can_run_process(): + return + + jskultBatchStatusManager = JskultBatchStatusManager( + PROCESS_NAME, + POST_PROCESS, + MAX_RUN_COUNT_FLG, + RECEIVE_FILE_COUNT + ) + + if not jskultBatchStatusManager.can_run_post_process(): + + # 処理ステータスを「処理待」に設定 + jskultBatchStatusManager.set_process_status("retry") + return + + # 
アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 + if jskultBatchStatusManager.is_done_ultmarc_import(): + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + if is_add_dcf_inst_merge: + _output_add_dcf_inst_merge_log(duplication_inst_records) + + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() + + # TODO DCF施設削除新規マスタをS3に出力 + + + + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: + sql ="""\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + vtrd.INST_CD + FROM + VW_TRN_RESULT_DATA AS vtrd + WHERE + vtrd.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + ; + + """ + duplication_inst_records = self._db.execute_select(sql) + + # DCF施設統合マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" 
+ values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + return (True, duplication_inst_records) + + + def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): + sys_update_date = duplication_inst_records[0]['sys_update_date'] + set_year_month = '{set_year}年{set_month}月'.format( + set_year=sys_update_date[0:4], + set_month=sys_update_date[-2:] + ) + + add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' + add_dct_inst_merge_list = [] + for row in duplication_inst_records: + add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) + + # 顧客報告用にログ出力 + logger.info( + f"""DCF削除新規マスタが追加されました。 +********************************************************** +適用月度 {set_year_month} +********************************************************** +{add_dct_inst_merge_list} +********************************************************** +合計 {len(duplication_inst_records)}件""" + ) + return \ No newline at end of file From e50b827d9f0d36d3770edbde3665044810aab50e Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Tue, 27 May 2025 16:38:47 +0900 Subject: [PATCH 4/8] =?UTF-8?q?=E5=8B=95=E4=BD=9C=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E5=89=8D=E3=82=B3=E3=83=9F=E3=83=83=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/src/aws/s3.py | 95 +----- .../src/batch/common/batch_context.py | 48 +++ .../src/batch/dcf_inst_merge_io.py | 285 ++++++++++-------- 3 files changed, 226 insertions(+), 202 deletions(-) create mode 100644 
ecs/jskult-batch/src/batch/common/batch_context.py diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index 6203868d..f70cc049 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -54,33 +54,8 @@ class S3Bucket(): _s3_client = S3Client() _bucket_name: str = None - -class UltmarcBucket(S3Bucket): - _bucket_name = environment.ULTMARC_DATA_BUCKET - _folder = environment.ULTMARC_DATA_FOLDER - - def list_dat_file(self): - return self._s3_client.list_objects(self._bucket_name, self._folder) - - def download_dat_file(self, dat_filename: str): - # 一時ファイルとして保存する - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{dat_filename.replace(f"{self._folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, dat_filename, f) - f.seek(0) - return temporary_file_path - - def backup_dat_file(self, dat_file_key: str, datetime_key: str): - # バックアップバケットにコピー - ultmarc_backup_bucket = UltmarcBackupBucket() - backup_key = f'{ultmarc_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_file_key, ultmarc_backup_bucket._bucket_name, backup_key) - # コピー元のファイルを削除 - self._s3_client.delete_file(self._bucket_name, dat_file_key) - - class ConfigBucket(S3Bucket): + # TODO 日付更新処理で内容の修正を行う _bucket_name = environment.JSKULT_CONFIG_BUCKET def download_holiday_list(self): @@ -118,68 +93,24 @@ class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET -class UltmarcBackupBucket(JskUltBackupBucket): - _folder = environment.ULTMARC_BACKUP_FOLDER - -class VjskBackupBucket(JskUltBackupBucket): +# TODO 設定値をecsタスク定義書から確認 +class JskBackupBucket(JskUltBackupBucket): _folder = environment.VJSK_BACKUP_FOLDER +class JskSendBucket(S3Bucket): + _bucket_name = environment.JSKULT_DATA_BUCKET + _send_folder = environment.JSKULT_DATA_SEND_FOLDER -class 
VjskReceiveBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER - - _s3_file_list = None - - def get_s3_file_list(self): - self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) - return self._s3_file_list - - def download_data_file(self, data_filename: str): - temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') - with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, data_filename, f) - f.seek(0) - return temporary_file_path - - def unzip_data_file(self, filename: str): - temp_dir = os.path.dirname(filename) - decompress_filename = os.path.basename(filename).replace('.gz', '') - decompress_file_path = os.path.join(temp_dir, decompress_filename) - with gzip.open(filename, 'rb') as gz: - with open(decompress_file_path, 'wb') as decompressed_file: - shutil.copyfileobj(gz, decompressed_file) - - ret = [decompress_file_path] - return ret - - def backup_dat_file(self, target_files: list, datetime_key: str): - jskult_backup_bucket = VjskBackupBucket() - for target_file in target_files: - backup_from_file_path = target_file.get("filename") - backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") - backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' - self._s3_client.copy(self._bucket_name, backup_from_file_path, - jskult_backup_bucket._bucket_name, backup_key) - self._s3_client.delete_file(self._bucket_name, backup_from_file_path) - - -class VjskSendBucket(S3Bucket): - _bucket_name = environment.VJSK_DATA_BUCKET - _send_folder = environment.VJSK_DATA_SEND_FOLDER - - def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + def upload_dcf_inst_merge_csv_file(self, jskult_create_csv: str, csv_file_path: str): # S3バケットにファイルを移動 - csv_file_name = 
f'{self._send_folder}/{vjsk_create_csv}' - s3_client = S3Client() - s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + csv_file_name = f'{self._send_folder}/{jskult_create_csv}' + self._s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) return - def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): + def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str): # バックアップバケットにコピー - vjsk_backup_bucket = VjskBackupBucket() + jskult_backup_bucket = JskUltBackupBucket() dat_key = f'{self._send_folder}/{dat_file_key}' - backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) + backup_key = f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, jskult_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch/src/batch/common/batch_context.py b/ecs/jskult-batch/src/batch/common/batch_context.py new file mode 100644 index 00000000..b3fc4967 --- /dev/null +++ b/ecs/jskult-batch/src/batch/common/batch_context.py @@ -0,0 +1,48 @@ +class BatchContext: + __instance = None + __syor_date: str # 処理日(yyyy/mm/dd形式) + __is_not_business_day: bool # 日次バッチ起動日フラグ + __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ + __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ + + def __init__(self) -> None: + self.__is_not_business_day = False + self.__is_ultmarc_imported = False + + @classmethod + def get_instance(cls): + if cls.__instance is None: + cls.__instance = cls() + return cls.__instance + + @property + def syor_date(self): + return self.__syor_date + + @syor_date.setter + def syor_date(self, syor_date_str: str): + self.__syor_date = syor_date_str + + @property + def is_not_business_day(self): + return 
self.__is_not_business_day + + @is_not_business_day.setter + def is_not_business_day(self, flag: bool): + self.__is_not_business_day = flag + + @property + def is_ultmarc_imported(self): + return self.__is_ultmarc_imported + + @is_ultmarc_imported.setter + def is_ultmarc_imported(self, flag: bool): + self.__is_ultmarc_imported = flag + + @property + def is_vjsk_stock_import_day(self): + return self.__is_vjsk_stock_import_day + + @is_vjsk_stock_import_day.setter + def is_vjsk_stock_import_day(self, flag: bool): + self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index fcb104bc..5a0199f0 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,6 +1,12 @@ import os +import csv +import os.path as path +import tempfile +from src.aws.s3 import JskSendBucket + from src.db.database import Database from src.error.exceptions import BatchOperationException +from src.batch.common.batch_context import BatchContext from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager @@ -9,19 +15,11 @@ from src.logging.get_logger import get_logger logger = get_logger('DCF削除新規マスタ作成') -LOG_LEVEL = os.environ["LOG_LEVEL"] PROCESS_NAME = os.environ["PROCESS_NAME"] POST_PROCESS = os.environ["POST_PROCESS"] MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -JSK_DATA_SEND_FOLDER = os.environ["JSK_DATA_SEND_FOLDER"] -JSK_BACKUP_FOLDER = os.environ["JSK_BACKUP_FOLDER"] -TRANSFER_RESULT_FOLDER = os.environ["TRANSFER_RESULT_FOLDER"] -DCF_INST_MERGE_SEND_FILE_NAME = os.environ["DCF_INST_MERGE_SEND_FILE_NAME"] -DB_CONNECTION_MAX_RETRY_ATTEMPT = os.environ["DB_CONNECTION_MAX_RETRY_ATTEMPT"] -DB_CONNECTION_RETRY_INTERVAL_INIT = os.environ["DB_CONNECTION_RETRY_INTERVAL_INIT"] 
-DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS"] -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = os.environ["DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS"] +CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): @@ -49,125 +47,135 @@ class DcfInstMergeIO(JskultBatchEntrypoint): # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 if jskultBatchStatusManager.is_done_ultmarc_import(): + + (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + if is_add_dcf_inst_merge: + + # COM_施設からDCF削除新規マスタに登録 + _output_add_dcf_inst_merge_log(duplication_inst_records) + + # CSV出力 + file_path = _make_csv_data(CSV_FILE_NAME) + + # CSVをS3にアップロード + _upload_dcf_inst_merge_csv_file(CSV_FILE_NAME, file_path) + + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: + # com_instからdcf_inst_mergeにinsert try: self._db = Database.get_instance() self._db.connect() self._db.begin() self._db.to_jst() - (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) - if is_add_dcf_inst_merge: - _output_add_dcf_inst_merge_log(duplication_inst_records) + sql ="""\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + src05.COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + src07.DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + src07.MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + src07.ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD + ) + OR 
ci.DCF_DSF_INST_CD EXISTS( + SELECT + trd.INST_CD + FROM + src07.TRN_RESULT_DATA AS trd + WHERE + trd.INST_CD = ci.DCF_DSF_INST_CD + ) + ) + ; + + """ + duplication_inst_records = self._db.execute_select(sql) + + # DCF削除新規マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" + values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + + self._db.execute(insert_sql, params) + + return (True, duplication_inst_records) except Exception as e: self._db.rollback() raise BatchOperationException(e) finally: self._db.disconnect() - # TODO DCF施設削除新規マスタをS3に出力 - - - - def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: - sql ="""\ - SELECT - ci.DCF_DSF_INST_CD, - ci.FORM_INST_NAME_KANJI, - ci.DELETE_SCHE_REASON_CD, - ci.DUP_OPP_CD, - ci.SYS_UPDATE_DATE - FROM - COM_INST AS ci - WHERE - ci.DUP_OPP_CD IS NOT NULL - AND - ci.DELETE_SCHE_REASON_CD = 'D' - AND - ci.DELETE_DATA IS NULL - AND - ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() - AND - NOT EXISTS ( - SELECT - dim.DCF_INST_CD - FROM - DCF_INST_MERGE AS dim - WHERE - dim.DCF_INST_CD = ci.DCF_DSF_INST_CD - ) - AND - (ci.DCF_DSF_INST_CD EXISTS( - SELECT - mia.INST_CD - FROM - MST_INST_ASSN as mia - WHERE - mia.INST_CD = ci.DCF_DSF_INST_CD - ) - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - 
ap.PRSB_INST_CD - FROM - ATC_PHARM AS ap - WHERE - ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - vtrd.INST_CD - FROM - VW_TRN_RESULT_DATA AS vtrd - WHERE - vtrd.INST_CD = ci.DCF_DSF_INST_CD - ) - ) - ; - - """ - duplication_inst_records = self._db.execute_select(sql) - - # DCF施設統合マスタ取り込み - values_clauses = [] - params = {} - for clauses_no, row in enumerate(duplication_inst_records, start=1): - dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" - dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" - values_clause = f"""(:{dcf_inst_cd_arr}, - :{dup_opp_cd_arr}, - DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), - NULL, - NULL, - NULL, - "Y", - batchuser, - SYSDATE(), - batchuser, - SYSDATE() - )""" - values_clauses.append(values_clause) - params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] - params[dup_opp_cd_arr] = row['DUP_OPP_CD'] - insert_sql = f""" - INSERT INTO - src07.dcf_inst_merge ( - DCF_INST_CD, - DUP_OPP_CD, - START_MONTH, - INVALID_FLG, - REMARKS, - DCF_INST_CD_NEW, - ENABLED_FLG, - CREATER, - CREATE_DATE, - UPDATER, - UPDATE_DATE - ) - VALUES - {','.join(values_clauses)} - """ - return (True, duplication_inst_records) - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): sys_update_date = duplication_inst_records[0]['sys_update_date'] @@ -181,15 +189,52 @@ class DcfInstMergeIO(JskultBatchEntrypoint): for row in duplication_inst_records: add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) - # 顧客報告用にログ出力 logger.info( - f"""DCF削除新規マスタが追加されました。 + f"""DCF施設統合マスタが追加されました。 ********************************************************** 適用月度 {set_year_month} ********************************************************** {add_dct_inst_merge_list} ********************************************************** 合計 {len(duplication_inst_records)}件""" - ) - return \ No newline at end of file + ) + return + + + def _make_csv_data(record_inst: list, 
csv_file_name: str): + # CSVファイルを作成する + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, csv_file_name) + + head_str = ['DCF_INST_CD','DUP_OPP_CD','START_MONTH', + 'INVALID_FLG','REMARKS','DCF_INST_CD_NEW','ENABLED_FLG', + 'CREATER','CREATE_DATE','UPDATER','UPDATE_DATE'] + + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") + + # Shift-JIS、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = ['' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + + return csv_file_path + + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): + # S3バケットにファイルを移動 + jsk_send_bucket = JskSendBucket() + # バッチ共通設定を取得 + batch_context = BatchContext.get_instance() + + jsk_send_bucket.upload_dcf_inst_merge_csv_file(csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file(csv_file_name, batch_context.syor_date) + return \ No newline at end of file From 0086486841576d3f50a301814fc2aff4e09d7614 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Tue, 27 May 2025 20:14:03 +0900 Subject: [PATCH 5/8] =?UTF-8?q?=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C=20entr?= =?UTF-8?q?ypoint=E5=AE=9F=E8=A1=8C=E6=B8=88=20DB=E6=8E=A5=E7=B6=9A?= =?UTF-8?q?=E3=81=AE=E9=83=A8=E5=88=86=E3=81=AF=E6=9C=AA=E7=A2=BA=E8=AA=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.vscode/launch.json | 3 +- ecs/jskult-batch/src/aws/s3.py | 37 ++-- .../src/batch/dcf_inst_merge_io.py | 176 +++++++++++------- .../src/batch/jskult_batch_entrypoint.py | 2 +- ecs/jskult-batch/src/error/exceptions.py | 4 + .../src/system_var/environment.py 
| 23 ++- ecs/jskult-batch/test.py | 0 7 files changed, 159 insertions(+), 86 deletions(-) create mode 100644 ecs/jskult-batch/test.py diff --git a/ecs/jskult-batch/.vscode/launch.json b/ecs/jskult-batch/.vscode/launch.json index bcd1c6dd..8bbb94a0 100644 --- a/ecs/jskult-batch/.vscode/launch.json +++ b/ecs/jskult-batch/.vscode/launch.json @@ -10,7 +10,8 @@ "request": "launch", "program": "entrypoint.py", "console": "integratedTerminal", - "justMyCode": true + "justMyCode": true, + "envFile": "${workspaceFolder}/.env" } ] } \ No newline at end of file diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index f70cc049..66032e1c 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -14,7 +14,8 @@ class S3Client: _bucket_name: str def list_objects(self, bucket_name: str, folder_name: str): - response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + response = self.__s3_client.list_objects_v2( + Bucket=bucket_name, Prefix=folder_name) if response['KeyCount'] == 0: return [] contents = response['Contents'] @@ -54,6 +55,7 @@ class S3Bucket(): _s3_client = S3Client() _bucket_name: str = None + class ConfigBucket(S3Bucket): # TODO 日付更新処理で内容の修正を行う _bucket_name = environment.JSKULT_CONFIG_BUCKET @@ -61,30 +63,36 @@ class ConfigBucket(S3Bucket): def download_holiday_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME) holiday_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, holiday_list_key, f) + self._s3_client.download_file( + self._bucket_name, holiday_list_key, f) f.seek(0) return temporary_file_path 
def download_wholesaler_stock_input_day_list(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) + self._s3_client.download_file( + self._bucket_name, wholesaler_stock_input_day_list_key, f) f.seek(0) return temporary_file_path def download_ultmarc_hex_convert_config(self): # 一時ファイルとして保存する temporary_dir = tempfile.mkdtemp() - temporary_file_path = path.join(temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) + temporary_file_path = path.join( + temporary_dir, environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME) hex_convert_config_key = f'{environment.JSKULT_CONFIG_CONVERT_FOLDER}/{environment.JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME}' with open(temporary_file_path, mode='wb') as f: - self._s3_client.download_file(self._bucket_name, hex_convert_config_key, f) + self._s3_client.download_file( + self._bucket_name, hex_convert_config_key, f) f.seek(0) return temporary_file_path @@ -93,19 +101,19 @@ class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET - -# TODO 設定値をecsタスク定義書から確認 class JskBackupBucket(JskUltBackupBucket): - _folder = environment.VJSK_BACKUP_FOLDER + _folder = environment.JSKULT_BACKUP_BUCKET + class JskSendBucket(S3Bucket): - _bucket_name = environment.JSKULT_DATA_BUCKET - _send_folder = environment.JSKULT_DATA_SEND_FOLDER + _bucket_name = environment.JSK_IO_BUCKET + _send_folder = environment.JSK_DATA_SEND_FOLDER def upload_dcf_inst_merge_csv_file(self, jskult_create_csv: str, 
csv_file_path: str): # S3バケットにファイルを移動 csv_file_name = f'{self._send_folder}/{jskult_create_csv}' - self._s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + self._s3_client.upload_file( + csv_file_path, self._bucket_name, csv_file_name) return def backup_dcf_inst_merge_csv_file(self, dat_file_key: str, datetime_key: str): @@ -113,4 +121,5 @@ class JskSendBucket(S3Bucket): jskult_backup_bucket = JskUltBackupBucket() dat_key = f'{self._send_folder}/{dat_file_key}' backup_key = f'{jskult_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' - self._s3_client.copy(self._bucket_name, dat_key, jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.copy(self._bucket_name, dat_key, + jskult_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index 5a0199f0..e3e7ed12 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,64 +1,110 @@ -import os import csv import os.path as path import tempfile + from src.aws.s3 import JskSendBucket - -from src.db.database import Database -from src.error.exceptions import BatchOperationException from src.batch.common.batch_context import BatchContext - from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint +from src.db.database import Database +from src.error.exceptions import BatchOperationException, MaxRunCountReachedException +from src.manager.jskult_batch_run_manager import JskultBatchRunManager from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.system_var import environment from src.logging.get_logger import get_logger logger = get_logger('DCF削除新規マスタ作成') -PROCESS_NAME = os.environ["PROCESS_NAME"] -POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT_FLG = 
os.environ["MAX_RUN_COUNT_FLG"] -RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] class DcfInstMergeIO(JskultBatchEntrypoint): def __init__(self): super().__init__() - def execute(self): - jskultHdkeTblManager = JskultHdkeTblManager() - - if not jskultHdkeTblManager.can_run_process(): - return - + jskultBatchRunManager = JskultBatchRunManager( + environment.BATCH_EXECUTION_ID) jskultBatchStatusManager = JskultBatchStatusManager( - PROCESS_NAME, - POST_PROCESS, - MAX_RUN_COUNT_FLG, - RECEIVE_FILE_COUNT + environment.PROCESS_NAME, + environment.POST_PROCESS, + environment.MAX_RUN_COUNT_FLG, + environment.RECEIVE_FILE_COUNT ) - if not jskultBatchStatusManager.can_run_post_process(): + try: + jskultHdkeTblManager = JskultHdkeTblManager() - # 処理ステータスを「処理待」に設定 - jskultBatchStatusManager.set_process_status("retry") - return - - # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 - if jskultBatchStatusManager.is_done_ultmarc_import(): - - (is_add_dcf_inst_merge, duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + if not jskultHdkeTblManager.can_run_process(): + logger.error( + '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') + return + + jskultBatchStatusManager.set_process_status("start") + try: + if not jskultBatchStatusManager.can_run_post_process(): + # リトライ判断された場合 + # 処理ステータスを「処理待」に設定 + jskultBatchStatusManager.set_process_status("waiting") + + # バッチ実行管理テーブルに「retry」で登録 + jskultBatchRunManager.batch_retry() + + return + except MaxRunCountReachedException as e: + logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。') + + jskultBatchStatusManager.set_process_status("doing") + + # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 + if jskultBatchStatusManager.is_done_ultmarc_import(): + + # + (is_add_dcf_inst_merge, + duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - + # COM_施設からDCF削除新規マスタに登録 _output_add_dcf_inst_merge_log(duplication_inst_records) - - # CSV出力 - file_path = 
_make_csv_data(CSV_FILE_NAME) + dcf_inst_merge_all_records = _select_dcf_inst_merge_all() + # CSV出力 + file_path = _make_csv_data( + dcf_inst_merge_all_records, environment.CSV_FILE_NAME) - # CSVをS3にアップロード - _upload_dcf_inst_merge_csv_file(CSV_FILE_NAME, file_path) + # CSVをS3にアップロード + _upload_dcf_inst_merge_csv_file( + file_path, environment.CSV_FILE_NAME) + + # 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 + logger.info("DCF削除新規マスタ作成処理を正常終了します。") + + jskultBatchRunManager.batch_success() + jskultBatchStatusManager.set_process_status("done") + + except: + # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 + logger.error("エラーが発生したため、DCF削除新規マスタ作成処理を終了します。") + jskultBatchRunManager.batch_failed() + jskultBatchStatusManager.set_process_status("failed") + + def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + sql = """\ + SELECT + * + FROM + src07.dcf_inst_merge + """ + dcf_inst_merge_all_records = self._db.execute_select(sql) + return dcf_inst_merge_all_records + + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: # com_instからdcf_inst_mergeにinsert @@ -68,7 +114,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): self._db.begin() self._db.to_jst() - sql ="""\ + sql = """\ SELECT ci.DCF_DSF_INST_CD, ci.FORM_INST_NAME_KANJI, @@ -176,18 +222,18 @@ class DcfInstMergeIO(JskultBatchEntrypoint): finally: self._db.disconnect() - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): sys_update_date = duplication_inst_records[0]['sys_update_date'] set_year_month = '{set_year}年{set_month}月'.format( set_year=sys_update_date[0:4], set_month=sys_update_date[-2:] - ) + ) add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' add_dct_inst_merge_list = [] for row in 
duplication_inst_records: - add_dct_inst_merge_list.append(add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list.append( + add_dct_inst_merge.format(**row)) add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) # 顧客報告用にログ出力 logger.info( @@ -201,40 +247,42 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ) return + def _make_csv_data(csv_file_name: str, record_inst: list): + # CSVファイルを作成する + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, csv_file_name) - def _make_csv_data(record_inst: list, csv_file_name: str): - # CSVファイルを作成する - temporary_dir = tempfile.mkdtemp() - csv_file_path = path.join(temporary_dir, csv_file_name) + head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', + 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', + 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] - head_str = ['DCF_INST_CD','DUP_OPP_CD','START_MONTH', - 'INVALID_FLG','REMARKS','DCF_INST_CD_NEW','ENABLED_FLG', - 'CREATER','CREATE_DATE','UPDATER','UPDATE_DATE'] + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") - with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: - # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) - csv_file.write(f"{','.join(head_str)}\n") + # Shift-JIS、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) - # Shift-JIS、CRLF、価囲いありで書き込む - writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', - quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, - strict=True - ) + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = [ + '' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) - # データ部分書き込み(施設) - for record_inst_data in record_inst: - record_inst_value = 
list(record_inst_data.values()) - csv_data = ['' if n is None else n for n in record_inst_value] - writer.writerow(csv_data) + return csv_file_path - return csv_file_path - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): # S3バケットにファイルを移動 jsk_send_bucket = JskSendBucket() # バッチ共通設定を取得 batch_context = BatchContext.get_instance() - jsk_send_bucket.upload_dcf_inst_merge_csv_file(csv_file_name, csv_file_path) - jsk_send_bucket.backup_dcf_inst_merge_csv_file(csv_file_name, batch_context.syor_date) - return \ No newline at end of file + jsk_send_bucket.upload_dcf_inst_merge_csv_file( + csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file( + csv_file_name, batch_context.syor_date) + return diff --git a/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py b/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py index 291a8d1f..47f34952 100644 --- a/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py +++ b/ecs/jskult-batch/src/batch/jskult_batch_entrypoint.py @@ -3,6 +3,6 @@ import abc class JskultBatchEntrypoint(metaclass=abc.ABCMeta): - @abc.abstractmethod() + @abc.abstractmethod def execute(self): pass diff --git a/ecs/jskult-batch/src/error/exceptions.py b/ecs/jskult-batch/src/error/exceptions.py index 055c24f6..aa5f9be6 100644 --- a/ecs/jskult-batch/src/error/exceptions.py +++ b/ecs/jskult-batch/src/error/exceptions.py @@ -8,3 +8,7 @@ class DBException(MeDaCaException): class BatchOperationException(MeDaCaException): pass + + +class MaxRunCountReachedException(MeDaCaException): + pass diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index e70d8bb4..2d5d5f41 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -6,13 +6,24 @@ DB_PORT = int(os.environ['DB_PORT']) DB_USERNAME = os.environ['DB_USERNAME'] DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] - -# 処理名 
+JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] +BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] +POST_PROCESS = os.environ["POST_PROCESS"] +MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] +JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] +JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] +JSK_DATA_SEND_FOLDER = os.environ['JSK_DATA_SEND_FOLDER'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') -DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) -DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) -DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) -DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) +DB_CONNECTION_MAX_RETRY_ATTEMPT = int( + os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4)) +DB_CONNECTION_RETRY_INTERVAL_INIT = int( + os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5)) +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5)) +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int( + os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50)) diff --git a/ecs/jskult-batch/test.py b/ecs/jskult-batch/test.py new file mode 100644 index 00000000..e69de29b From d243768e5858400f6341bdd1d6730f23971ed682 Mon Sep 17 00:00:00 2001 From: "mori.k" Date: Wed, 28 May 2025 12:09:43 +0900 Subject: [PATCH 6/8] =?UTF-8?q?=E3=83=AC=E3=83=93=E3=83=A5=E3=83=BC?= =?UTF-8?q?=E6=8C=87=E6=91=98=E5=AF=BE=E5=BF=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/batch/common/batch_context.py | 48 --- .../src/batch/dcf_inst_merge_io.py | 403 +++++++++--------- .../src/system_var/environment.py | 4 +- 3 files changed, 202 insertions(+), 
253 deletions(-) delete mode 100644 ecs/jskult-batch/src/batch/common/batch_context.py diff --git a/ecs/jskult-batch/src/batch/common/batch_context.py b/ecs/jskult-batch/src/batch/common/batch_context.py deleted file mode 100644 index b3fc4967..00000000 --- a/ecs/jskult-batch/src/batch/common/batch_context.py +++ /dev/null @@ -1,48 +0,0 @@ -class BatchContext: - __instance = None - __syor_date: str # 処理日(yyyy/mm/dd形式) - __is_not_business_day: bool # 日次バッチ起動日フラグ - __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ - __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ - - def __init__(self) -> None: - self.__is_not_business_day = False - self.__is_ultmarc_imported = False - - @classmethod - def get_instance(cls): - if cls.__instance is None: - cls.__instance = cls() - return cls.__instance - - @property - def syor_date(self): - return self.__syor_date - - @syor_date.setter - def syor_date(self, syor_date_str: str): - self.__syor_date = syor_date_str - - @property - def is_not_business_day(self): - return self.__is_not_business_day - - @is_not_business_day.setter - def is_not_business_day(self, flag: bool): - self.__is_not_business_day = flag - - @property - def is_ultmarc_imported(self): - return self.__is_ultmarc_imported - - @is_ultmarc_imported.setter - def is_ultmarc_imported(self, flag: bool): - self.__is_ultmarc_imported = flag - - @property - def is_vjsk_stock_import_day(self): - return self.__is_vjsk_stock_import_day - - @is_vjsk_stock_import_day.setter - def is_vjsk_stock_import_day(self, flag: bool): - self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index e3e7ed12..a7298153 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -2,8 +2,7 @@ import csv import os.path as path import tempfile -from src.aws.s3 import JskSendBucket -from src.batch.common.batch_context import BatchContext +from src.aws.s3 
import S3Client, JskSendBucket from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.db.database import Database from src.error.exceptions import BatchOperationException, MaxRunCountReachedException @@ -23,16 +22,24 @@ class DcfInstMergeIO(JskultBatchEntrypoint): def execute(self): jskultBatchRunManager = JskultBatchRunManager( environment.BATCH_EXECUTION_ID) + jskultHdkeTblManager = JskultHdkeTblManager() + + # /transfer_result/yyyy/mm/dd/ + jskult_backuo_folder_name = f"""/transfer_result/{jskultHdkeTblManager.get_batch_statuses()[2]}""" + + receive_file_count = S3Client.list_objects( + environment.JSKULT_BACKUP_BUCKET, jskult_backuo_folder_name).count() + jskultBatchStatusManager = JskultBatchStatusManager( environment.PROCESS_NAME, + + # TODO チケットNEWDWH2021-1847の実装で作成した定数に置き換え environment.POST_PROCESS, - environment.MAX_RUN_COUNT_FLG, - environment.RECEIVE_FILE_COUNT + environment.MAX_RUN_COUNT, + receive_file_count ) try: - jskultHdkeTblManager = JskultHdkeTblManager() - if not jskultHdkeTblManager.can_run_process(): logger.error( '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') @@ -41,7 +48,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): jskultBatchStatusManager.set_process_status("start") try: if not jskultBatchStatusManager.can_run_post_process(): - # リトライ判断された場合 + # 後続処理の起動条件を満たしていない場合 # 処理ステータスを「処理待」に設定 jskultBatchStatusManager.set_process_status("waiting") @@ -57,21 +64,23 @@ class DcfInstMergeIO(JskultBatchEntrypoint): # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 if jskultBatchStatusManager.is_done_ultmarc_import(): - # + # COM_施設からDCF削除新規マスタに登録 (is_add_dcf_inst_merge, - duplication_inst_records) = _insert_dcf_inst_merge_from_com_inst(self) + duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - # COM_施設からDCF削除新規マスタに登録 - _output_add_dcf_inst_merge_log(duplication_inst_records) - dcf_inst_merge_all_records = _select_dcf_inst_merge_all() + self._output_add_dcf_inst_merge_log( + 
duplication_inst_records) + dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() # CSV出力 - file_path = _make_csv_data( - dcf_inst_merge_all_records, environment.CSV_FILE_NAME) + file_path = self._make_csv_data( + environment.DCF_INST_MERGE_SEND_FILE_NAME, + dcf_inst_merge_all_records) # CSVをS3にアップロード - _upload_dcf_inst_merge_csv_file( - file_path, environment.CSV_FILE_NAME) + + self._upload_dcf_inst_merge_csv_file( + file_path, environment.DCF_INST_MERGE_SEND_FILE_NAME) # 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 logger.info("DCF削除新規マスタ作成処理を正常終了します。") @@ -79,210 +88,198 @@ class DcfInstMergeIO(JskultBatchEntrypoint): jskultBatchRunManager.batch_success() jskultBatchStatusManager.set_process_status("done") - except: + except Exception as e: # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 - logger.error("エラーが発生したため、DCF削除新規マスタ作成処理を終了します。") + logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}') + jskultBatchRunManager.batch_failed() jskultBatchStatusManager.set_process_status("failed") - def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: - try: - self._db = Database.get_instance() - self._db.connect() - self._db.begin() - self._db.to_jst() - sql = """\ - SELECT - * - FROM - src07.dcf_inst_merge - """ - dcf_inst_merge_all_records = self._db.execute_select(sql) - return dcf_inst_merge_all_records + def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: + try: + self._db = Database.get_instance() + self._db.connect() + sql = """\ + SELECT + * + FROM + src07.dcf_inst_merge + """ + dcf_inst_merge_all_records = self._db.execute_select(sql) + return dcf_inst_merge_all_records + except Exception as e: + raise BatchOperationException(e) + finally: + self._db.disconnect() - except Exception as e: - self._db.rollback() - raise BatchOperationException(e) - finally: - self._db.disconnect() + # com_instからdcf_inst_mergeにinsert + def _insert_dcf_inst_merge_from_com_inst(self) -> tuple[bool, list[dict]]: - def _insert_dcf_inst_merge_from_com_inst(self) 
-> tuple[bool, list[dict]]: - # com_instからdcf_inst_mergeにinsert - try: - self._db = Database.get_instance() - self._db.connect() - self._db.begin() - self._db.to_jst() - - sql = """\ - SELECT - ci.DCF_DSF_INST_CD, - ci.FORM_INST_NAME_KANJI, - ci.DELETE_SCHE_REASON_CD, - ci.DUP_OPP_CD, - ci.SYS_UPDATE_DATE - FROM - src05.COM_INST AS ci - WHERE - ci.DUP_OPP_CD IS NOT NULL - AND - ci.DELETE_SCHE_REASON_CD = 'D' - AND - ci.DELETE_DATA IS NULL - AND - ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() - AND - NOT EXISTS ( - SELECT - dim.DCF_INST_CD - FROM - src07.DCF_INST_MERGE AS dim - WHERE - dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + try: + self._db = Database.get_instance() + self._db.connect() + self._db.begin() + self._db.to_jst() + sql = """\ + SELECT + ci.DCF_DSF_INST_CD, + ci.FORM_INST_NAME_KANJI, + ci.DELETE_SCHE_REASON_CD, + ci.DUP_OPP_CD, + ci.SYS_UPDATE_DATE + FROM + src05.COM_INST AS ci + WHERE + ci.DUP_OPP_CD IS NOT NULL + AND + ci.DELETE_SCHE_REASON_CD = 'D' + AND + ci.DELETE_DATA IS NULL + AND + ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() + AND + NOT EXISTS ( + SELECT + dim.DCF_INST_CD + FROM + src07.DCF_INST_MERGE AS dim + WHERE + dim.DCF_INST_CD = ci.DCF_DSF_INST_CD + ) + AND + + (ci.DCF_DSF_INST_CD EXISTS( + SELECT + mia.INST_CD + FROM + src07.MST_INST_ASSN as mia + WHERE + mia.INST_CD = ci.DCF_DSF_INST_CD + ) ) - AND - - (ci.DCF_DSF_INST_CD EXISTS( - SELECT - mia.INST_CD - FROM - src07.MST_INST_ASSN as mia - WHERE - mia.INST_CD = ci.DCF_DSF_INST_CD - ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + ap.PRSB_INST_CD + FROM + src07.ATC_PHARM AS ap + WHERE + ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - ap.PRSB_INST_CD - FROM - src07.ATC_PHARM AS ap - WHERE - ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD - ) - OR ci.DCF_DSF_INST_CD EXISTS( - SELECT - trd.INST_CD - FROM - src07.TRN_RESULT_DATA AS trd - WHERE - trd.INST_CD = ci.DCF_DSF_INST_CD - ) + OR ci.DCF_DSF_INST_CD EXISTS( + SELECT + trd.INST_CD + FROM + 
src07.TRN_RESULT_DATA AS trd + WHERE + trd.INST_CD = ci.DCF_DSF_INST_CD ) - ; + ) + ; + """ + duplication_inst_records = self._db.execute_select(sql) + # DCF削除新規マスタ取り込み + values_clauses = [] + params = {} + for clauses_no, row in enumerate(duplication_inst_records, start=1): + dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" + dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" + values_clause = f"""(:{dcf_inst_cd_arr}, + :{dup_opp_cd_arr}, + DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), + NULL, + NULL, + NULL, + "Y", + batchuser, + SYSDATE(), + batchuser, + SYSDATE() + )""" + values_clauses.append(values_clause) + params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] + params[dup_opp_cd_arr] = row['DUP_OPP_CD'] + insert_sql = f""" + INSERT INTO + src07.dcf_inst_merge ( + DCF_INST_CD, + DUP_OPP_CD, + START_MONTH, + INVALID_FLG, + REMARKS, + DCF_INST_CD_NEW, + ENABLED_FLG, + CREATER, + CREATE_DATE, + UPDATER, + UPDATE_DATE + ) + VALUES + {','.join(values_clauses)} + """ + self._db.execute(insert_sql, params) + return (True, duplication_inst_records) + except Exception as e: + self._db.rollback() + raise BatchOperationException(e) + finally: + self._db.disconnect() - """ - duplication_inst_records = self._db.execute_select(sql) - - # DCF削除新規マスタ取り込み - values_clauses = [] - params = {} - for clauses_no, row in enumerate(duplication_inst_records, start=1): - dcf_inst_cd_arr = f"DCF_INST_CD{clauses_no}" - dup_opp_cd_arr = f"DUP_OPP_CD{clauses_no}" - values_clause = f"""(:{dcf_inst_cd_arr}, - :{dup_opp_cd_arr}, - DATE_FORMAT((src07.get_syor_date() + INTERVAL 1 MONTH), - NULL, - NULL, - NULL, - "Y", - batchuser, - SYSDATE(), - batchuser, - SYSDATE() - )""" - values_clauses.append(values_clause) - params[dcf_inst_cd_arr] = row['DCF_DSF_INST_CD'] - params[dup_opp_cd_arr] = row['DUP_OPP_CD'] - insert_sql = f""" - INSERT INTO - src07.dcf_inst_merge ( - DCF_INST_CD, - DUP_OPP_CD, - START_MONTH, - INVALID_FLG, - REMARKS, - DCF_INST_CD_NEW, - ENABLED_FLG, - CREATER, - CREATE_DATE, - 
UPDATER, - UPDATE_DATE - ) - VALUES - {','.join(values_clauses)} - """ - - self._db.execute(insert_sql, params) - - return (True, duplication_inst_records) - except Exception as e: - self._db.rollback() - raise BatchOperationException(e) - finally: - self._db.disconnect() - - def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): - sys_update_date = duplication_inst_records[0]['sys_update_date'] - set_year_month = '{set_year}年{set_month}月'.format( - set_year=sys_update_date[0:4], - set_month=sys_update_date[-2:] - ) - - add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' - add_dct_inst_merge_list = [] - for row in duplication_inst_records: - add_dct_inst_merge_list.append( - add_dct_inst_merge.format(**row)) - add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) - # 顧客報告用にログ出力 - logger.info( - f"""DCF施設統合マスタが追加されました。 + def _output_add_dcf_inst_merge_log(duplication_inst_records: list[dict]): + sys_update_date = duplication_inst_records[0]['sys_update_date'] + set_year_month = '{set_year}年{set_month}月'.format( + set_year=sys_update_date[0:4], + set_month=sys_update_date[-2:] + ) + add_dct_inst_merge = 'DCF施設コード {dcf_dsf_inst_cd} {form_inst_name_kanji},  重複時相手先コード {dup_opp_cd} {dup_inst_name_kanji}' + add_dct_inst_merge_list = [] + for row in duplication_inst_records: + add_dct_inst_merge_list.append( + add_dct_inst_merge.format(**row)) + add_dct_inst_merge_list = '\n'.join(add_dct_inst_merge_list) + # 顧客報告用にログ出力 + logger.info( + f"""DCF施設統合マスタが追加されました。 ********************************************************** 適用月度 {set_year_month} ********************************************************** {add_dct_inst_merge_list} ********************************************************** 合計 {len(duplication_inst_records)}件""" - ) - return + ) + return - def _make_csv_data(csv_file_name: str, record_inst: list): - # CSVファイルを作成する - temporary_dir = tempfile.mkdtemp() - csv_file_path = 
path.join(temporary_dir, csv_file_name) + def _make_csv_data(csv_file_name: str, record_inst: list): + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, csv_file_name) + head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', + 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', + 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") + # UTF-8、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\r\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = [ + '' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + return csv_file_path - head_str = ['DCF_INST_CD', 'DUP_OPP_CD', 'START_MONTH', - 'INVALID_FLG', 'REMARKS', 'DCF_INST_CD_NEW', 'ENABLED_FLG', - 'CREATER', 'CREATE_DATE', 'UPDATER', 'UPDATE_DATE'] + # CSVファイルをバックアップ + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): + # S3バケットにファイルを移動 + jsk_send_bucket = JskSendBucket() - with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: - # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) - csv_file.write(f"{','.join(head_str)}\n") + # 処理日を取得 + _, _, syor_date = JskultHdkeTblManager.get_batch_statuses() - # Shift-JIS、CRLF、価囲いありで書き込む - writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', - quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, - strict=True - ) - - # データ部分書き込み(施設) - for record_inst_data in record_inst: - record_inst_value = list(record_inst_data.values()) - csv_data = [ - '' if n is None else n for n in record_inst_value] - writer.writerow(csv_data) - - return csv_file_path - - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: 
str): - # S3バケットにファイルを移動 - jsk_send_bucket = JskSendBucket() - # バッチ共通設定を取得 - batch_context = BatchContext.get_instance() - - jsk_send_bucket.upload_dcf_inst_merge_csv_file( - csv_file_name, csv_file_path) - jsk_send_bucket.backup_dcf_inst_merge_csv_file( - csv_file_name, batch_context.syor_date) - return + jsk_send_bucket.upload_dcf_inst_merge_csv_file( + csv_file_name, csv_file_path) + jsk_send_bucket.backup_dcf_inst_merge_csv_file( + csv_file_name, syor_date) + return diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index 2d5d5f41..91a060e6 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -9,9 +9,9 @@ DB_SCHEMA = os.environ['DB_SCHEMA'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT_FLG = os.environ["MAX_RUN_COUNT_FLG"] +MAX_RUN_COUNT = os.environ["MAX_RUN_COUNT"] RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] -CSV_FILE_NAME = os.environ['CSV_FILE_NAME'] +DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] From 95ce00a122d660123a4c981753be72f77e497860 Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 13:58:44 +0900 Subject: [PATCH 7/8] =?UTF-8?q?feat:=20=E8=BB=A2=E9=80=81=E3=83=87?= =?UTF-8?q?=E3=83=BC=E3=82=BF=E3=83=AA=E3=82=B9=E3=83=88=E3=82=92=E5=8F=96?= =?UTF-8?q?=E5=BE=97=E3=81=99=E3=82=8B=E9=83=A8=E5=88=86=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.env.example | 2 + ecs/jskult-batch/src/aws/s3.py | 25 +++- .../src/batch/dcf_inst_merge_io.py | 115 ++++++++++-------- 
.../src/manager/jskult_hdke_tbl_manager.py | 2 +- ecs/jskult-batch/src/system_var/constants.py | 2 +- .../src/system_var/environment.py | 6 +- 6 files changed, 92 insertions(+), 60 deletions(-) diff --git a/ecs/jskult-batch/.env.example b/ecs/jskult-batch/.env.example index 500f843d..d0bf48d3 100644 --- a/ecs/jskult-batch/.env.example +++ b/ecs/jskult-batch/.env.example @@ -18,6 +18,8 @@ VJSK_DATA_BUCKET=************* JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json # 連携データ抽出期間 SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 # 洗替対象テーブル名 diff --git a/ecs/jskult-batch/src/aws/s3.py b/ecs/jskult-batch/src/aws/s3.py index 66032e1c..6e5755be 100644 --- a/ecs/jskult-batch/src/aws/s3.py +++ b/ecs/jskult-batch/src/aws/s3.py @@ -1,11 +1,7 @@ -import gzip -import os import os.path as path -import shutil import tempfile import boto3 - from src.system_var import environment @@ -77,7 +73,9 @@ class ConfigBucket(S3Bucket): temporary_dir = tempfile.mkdtemp() temporary_file_path = path.join( temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) - wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + wholesaler_stock_input_day_list_key = \ + f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: self._s3_client.download_file( self._bucket_name, wholesaler_stock_input_day_list_key, f) @@ -105,6 +103,23 @@ class JskBackupBucket(JskUltBackupBucket): _folder = environment.JSKULT_BACKUP_BUCKET +class JskTransferListBucket(JskUltBackupBucket): + _folder = environment.TRANSFER_RESULT_FOLDER + + def 
download_transfer_result_file(self, process_date_yyyymmdd: str): + file_name = environment.TRANSFER_RESULT_FILE_NAME + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join( + temporary_dir, file_name) + holiday_list_key = f'{self._folder}/{process_date_yyyymmdd}/{file_name}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file( + self._bucket_name, holiday_list_key, f) + f.seek(0) + return temporary_file_path + + class JskSendBucket(S3Bucket): _bucket_name = environment.JSK_IO_BUCKET _send_folder = environment.JSK_DATA_SEND_FOLDER diff --git a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py index a7298153..ad311d42 100644 --- a/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py +++ b/ecs/jskult-batch/src/batch/dcf_inst_merge_io.py @@ -1,16 +1,18 @@ import csv +import json import os.path as path import tempfile -from src.aws.s3 import S3Client, JskSendBucket +from src.aws.s3 import JskSendBucket, JskTransferListBucket from src.batch.jskult_batch_entrypoint import JskultBatchEntrypoint from src.db.database import Database -from src.error.exceptions import BatchOperationException, MaxRunCountReachedException -from src.manager.jskult_batch_run_manager import JskultBatchRunManager -from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager -from src.manager.jskult_batch_status_manager import JskultBatchStatusManager -from src.system_var import environment +from src.error.exceptions import (BatchOperationException, + MaxRunCountReachedException) from src.logging.get_logger import get_logger +from src.manager.jskult_batch_run_manager import JskultBatchRunManager +from src.manager.jskult_batch_status_manager import JskultBatchStatusManager +from src.manager.jskult_hdke_tbl_manager import JskultHdkeTblManager +from src.system_var import environment logger = get_logger('DCF削除新規マスタ作成') @@ -20,80 +22,92 @@ class DcfInstMergeIO(JskultBatchEntrypoint): 
super().__init__() def execute(self): - jskultBatchRunManager = JskultBatchRunManager( + jskult_hdke_tbl_manager = JskultHdkeTblManager() + jskult_batch_run_manager = JskultBatchRunManager( environment.BATCH_EXECUTION_ID) - jskultHdkeTblManager = JskultHdkeTblManager() + if not jskult_hdke_tbl_manager.can_run_process(): + logger.error( + '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') + # バッチ実行管理テーブルをfailedで登録 + jskult_batch_run_manager.batch_failed() + return - # /transfer_result/yyyy/mm/dd/ - jskult_backuo_folder_name = f"""/transfer_result/{jskultHdkeTblManager.get_batch_statuses()[2]}""" + # 業務日付を取得 + _, _, process_date = jskult_hdke_tbl_manager.get_batch_statuses() - receive_file_count = S3Client.list_objects( - environment.JSKULT_BACKUP_BUCKET, jskult_backuo_folder_name).count() + # 転送ファイル一覧を取得し、転送件数を取得 + try: + transfer_list_bucket = JskTransferListBucket() + transfer_list_file_path = transfer_list_bucket.download_transfer_result_file( + process_date) + except Exception as e: + logger.exception(f'転送ファイル一覧の取得に失敗しました。 {e}') + # バッチ実行管理テーブルをfailedで登録 + jskult_batch_run_manager.batch_failed() - jskultBatchStatusManager = JskultBatchStatusManager( + with open(transfer_list_file_path) as f: + transfer_list = json.load(f) + + # 実消化データ + アルトマークデータの転送件数を合算し、受信ファイル件数とする + receive_file_count = len( + transfer_list['jsk_transfer_list']) + len(transfer_list['ult_transfer_list']) + + jskult_batch_status_manager = JskultBatchStatusManager( environment.PROCESS_NAME, - # TODO チケットNEWDWH2021-1847の実装で作成した定数に置き換え - environment.POST_PROCESS, + 'post_process', environment.MAX_RUN_COUNT, receive_file_count ) - try: - if not jskultHdkeTblManager.can_run_process(): - logger.error( - '日次バッチ処理中またはdump取得が正常終了していないため、DCF削除新規マスタ作成を終了します。') - return - jskultBatchStatusManager.set_process_status("start") + jskult_batch_status_manager.set_process_status("start") try: - if not jskultBatchStatusManager.can_run_post_process(): + if not jskult_batch_status_manager.can_run_post_process(): # 
後続処理の起動条件を満たしていない場合 # 処理ステータスを「処理待」に設定 - jskultBatchStatusManager.set_process_status("waiting") + jskult_batch_status_manager.set_process_status("waiting") # バッチ実行管理テーブルに「retry」で登録 - jskultBatchRunManager.batch_retry() + jskult_batch_run_manager.batch_retry() return - except MaxRunCountReachedException as e: + except MaxRunCountReachedException: logger.info('最大起動回数に到達したため、DCF削除新規マスタ作成処理を実行します。') - jskultBatchStatusManager.set_process_status("doing") + jskult_batch_status_manager.set_process_status("doing") # アルトマーク取込が実行されていた場合にDCF施設削除新規マスタの作成処理を実行 - if jskultBatchStatusManager.is_done_ultmarc_import(): - + if jskult_batch_status_manager.is_done_ultmarc_import(): # COM_施設からDCF削除新規マスタに登録 (is_add_dcf_inst_merge, duplication_inst_records) = self._insert_dcf_inst_merge_from_com_inst(self) if is_add_dcf_inst_merge: - self._output_add_dcf_inst_merge_log( duplication_inst_records) - dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() + # CSV出力 + dcf_inst_merge_all_records = self._select_dcf_inst_merge_all() file_path = self._make_csv_data( environment.DCF_INST_MERGE_SEND_FILE_NAME, dcf_inst_merge_all_records) # CSVをS3にアップロード - self._upload_dcf_inst_merge_csv_file( - file_path, environment.DCF_INST_MERGE_SEND_FILE_NAME) + file_path, process_date, environment.DCF_INST_MERGE_SEND_FILE_NAME) # 処理が全て正常終了した際に、バッチ実行管理テーブルに「success」で登録 logger.info("DCF削除新規マスタ作成処理を正常終了します。") + jskult_batch_run_manager.batch_success() + jskult_batch_status_manager.set_process_status("done") - jskultBatchRunManager.batch_success() - jskultBatchStatusManager.set_process_status("done") + return except Exception as e: # 何らかのエラーが発生した際に、バッチ実行管理テーブルに「failed」で登録 logger.exception(f'予期せぬエラーが発生したため、DCF削除新規マスタ作成処理を終了します。{e}') - - jskultBatchRunManager.batch_failed() - jskultBatchStatusManager.set_process_status("failed") + jskult_batch_run_manager.batch_failed() + jskult_batch_status_manager.set_process_status("failed") def _select_dcf_inst_merge_all(self) -> tuple[bool, list[dict]]: try: @@ -121,7 
+135,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): self._db.begin() self._db.to_jst() sql = """\ - SELECT + SELECT ci.DCF_DSF_INST_CD, ci.FORM_INST_NAME_KANJI, ci.DELETE_SCHE_REASON_CD, @@ -137,7 +151,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ci.DELETE_DATA IS NULL AND ci.SYS_UPDATE_DATE BETWEEN src07.get_syor_date() AND NOW() - AND + AND NOT EXISTS ( SELECT dim.DCF_INST_CD @@ -147,11 +161,11 @@ class DcfInstMergeIO(JskultBatchEntrypoint): dim.DCF_INST_CD = ci.DCF_DSF_INST_CD ) AND - + (ci.DCF_DSF_INST_CD EXISTS( SELECT mia.INST_CD - FROM + FROM src07.MST_INST_ASSN as mia WHERE mia.INST_CD = ci.DCF_DSF_INST_CD @@ -160,7 +174,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): OR ci.DCF_DSF_INST_CD EXISTS( SELECT ap.PRSB_INST_CD - FROM + FROM src07.ATC_PHARM AS ap WHERE ap.PRSB_INST_CD = ci.DCF_DSF_INST_CD @@ -201,7 +215,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): insert_sql = f""" INSERT INTO src07.dcf_inst_merge ( - DCF_INST_CD, + DCF_INST_CD, DUP_OPP_CD, START_MONTH, INVALID_FLG, @@ -246,6 +260,7 @@ class DcfInstMergeIO(JskultBatchEntrypoint): ********************************************************** 合計 {len(duplication_inst_records)}件""" ) + return def _make_csv_data(csv_file_name: str, record_inst: list): @@ -268,18 +283,18 @@ class DcfInstMergeIO(JskultBatchEntrypoint): csv_data = [ '' if n is None else n for n in record_inst_value] writer.writerow(csv_data) + return csv_file_path - # CSVファイルをバックアップ - def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, csv_file_path: str): - # S3バケットにファイルを移動 + def _upload_dcf_inst_merge_csv_file(self, csv_file_name: str, process_date: str, csv_file_path: str): jsk_send_bucket = JskSendBucket() - # 処理日を取得 - _, _, syor_date = JskultHdkeTblManager.get_batch_statuses() - + # S3バケットにファイルをアップロード jsk_send_bucket.upload_dcf_inst_merge_csv_file( csv_file_name, csv_file_path) + + # CSVファイルをバックアップ jsk_send_bucket.backup_dcf_inst_merge_csv_file( - csv_file_name, syor_date) + csv_file_name, process_date) + return 
diff --git a/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py b/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py index 4a804ef3..f6c8a9f0 100644 --- a/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py +++ b/ecs/jskult-batch/src/manager/jskult_hdke_tbl_manager.py @@ -113,7 +113,7 @@ class JskultHdkeTblManager: finally: self._db.disconnect() # 日次バッチ処理中ではない場合、後続の処理は行わない - if batch_processing_flag != constants.BATCH_ACTF_BATCH_START: + if batch_processing_flag != constants.BATCH_ACTF_BATCH_START: return False # dump取得が正常終了していない場合、後続の処理は行わない if dump_status_kbn != constants.DUMP_STATUS_KBN_COMPLETE: diff --git a/ecs/jskult-batch/src/system_var/constants.py b/ecs/jskult-batch/src/system_var/constants.py index 8a0ccbb3..10cd7fb8 100644 --- a/ecs/jskult-batch/src/system_var/constants.py +++ b/ecs/jskult-batch/src/system_var/constants.py @@ -4,7 +4,7 @@ BATCH_EXIT_CODE_SUCCESS = 0 # バッチ処理中フラグ:未処理 BATCH_ACTF_BATCH_UNPROCESSED = '0' # バッチ処理中フラグ:処理中 -BATCH_ACTF_BATCH_IN_PROCESSING = '1' +BATCH_ACTF_BATCH_START = '1' # dump取得状態区分:未処理 DUMP_STATUS_KBN_UNPROCESSED = '0' # dump取得状態区分:dump取得正常終了 diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index 91a060e6..e4a230c3 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -8,9 +8,9 @@ DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] -POST_PROCESS = os.environ["POST_PROCESS"] -MAX_RUN_COUNT = os.environ["MAX_RUN_COUNT"] -RECEIVE_FILE_COUNT = os.environ["RECEIVE_FILE_COUNT"] +MAX_RUN_COUNT = os.environ['MAX_RUN_COUNT'] +TRANSFER_RESULT_FOLDER = os.environ['TRANSFER_RESULT_FOLDER'] +TRANSFER_RESULT_FILE_NAME = os.environ['TRANSFER_RESULT_FILE_NAME'] DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = 
os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] From 8adec779b2797972b49641650f5209a0b1b2c42c Mon Sep 17 00:00:00 2001 From: "shimoda.m@nds-tyo.co.jp" Date: Wed, 28 May 2025 14:55:32 +0900 Subject: [PATCH 8/8] =?UTF-8?q?.env.example=E3=81=A8=E3=82=BF=E3=82=B9?= =?UTF-8?q?=E3=82=AF=E8=A8=AD=E5=AE=9A=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E3=82=92=E4=BD=9C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ecs/jskult-batch/.env.example | 44 +++++++++---------- .../src/system_var/environment.py | 3 ++ .../dcf_inst_merge_io_task_settings.env | 13 ++++++ 3 files changed, 36 insertions(+), 24 deletions(-) create mode 100644 s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env diff --git a/ecs/jskult-batch/.env.example b/ecs/jskult-batch/.env.example index d0bf48d3..b5ac18f8 100644 --- a/ecs/jskult-batch/.env.example +++ b/ecs/jskult-batch/.env.example @@ -1,28 +1,24 @@ -DB_HOST=************ -DB_PORT=************ -DB_USERNAME=************ -DB_PASSWORD=************ +DB_HOST****************** +DB_PORT=***************** +DB_USERNAME=************* +DB_PASSWORD=************* DB_SCHEMA=src05 +JSK_IO_BUCKET=mbj-newdwh2021-staging-jskult-io +JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult +BATCH_MANAGE_DYNAMODB_TABLE_NAME=mbj-newdwh2021-staging-jskult-batch-run-manage +BATCH_EXECUTION_ID=localtest +MAX_RUN_COUNT=3 LOG_LEVEL=INFO -ULTMARC_DATA_BUCKET=**************** -ULTMARC_DATA_FOLDER=recv -JSKULT_BACKUP_BUCKET=**************** -ULTMARC_BACKUP_FOLDER=ultmarc -VJSK_BACKUP_FOLDER=vjsk -JSKULT_CONFIG_BUCKET=********************** -JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar -JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt -VJSK_DATA_SEND_FOLDER=send -VJSK_DATA_RECEIVE_FOLDER=recv -VJSK_DATA_BUCKET=************* -JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt 
-JSKULT_CONFIG_CONVERT_FOLDER=jskult/convert -JSKULT_ULTMARC_HEX_CONVERT_CONFIG_FILE_NAME=ultmarc_hex_convert_config.json +PROCESS_NAME=jskult-batch-dcf-inst-merge-io +JSK_DATA_SEND_FOLDER=send +JSK_BACKUP_FOLDER=jsk/send TRANSFER_RESULT_FOLDER=transfer_result TRANSFER_RESULT_FILE_NAME=transfer_result.json -# 連携データ抽出期間 -SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 -# 洗替対象テーブル名 -SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau -# 卸実績洗替で作成するデータの期間(年単位) -SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 +DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config + +# DB接続リトライ設定 +DB_CONNECTION_MAX_RETRY_ATTEMPT=1 +DB_CONNECTION_RETRY_INTERVAL_INIT=1 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1 diff --git a/ecs/jskult-batch/src/system_var/environment.py b/ecs/jskult-batch/src/system_var/environment.py index e4a230c3..4e220ba0 100644 --- a/ecs/jskult-batch/src/system_var/environment.py +++ b/ecs/jskult-batch/src/system_var/environment.py @@ -6,6 +6,8 @@ DB_PORT = int(os.environ['DB_PORT']) DB_USERNAME = os.environ['DB_USERNAME'] DB_PASSWORD = os.environ['DB_PASSWORD'] DB_SCHEMA = os.environ['DB_SCHEMA'] + +# AWS JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] BATCH_EXECUTION_ID = os.environ['BATCH_EXECUTION_ID'] MAX_RUN_COUNT = os.environ['MAX_RUN_COUNT'] @@ -15,6 +17,7 @@ DCF_INST_MERGE_SEND_FILE_NAME = os.environ['DCF_INST_MERGE_SEND_FILE_NAME'] PROCESS_NAME = os.environ['PROCESS_NAME'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] JSK_IO_BUCKET = os.environ['JSK_IO_BUCKET'] +JSK_BACKUP_FOLDER = os.environ['JSK_BACKUP_FOLDER'] JSK_DATA_SEND_FOLDER = os.environ['JSK_DATA_SEND_FOLDER'] # 初期値がある環境変数 diff --git a/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env b/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env new file mode 100644 index 00000000..8469e66f --- /dev/null +++ b/s3/config/jskult/task_settings/dcf_inst_merge_io_task_settings.env @@ 
-0,0 +1,13 @@ +# task environment file. +LOG_LEVEL=INFO +PROCESS_NAME=jskult-batch-dcf-inst-merge-io +JSK_DATA_SEND_FOLDER=send +JSK_BACKUP_FOLDER=jsk/send +TRANSFER_RESULT_FOLDER=transfer_result +TRANSFER_RESULT_FILE_NAME=transfer_result.json +DCF_INST_MERGE_SEND_FILE_NAME=dcf_inst_merge.csv +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config +DB_CONNECTION_MAX_RETRY_ATTEMPT=1 +DB_CONNECTION_RETRY_INTERVAL_INIT=1 +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=1 +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=1