Merge pull request #153 feature-NEWDWH2021-1007 into develop

This commit is contained in:
下田雅人 2023-03-30 18:10:15 +09:00
commit bddb75ce00
117 changed files with 6096 additions and 0 deletions

View File

@ -0,0 +1,12 @@
tests/*
.coverage
.env
.env.example
.report/*
.vscode/*
.pytest_cache/*
*/__pycache__/*
Dockerfile
pytest.ini
README.md
*.sql

View File

@ -0,0 +1,6 @@
DB_HOST=************
DB_PORT=************
DB_USERNAME=************
DB_PASSWORD=************
DB_SCHEMA=src05
LOG_LEVEL=INFO

4
ecs/jskult-batch-daily/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
__pycache__
.vscode/settings.json
.env

View File

@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Batch Sample",
"type": "python",
"request": "launch",
"program": "entrypoint.py",
"console": "integratedTerminal",
"justMyCode": true
}
]
}

View File

@ -0,0 +1,18 @@
{
"[python]": {
"editor.defaultFormatter": null,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
// Path to the Python interpreter (set this to your own virtualenv)
"python.defaultInterpreterPath": "<pythonインタプリターのパス>",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Args": ["--max-line-length=120"],
"python.formatting.provider": "autopep8",
"python.formatting.autopep8Args": ["--max-line-length", "120"]
}

View File

@ -0,0 +1,20 @@
FROM python:3.9
ENV TZ="Asia/Tokyo"
WORKDIR /usr/src/app
# Copy only the dependency manifests first so the expensive install layer
# below stays cached until Pipfile / Pipfile.lock actually change.
COPY Pipfile Pipfile.lock ./
RUN \
    # Use apt-get (stable CLI for scripts) instead of apt.
    apt-get update -y && \
    # パッケージのセキュリティアップデートのみを適用するコマンド
    # (apply security updates only, via unattended-upgrades)
    apt-get install -y --no-install-recommends unattended-upgrades && \
    unattended-upgrades && \
    pip install --no-cache-dir --upgrade pip wheel setuptools && \
    pip install pipenv --no-cache-dir && \
    pipenv install --system --deploy && \
    # pipenv is only needed at build time; drop it from the runtime image.
    pip uninstall -y pipenv virtualenv-clone virtualenv && \
    # Clean the apt package lists in the same layer to keep the image small.
    rm -rf /var/lib/apt/lists/*
COPY src ./src
COPY entrypoint.py entrypoint.py
CMD ["python", "entrypoint.py"]

View File

@ -0,0 +1,20 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
boto3 = "*"
PyMySQL = "*"
sqlalchemy = "*"
tenacity = "*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
[requires]
python_version = "3.9"
[pipenv]
allow_prereleases = true

263
ecs/jskult-batch-daily/Pipfile.lock generated Normal file
View File

@ -0,0 +1,263 @@
{
"_meta": {
"hash": {
"sha256": "1b9b3da586499b64915b0cf14217a1bcfb26c5e3e1c6fbfc9cce99242bc4faed"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"boto3": {
"hashes": [
"sha256:043f8981d10c4e7c48736df4381dac557b46c5b369b0a450d8f3d7f5fdd24db5",
"sha256:b00f416832bc59863b96175045d2ebe067d9222289bce677c48fd72c006eaaad"
],
"index": "pypi",
"version": "==1.26.102"
},
"botocore": {
"hashes": [
"sha256:4bae8f502507da18ff37c61cb18745cfb11d87a61dd0ea27e346adadff92aa3f",
"sha256:58b11c630d2044ea732ba4c403d29fab51e954465f9b3f7099cbf5ac0ce7ab47"
],
"markers": "python_version >= '3.7'",
"version": "==1.29.102"
},
"greenlet": {
"hashes": [
"sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a",
"sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a",
"sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43",
"sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33",
"sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8",
"sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088",
"sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca",
"sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343",
"sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645",
"sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db",
"sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df",
"sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3",
"sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86",
"sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2",
"sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a",
"sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf",
"sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7",
"sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394",
"sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40",
"sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3",
"sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6",
"sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74",
"sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0",
"sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3",
"sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91",
"sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5",
"sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9",
"sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8",
"sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b",
"sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6",
"sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb",
"sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73",
"sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b",
"sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df",
"sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9",
"sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f",
"sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0",
"sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857",
"sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a",
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249",
"sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30",
"sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292",
"sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b",
"sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d",
"sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b",
"sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c",
"sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca",
"sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7",
"sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75",
"sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae",
"sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b",
"sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470",
"sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564",
"sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9",
"sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099",
"sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0",
"sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5",
"sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19",
"sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1",
"sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"
],
"markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))",
"version": "==2.0.2"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"pymysql": {
"hashes": [
"sha256:3dda943ef3694068a75d69d071755dbecacee1adf9a1fc5b206830d2b67d25e8",
"sha256:89fc6ae41c0aeb6e1f7710cdd623702ea2c54d040565767a78b00a5ebb12f4e5"
],
"index": "pypi",
"version": "==1.0.3"
},
"python-dateutil": {
"hashes": [
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.2"
},
"s3transfer": {
"hashes": [
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
],
"markers": "python_version >= '3.7'",
"version": "==0.6.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sqlalchemy": {
"hashes": [
"sha256:013f4f330001e84a2b0ef1f2c9bd73169c79d582e54e1a144be1be1dbc911711",
"sha256:0789e199fbce8cb1775337afc631ed12bcc5463dd77d7a06b8dafd758cde51f8",
"sha256:0b698440c477c00bdedff87348b19a79630a235864a8f4378098d61079c16ce9",
"sha256:0eac488be90dd3f7a655d2e34fa59e1305fccabc4abfbd002e3a72ae10bd2f89",
"sha256:14854bdb2a35af536d14f77dfa8dbc20e1bb1972996d64c4147e0d3165c9aaf5",
"sha256:18795e87601b4244fd08b542cd6bff9ef674b17bcd34e4a3c9935398e2cc762c",
"sha256:32f508fef9c5a7d19411d94ef64cf5405e42c4689e51ddbb81ac9a7be045cce8",
"sha256:33f73cc45ffa050f5c3b60ff4490e0ae9e02701461c1600d5ede1b008076b1b9",
"sha256:38e26cf6b9b4c6c37846f7e31b42e4d664b35f055691265f07e06aeb6167c494",
"sha256:3da3dff8d9833a7d7f66a3c45a79a3955f775c79f47bb7eea266d0b4c267b17a",
"sha256:432cfd77642771ee7ea0dd0f3fb664f18506a3625eab6e6d5d1d771569171270",
"sha256:4339110be209fea37a2bb4f35f1127c7562a0393e9e6df5d9a65cc4f5c167cb6",
"sha256:486015a58c9a67f65a15b4f19468b35b97cee074ae55386a9c240f1da308fbfe",
"sha256:494db0026918e3f707466a1200a5dedbf254a4bce01a3115fd95f04ba8258f09",
"sha256:57b80e877eb6ec63295835f8a3b86ca3a44829f80c4748e1b019e03adea550fc",
"sha256:5f7c40ec2e3b31293184020daba95850832bea523a08496ac89b27a5276ec804",
"sha256:6d44ff7573016fc26311b5a5c54d5656fb9e0c39e138bc8b81cb7c8667485203",
"sha256:774965c41b71c8ebe3c5728bf5b9a948231fc3a0422d9fdace0686f5bb689ad6",
"sha256:7917632606fc5d4be661dcde45cc415df835e594e2c50cc999a44f24b6bf6d92",
"sha256:9020125e3be677c64d4dda7048e247343f1663089cf268a4cc98c957adb7dbe0",
"sha256:921485d1f69ed016e1f756de67d02ad4f143eb6b92b9776bfff78786d8978ab5",
"sha256:94556a2a7fc3de094ea056b62845e2e6e271e26d1e1b2540a1cd2d2506257a10",
"sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649",
"sha256:a65a8fd09bdffd63fa23b39cd902e6a4ca23d86ecfe129513e43767a1f3e91fb",
"sha256:a6f7d1debb233f1567d700ebcdde0781a0b63db0ef266246dfbf75ae41bfdf85",
"sha256:b0995b92612979d208189245bf87349ad9243b97b49652347a28ddee0803225a",
"sha256:b8ab8f90f4a13c979e6c41c9f011b655c1b9ae2df6cffa8fa2c7c4d740f3512e",
"sha256:bc370d53fee7408330099c4bcc2573a107757b203bc61f114467dfe586a0c7bd",
"sha256:c38641f5c3714505d65dbbd8fb1350408b9ad8461769ec8e440e1177f9c92d1d",
"sha256:cc337b96ec59ef29907eeadc2ac11188739281568f14c719e61550ca6d201a41",
"sha256:ce076e25f1170000b4ecdc57a1ff8a70dbe4a5648ec3da0563ef3064e8db4f15",
"sha256:cebd161f964af58290596523c65e41a5a161a99f7212b1ae675e288a4b5e0a7c",
"sha256:d2e7411d5ea164c6f4d003f5d4f5e72e202956aaa7496b95bb4a4c39669e001c",
"sha256:e735a635126b2338dfd3a0863b675437cb53d85885a7602b8cffb24345df33ed",
"sha256:e7e61e2e4dfe175dc3510889e44eda1c32f55870d6950ef40519640cb266704d",
"sha256:e90f0be674e0845c5c1ccfa5e31c9ee28fd406546a61afc734355cc7ea1f8f8b",
"sha256:ea1c63e61b5c13161c8468305f0a5837c80aae2070e33654c68dd12572b638eb",
"sha256:ea9461f6955f3cf9eff6eeec271686caed7792c76f5b966886a36a42ea46e6b2",
"sha256:f15c54713a8dd57a01c974c9f96476688f6f6374d348819ed7e459535844b614",
"sha256:fb649c5473f79c9a7b6133f53a31f4d87de14755c79224007eb7ec76e628551e",
"sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"
],
"index": "pypi",
"version": "==2.0.7"
},
"tenacity": {
"hashes": [
"sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0",
"sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"
],
"index": "pypi",
"version": "==8.2.2"
},
"typing-extensions": {
"hashes": [
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
],
"markers": "python_version >= '3.7'",
"version": "==4.5.0"
},
"urllib3": {
"hashes": [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.15"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1",
"sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c"
],
"index": "pypi",
"version": "==2.0.2"
},
"flake8": {
"hashes": [
"sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7",
"sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"
],
"index": "pypi",
"version": "==6.0.0"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"pycodestyle": {
"hashes": [
"sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053",
"sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"
],
"markers": "python_version >= '3.6'",
"version": "==2.10.0"
},
"pyflakes": {
"hashes": [
"sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf",
"sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.1"
},
"tomli": {
"hashes": [
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
],
"markers": "python_version < '3.11'",
"version": "==2.0.1"
}
}
}

View File

@ -0,0 +1,73 @@
# 実消化&アルトマーク 日次バッチ
## 概要
実消化&アルトマークの日次バッチ処理。
## 環境情報
- Python 3.9
- MySQL 8.x
- VSCode
## 環境構築
- Pythonの構築
- Merck_NewDWH開発2021のWiki、[Python環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照
- 「Pipenvの導入」までを行っておくこと
- 構築完了後、プロジェクト配下で以下のコマンドを実行し、Pythonの仮想環境を作成する
- `pipenv install --python <pyenvでインストールしたpythonバージョン>`
- この手順で出力される仮想環境のパスは、後述するVSCodeの設定手順で使用するため、控えておく
- MySQLの環境構築
- Windowsの場合、以下のリンクからダウンロードする
- <https://dev.mysql.com/downloads/installer/>
- Dockerを利用する場合、「newsdwh-tools」リポジトリのMySQL設定を使用すると便利
- 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると、MySQLコンテナが起動する
- `docker-compose up -d`
- Dockerの構築手順は、[Dockerのセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと
- データを投入する
- 立ち上げたデータベースに「src05」スキーマを作成する
- [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysqlコマンドを使用して復元する
- `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql`
- 環境変数の設定
- 「.env.example」ファイルをコピーし、「.env」ファイルを作成する
- 環境変数を設定する。設定内容はPRJメンバーより共有を受けてください
- VSCodeの設定
- 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する
- 「python.defaultInterpreterPath」を、Pythonの構築手順で作成した仮想環境のパスに変更する
## 実行
- VSCode上で「F5」キーを押下すると、バッチ処理が起動する。
- 「entrypoint.py」が、バッチ処理のエントリーポイント。
- 実際の処理は、「src/jobctrl_daily.py」で行っている。
## フォルダ構成
```text
.
├── Dockerfile -- Dockerイメージを作成するためのファイル
├── Pipfile -- Pythonモジュールの依存関係を管理するファイル
├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル
├── README.md -- 当ファイル
├── entrypoint.py -- バッチ処理のエントリーポイントになるpythonファイル
└── src -- ソースコードの保管場所
├── batch -- バッチ処理関連ソース置き場
│ ├── batch_functions.py -- バッチ処理共通関数置き場
│ ├── datachange -- 実績洗替関連ソース置き場
│ │ └── emp_chg_inst_lau.py -- 施設担当者マスタ洗替
│ └── jissekiaraigae.py -- 実績洗替処理のエントリーポイント
├── db
│ └── database.py -- データベース操作共通処理
├── error
│ └── exceptions.py -- カスタム例外
├── jobctrl_daily.py -- 日次バッチ処理のエントリーポイント。「entrypoint.py」 から呼ばれる。
├── logging
│ └── get_logger.py -- ログ出力の共通処理
├── system_var
│ └── environment.py -- 環境変数
└── time
└── elapsed_time.py -- 実行時間計測用
```

View File

@ -0,0 +1,10 @@
"""Entry point for the Jisseki-Shoka & Altmark daily batch."""
from src.jobctrl_daily import batch_process

if __name__ == '__main__':
    status = 0
    try:
        status = batch_process()
    except Exception:
        # Even when an error occurs, exit with a success code: the fact
        # that an error happened is logged inside batch_process itself.
        status = 0
    exit(status)

View File

View File

@ -0,0 +1,56 @@
"""バッチ処理の共通関数"""
import textwrap
from datetime import datetime
from src.db.database import Database
from src.error.exceptions import BatchOperationException, DBException
def get_syor_date() -> str:
    """Fetch the processing date (処理日) from the database.

    Raises:
        BatchOperationException: when the date table cannot be read or
            any DB error occurs.

    Returns:
        str: hdke_tbl.syor_date
    """
    db = Database.get_instance()
    db.connect()
    sql = 'SELECT syor_date FROM src05.hdke_tbl'
    try:
        syor_date_result = db.execute_select(sql)
    except DBException as e:
        raise BatchOperationException(e)
    finally:
        # FIX: disconnect in a finally block — the original leaked the
        # connection when execute_select raised DBException.
        db.disconnect()
    if len(syor_date_result) == 0:
        raise BatchOperationException('日付テーブルが取得できませんでした')
    # Exactly one row is expected to exist.
    syor_date_record = syor_date_result[0]
    syor_date_str = syor_date_record['syor_date']
    return syor_date_str
def get_syor_date_as_date_format() -> str:
    """Fetch the processing date from the DB and return it as yyyy/mm/dd.

    Raises:
        BatchOperationException: when the date table cannot be read or
            any DB error occurs (propagated from get_syor_date).

    Returns:
        str: hdke_tbl.syor_date reformatted as yyyy/mm/dd
    """
    raw_value = get_syor_date()
    return datetime.strptime(raw_value, '%Y%m%d').strftime('%Y/%m/%d')
def logging_sql(logger, sql):
    """Emit a SQL statement to the debug log, framed by separator lines.

    Args:
        logger (logging.Logger): logger to write to
        sql (str): SQL statement (leading/trailing newline are stripped
            and common indentation is removed before logging)
    """
    separator = '-' * 15
    body = textwrap.dedent(sql)[1:-1]
    logger.debug(f'\n{separator}\n{body}\n{separator}')

View File

@ -0,0 +1,208 @@
from src.batch.batch_functions import get_syor_date_as_date_format, logging_sql
from src.db.database import Database
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
from src.time.elapsed_time import ElapsedTime
logger = get_logger('48-施設担当者マスタ洗替')


def batch_process():
    """Refresh (洗替) the emp_chg_inst_lau institution-staff work table.

    Steps: read the processing date, truncate the target table, reload it
    from emp_chg_inst, then apply institution merges from vop_hco_merge_v
    and dcf_inst_merge.
    """
    db = Database.get_instance()
    db.connect()
    logger.info('##########################')
    logger.info('START Changing Employee in charge of institution PGM.')
    # Fetch the processing date.
    # FIX: get_syor_date_as_date_format() takes no arguments (it obtains
    # its own Database instance); the original passed `db`, which would
    # raise TypeError at runtime.
    syor_date = get_syor_date_as_date_format()
    # Truncate `emp_chg_inst_lau`.
    truncate_emp_chg_inst_lau(db)
    # Insert into `emp_chg_inst_lau` from emp_chg_inst.
    insert_into_emp_chg_inst_lau_from_emp_chg_inst(db)
    # Update emp_chg_inst_lau from vop_hco_merge_v.
    update_emp_chg_inst_lau_from_vop_hco_merge_v(db, syor_date)
    # Update emp_chg_inst_lau from dcf_inst_merge.
    update_dcf_inst_merge_from_emp_chg_inst_lau(db, syor_date)
    db.disconnect()
    logger.info('##########################')
    logger.info('End All Processing PGM.')
def truncate_emp_chg_inst_lau(db: Database):
    """Truncate the src05.emp_chg_inst_lau table.

    Args:
        db (Database): connected database wrapper

    Raises:
        BatchOperationException: when the TRUNCATE statement fails
    """
    logger.info("##########################")
    try:
        db.execute("TRUNCATE TABLE src05.emp_chg_inst_lau")
    except Exception as exc:
        logger.info("Error! Truncate Table `emp_chg_inst_lau` is Failed!!!")
        raise BatchOperationException(exc)
    else:
        logger.info("Table `emp_chg_inst_lau` was truncated!")
def insert_into_emp_chg_inst_lau_from_emp_chg_inst(db: Database):
    """Reload emp_chg_inst_lau from the enabled rows of emp_chg_inst.

    Args:
        db (Database): connected database wrapper

    Raises:
        BatchOperationException: when the INSERT statement fails
    """
    logger.info("##########################")
    try:
        # Stopwatch for the "rows affected (…)" log line below.
        elapsed_time = ElapsedTime()
        sql = """
        INSERT INTO
        src05.emp_chg_inst_lau
        SELECT
        inst_cd,
        ta_cd,emp_cd,
        bu_cd,
        start_date,
        end_date,
        main_chg_flg,
        enabled_flg,
        creater,
        create_date,
        updater,
        update_date,
        NULL -- lua_ope_dt
        FROM
        src05.emp_chg_inst
        WHERE
        enabled_flg = 'Y'
        """
        res = db.execute(sql)
        logging_sql(logger, sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        logger.info("Error! Insert into `emp_chg_inst_lau` from `emp_chg_inst` was failed!!!")
        raise BatchOperationException(e)
    logger.info("Success! Insert into `emp_chg_inst_lau` from `emp_chg_inst` was inserted!")
    return
def update_emp_chg_inst_lau_from_vop_hco_merge_v(db: Database, syor_date: str):
    """Apply institution merges recorded in vop_hco_merge_v to emp_chg_inst_lau.

    Rows whose apply_dt is on or before the processing date are applied in
    ascending apply_dt order: each row's inst_cd is rewritten from
    v_inst_cd to v_inst_cd_merg.

    Args:
        db (Database): connected database wrapper
        syor_date (str): processing date, yyyy/mm/dd

    Raises:
        BatchOperationException: on any DB error
    """
    # vop_hco_merge_v はデータが作られないため、この洗い替え処理は基本空振りする
    # (vop_hco_merge_v is never populated in practice, so this step is
    # normally a no-op — original note.)
    logger.info("##########################")
    try:
        select_result = db.execute_select(
            """
            SELECT
            COUNT(v_inst_cd) AS row_count
            FROM
            src05.vop_hco_merge_v
            WHERE
            STR_TO_DATE(apply_dt, '%Y-%m-%d') <= :syor_date
            """,
            {'syor_date': syor_date}
        )
    except Exception as e:
        logger.info("Error! `vop_hco_merge_v` Table count error!")
        raise BatchOperationException(e)
    count = [row for row in select_result][0]['row_count']
    if count == 0:
        logger.info('vop_hco_merge_v Table Data is not exists!')
        return
    logger.info('vop_hco_merge_v Table Data is exists!')
    # Update emp_chg_inst_lau from vop_hco_merge_v, row by row.
    result = db.execute_select(
        """
        SELECT
        v_inst_cd,
        v_inst_cd_merg
        FROM
        src05.vop_hco_merge_v
        WHERE
        STR_TO_DATE(apply_dt, '%Y-%m-%d') <= :syor_date
        ORDER BY
        STR_TO_DATE(apply_dt, '%Y-%m-%d') ASC
        """,
        {'syor_date': syor_date}
    )
    for row in result:
        v_inst_cd = row['v_inst_cd']
        v_inst_cd_merge = row['v_inst_cd_merg']
        try:
            elapsed_time = ElapsedTime()
            # FIX: the SET placeholder must match the bound-parameter key
            # supplied below (`v_inst_cd_merg`); the original used
            # `:v_inst_merge`, which raises an unbound-parameter error.
            # Also dropped the trailing semicolon inside text().
            update_sql = """
            UPDATE
            emp_chg_inst_lau
            SET
            inst_cd = :v_inst_cd_merg,
            lua_ope_dt = SYSDATE()
            WHERE
            inst_cd = :v_inst_cd
            """
            update_result = db.execute(
                update_sql,
                {'v_inst_cd': v_inst_cd, 'v_inst_cd_merg': v_inst_cd_merge}
            )
            logging_sql(logger, update_sql)
            logger.info(f'Query OK, {update_result.rowcount} rows affected ({elapsed_time.of})')
        except Exception as e:
            # FIX: the original logged v_inst_cd_merge for both "from" and
            # "to"; log the actual source and target codes.
            logger.info(f"emp_chg_inst_lau v_inst_cd could not set from {v_inst_cd} to {v_inst_cd_merge}!")
            raise BatchOperationException(e)
        logger.info(f"Success! emp_chg_inst_lau v_inst_cd was set from {v_inst_cd} to {v_inst_cd_merge}!")
    return
def update_dcf_inst_merge_from_emp_chg_inst_lau(db: Database, syor_date: str):
    """Apply DCF institution merges (dcf_inst_merge) to emp_chg_inst_lau.

    Enabled, non-void merge rows whose tekiyo_month (applied as the 1st of
    the month) is on or before the processing date rewrite inst_cd from
    dcf_inst_cd to dcf_inst_cd_new in a single set-based UPDATE.

    Args:
        db (Database): connected database wrapper
        syor_date (str): processing date, yyyy/mm/dd

    Raises:
        BatchOperationException: on any DB error
    """
    # First, count the applicable rows in dcf_inst_merge.
    logger.info("##########################")
    try:
        select_result = db.execute_select(
            """
            SELECT
            COUNT(dcf_inst_cd) AS row_count
            FROM
            src05.dcf_inst_merge
            WHERE
            muko_flg = '0'
            AND dcf_inst_cd_new IS NOT NULL
            AND enabled_flg = 'Y'
            AND STR_TO_DATE(CONCAT(tekiyo_month, '01'), '%Y%m%d') <= :syor_date -- TODO: tekiyo_monthはいっぴにする
            """,
            {'syor_date': syor_date}
        )
    except Exception as e:
        logger.info("Error! Getting Count of dcf_inst_merge was failed!")
        raise BatchOperationException(e)
    count = [row for row in select_result][0]['row_count']
    if count == 0:
        logger.info('dcf_inst_merge Table Data is not exists!')
        return
    logger.info('dcf_inst_merge Table Data is exists!')
    # Update emp_chg_inst_lau from dcf_inst_merge.
    logger.info("##########################")
    logger.info("#### UPDATE DATA #########")
    logger.info("##########################")
    try:
        # Stopwatch for the "rows affected (…)" log line below.
        elapsed_time = ElapsedTime()
        update_sql = """
        UPDATE
        src05.emp_chg_inst_lau el,
        (
        SELECT
        dcf_inst_cd,
        dcf_inst_cd_new
        FROM
        src05.dcf_inst_merge
        WHERE
        muko_flg = '0'
        AND dcf_inst_cd_new IS NOT NULL
        AND enabled_flg = 'Y'
        AND STR_TO_DATE(CONCAT(tekiyo_month, '01'), '%Y%m%d') <= :syor_date
        ) dm
        SET
        el.inst_cd = dm.dcf_inst_cd_new,
        el.lua_ope_dt = SYSDATE()
        WHERE
        el.inst_cd = dm.dcf_inst_cd
        """
        res = db.execute(
            update_sql,
            {'syor_date': syor_date}
        )
        logging_sql(logger, update_sql)
        logger.info(f'Query OK, {res.rowcount} rows affected ({elapsed_time.of})')
    except Exception as e:
        logger.info("emp_chg_inst_lau.v_inst_cd could not set!")
        raise BatchOperationException(e)
    logger.info("emp_chg_inst_lau.v_inst_cd was set!")
    return

View File

@ -0,0 +1,11 @@
from src.batch.datachange import emp_chg_inst_lau
from src.logging.get_logger import get_logger
logger = get_logger('実績洗替')


def batch_process():
    """Run the Jisseki-Araigae (実績洗替, results-refresh) batch.

    Delegates to emp_chg_inst_lau.batch_process().
    """
    logger.info('Start Jisseki Araigae Batch PGM.')
    # Refresh the institution-staff (施設担当者) master.
    emp_chg_inst_lau.batch_process()

View File

@ -0,0 +1,150 @@
from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
create_engine, text)
from sqlalchemy.engine.create import create_engine
from sqlalchemy.engine.url import URL
from tenacity import retry, stop_after_attempt, wait_exponential
from src.error.exceptions import DBException
from src.logging.get_logger import get_logger
from src.system_var import environment
logger = get_logger(__name__)


class Database:
    """Database access wrapper around a SQLAlchemy engine/connection (MySQL)."""
    # Class-level declarations of the instance state (populated in __init__).
    __connection: Connection = None
    __engine: Engine = None
    __host: str = None
    __port: str = None
    __username: str = None
    __password: str = None
    __schema: str = None
    __connection_string:str = None

    def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
        """Initialize a new instance of this class.

        Args:
            username (str): DB user name
            password (str): DB password
            host (str): DB host name
            port (int): DB port
            schema (str): DB schema name
        """
        self.__username = username
        self.__password = password
        self.__host = host
        # port may arrive as a string from the environment; normalize to int.
        self.__port = int(port)
        self.__schema = schema
        self.__connection_string = URL.create(
            drivername='mysql+pymysql',
            username=self.__username,
            password=self.__password,
            host=self.__host,
            port=self.__port,
            database=self.__schema,
            query={"charset": "utf8mb4"}
        )
        self.__engine = create_engine(
            self.__connection_string,
            pool_timeout=5,
            poolclass=QueuePool,
            # AUTOCOMMIT: each statement commits unless begin() is used.
            isolation_level="AUTOCOMMIT"
        )

    @classmethod
    def get_instance(cls):
        """Build an instance from the environment-variable configuration.

        Returns:
            Database: database access instance
        """
        return cls(
            username=environment.DB_USERNAME,
            password=environment.DB_PASSWORD,
            host=environment.DB_HOST,
            port=environment.DB_PORT,
            schema=environment.DB_SCHEMA
        )

    @retry(
        wait=wait_exponential(
            multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT,
            min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS,
            max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS
        ),
        stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT))
    def connect(self):
        """
        Connect to the DB, retrying with exponential backoff on failure.

        NOTE(review): the original docstring claimed this raises
        DBException, but failures actually surface as the driver's own
        errors / tenacity RetryError once attempts are exhausted — confirm
        the intended contract.
        """
        self.__connection = self.__engine.connect()

    def execute_select(self, select_query: str, parameters=None) -> list[dict]:
        """Execute a SELECT query.

        Args:
            select_query (str): SELECT statement (with :name placeholders)
            parameters (dict, optional): values bound to the query
                placeholders. Defaults to None.

        Raises:
            DBException: not connected, or the query failed

        Returns:
            list[dict]: list of {column name: value} mappings
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(select_query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        result_rows = result.mappings().all()
        return result_rows

    def execute(self, query: str, parameters=None) -> CursorResult:
        """Execute an arbitrary SQL statement.

        Args:
            query (str): SQL statement (with :name placeholders)
            parameters (dict, optional): values bound to the query
                placeholders. Defaults to None.

        Raises:
            DBException: not connected, or the statement failed

        Returns:
            CursorResult: execution result (e.g. rowcount)
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        return result

    def begin(self):
        """Begin a transaction (no-op if one is already active).

        NOTE(review): unlike execute(), this does not guard against
        __connection being None — calling it before connect() raises
        AttributeError rather than DBException.
        """
        if not self.__connection.in_transaction():
            self.__connection.begin()

    def commit(self):
        """Commit the active transaction, if any."""
        if self.__connection.in_transaction():
            self.__connection.commit()

    def rollback(self):
        """Roll back the active transaction, if any."""
        if self.__connection.in_transaction():
            self.__connection.rollback()

    def disconnect(self):
        """Close the DB connection (safe to call when not connected)."""
        if self.__connection is not None:
            self.__connection.close()
            self.__connection = None

View File

@ -0,0 +1,14 @@
from tenacity import RetryError
class MeDaCaException(Exception):
    """Base class for all exceptions raised by this batch application."""


class DBException(MeDaCaException):
    """Raised when a database operation fails."""


class BatchOperationException(MeDaCaException):
    """Raised when a batch processing step fails."""


class MaxRetryExceededException(MeDaCaException, RetryError):
    """Raised when retry attempts are exhausted (tenacity-compatible)."""

View File

@ -0,0 +1,117 @@
from src.batch import jissekiaraigae
from src.batch.batch_functions import get_syor_date
from src.error.exceptions import BatchOperationException
from src.logging.get_logger import get_logger
logger = get_logger('日次処理コントロール')  # might be better to use a process ID or similar here


def batch_process():
    """Controller for the daily batch job.

    Most steps are still placeholders (`if True:` stubs and log-only
    try/except blocks) ported from the legacy job; only the results-refresh
    step (jissekiaraigae) does real work so far.

    Returns:
        int: exit status (0 on the normal path)
    """
    try:
        logger.info('日次ジョブ:開始')
        # logger.info('S3マウント状況確認')
        # logger.error('S3マウントエラー:DWH異常終了')
        # logger.error('S3マウントエラー:BIO異常終了')
        # logger.info('データベース接続') # 実際には、ここでつなげているわけではないので、いらないと思う
        # logger.error('データベース接続エラー(異常終了)') # 検査例外を捕まえて、共通的に出せばいいと思う
        try:
            logger.info('処理日取得')
            syor_date = get_syor_date()
        except BatchOperationException as e:
            # NOTE(review): execution continues after this error and
            # syor_date is then unbound on the next line (NameError, caught
            # by the outer except) — confirm the intended failure path.
            logger.error(f'処理日取得エラー(異常終了){e}')
        logger.info(f'処理日={syor_date}')
        # Read the holiday-calendar file (placeholder).
        logger.info('休日判定処理')
        if True:  # holiday check (placeholder condition)
            logger.info('非営業日かつ月、火、水以外です。')
            try:
                # In-process flag check; the dump flag is also checked here.
                logger.info('処理フラグ更新中')
                logger.info('処理フラグ更新終了')
            except BatchOperationException as e:
                logger.error(f'処理フラグ更新処理エラー(異常終了){e}')
            logger.info('日次ジョブ:終了(正常終了)')
        try:
            logger.info('日次ジョブ処理中判定')
            if True:  # already-running check (placeholder condition)
                logger.error('処理フラグ処理中(異常終了)')
            logger.info('処理中フラグの更新:起動')
            logger.info('処理中フラグの更新:終了')
        except BatchOperationException as e:
            logger.error(f'日次ジョブ処理中エラー(異常終了){e}')
        # ↓ from here: intentionally not implemented ↓
        # logger.info('処理前バックアップ実行')
        # logger.info('処理前バックアップ:起動')
        # logger.info('処理前バックアップ:終了')
        # logger.error('処理前バックアップ処理エラー(異常終了)', $ex->getMessage())
        # ↑ up to here ↑
        logger.info('卸在庫データ取込判定')
        if True:  # wholesaler-stock import-day check (placeholder)
            logger.info('卸在庫データ取込日です')
            logger.debug('卸在庫データファイル名: {_PATH_OROSHI_ZAIKO}')
            if True:  # abort if the stock file is missing (placeholder)
                logger.error('卸在庫データ存在確認エラー(異常終了)')
            logger.info('卸在庫データ存在確認:取込処理開始')
            logger.debug('卸在庫データファイル名作成: {read_filename}')
            logger.debug('ファイル移動OK{_MOVE_OROSHI_ZAIKO}')  # download from S3
            logger.debug('ファイル解凍OK{sprintf(_ZIP_OROSHI_ZAIKO, $read_filename)}')  # gunzip -f, so use gzip
            logger.debug('ファイル名変更OK {sprintf(_RENAME_OROSHI_ZAIKO, $read_filename)}')  # upload to S3
            try:
                logger.info('卸在庫データ取込:起動')
                logger.info('卸在庫データ取込:終了')
            except BatchOperationException as e:
                logger.error(f'卸在庫データ取込処理エラー(異常終了){e}')
        logger.info('日次処理(アルトマーク)')
        if True:  # Altmark execution-day check (placeholder)
            logger.info('日次処理(アルトマーク)実行対象日でない為未実行')
        try:
            logger.info('アルトマーク取込:起動')
            logger.info('アルトマーク取込:終了')
        except BatchOperationException as e:
            logger.error(f'アルトマーク取込処理エラー(異常終了){e}')
        if True:  # holiday check (placeholder condition)
            try:
                logger.info('メルク施設マスタ作成')
                logger.info('メルク施設マスタ作成終了')
            except BatchOperationException as e:
                logger.error(f'メルク施設マスタ作成エラー(異常終了){e}')
            try:
                logger.info('DCF施設統合マスタ作成')
                logger.info('DCF施設統合マスタ作成終了')
            except BatchOperationException as e:
                logger.error(f'DCF施設統合マスタ作成エラー異常終了{e}')
        # if False: # ($holiday === FALSE) — run every day regardless
        logger.info('V実消化連携データ存在確認')
        if True:
            logger.error('V実消化連携データ存在確認異常終了')
        logger.info('日次処理V実消化')
        try:
            logger.info('V実消化取込起動')
            logger.info('V実消化取込終了')
        except BatchOperationException as e:
            logger.exception(f'V実消化取込処理エラー異常終了{e}')
        logger.info('日次処理(実績更新)')
        try:
            logger.info('実績更新:起動')
            jissekiaraigae.batch_process()
            logger.info('実績更新:終了')
        except BatchOperationException as e:
            logger.exception(f'実績更新処理エラー(異常終了){e}')
        # ↓ below: everything except the file backup will not be implemented ↓
        # logger.info('処理後バックアップ実行')
        # logger.info('処理後バックアップ:起動')
        # logger.info('処理後バックアップ:終了')
        # logger.error('処理後バックアップ処理エラー(異常終了)', $ex->getMessage())
        # ↑ up to here ↑
        logger.info('処理中フラグの更新:非処理中')
        try:
            logger.info('処理中フラグの更新:起動')
            logger.info('処理中フラグの更新:終了')
        except BatchOperationException as e:
            logger.exception(f'処理中フラグ更新エラー(異常終了){e}')
        logger.info('ワークディレクトリクリーニング')
        logger.info('日次ジョブ:終了(正常終了)')
        return 0
    except Exception as e:
        raise e

View File

@ -0,0 +1,29 @@
import logging
from src.system_var.environment import LOG_LEVEL
# Raise the log level of the AWS SDK modules up-front so that their DEBUG
# output never reaches our logs.
for _aws_logger_name in ("boto3", "botocore", "s3transfer", "urllib3"):
    logging.getLogger(_aws_logger_name).setLevel(logging.WARNING)


def get_logger(log_name):
    """Return a logger configured with the project-wide level and format.

    The level comes from the LOG_LEVEL environment setting; unrecognized
    values fall back to INFO. A StreamHandler is attached only when the
    logger has no handlers yet, and the shared formatter is (re)applied to
    every handler on each call.
    """
    configured_logger = logging.getLogger(log_name)
    resolved_level = logging.getLevelName(LOG_LEVEL)
    if not isinstance(resolved_level, int):
        resolved_level = logging.INFO
    configured_logger.setLevel(resolved_level)
    if not configured_logger.hasHandlers():
        configured_logger.addHandler(logging.StreamHandler())
    shared_formatter = logging.Formatter(
        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
        '%Y-%m-%d %H:%M:%S'
    )
    for attached_handler in configured_logger.handlers:
        attached_handler.setFormatter(shared_formatter)
    return configured_logger

View File

@ -0,0 +1,13 @@
import os
# Required database connection settings; a missing variable raises KeyError at
# import time so misconfiguration fails fast.
DB_HOST = os.environ['DB_HOST']
DB_PORT = int(os.environ['DB_PORT'])
DB_USERNAME = os.environ['DB_USERNAME']
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_SCHEMA = os.environ['DB_SCHEMA']
# Logging level name (e.g. 'INFO'); consumed by the shared logger setup.
LOG_LEVEL = os.environ['LOG_LEVEL']
# Optional DB connection retry tuning, with defaults when the variable is absent.
DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
# NOTE(review): the env var names below differ from the Python constant names
# (e.g. DB_CONNECTION_RETRY_INTERVAL feeds DB_CONNECTION_RETRY_INTERVAL_INIT,
# and the MIN/MAX vars drop the 'INTERVAL_' segment) — confirm deployments set
# these exact variable names.
DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))

View File

@ -0,0 +1,22 @@
import time
class ElapsedTime:
    """Measures wall-clock elapsed time from the moment of instantiation."""

    def __init__(self) -> None:
        """Record the start instant using a monotonic high-resolution clock."""
        self.__start = time.perf_counter()

    @property
    def of(self):
        """Elapsed time since instantiation, formatted as hours/minutes/seconds.

        Returns:
            str: e.g. '01 hour 02 min 003.45 sec'; any leading unit whose
            value is zero is omitted entirely.
        """
        total_seconds = time.perf_counter() - self.__start
        hours, remainder = divmod(total_seconds, 3600)
        minutes, seconds = divmod(remainder, 60)

        parts = []
        if hours > 0.0:
            parts.append(f'{hours:02.0f} hour ')
        if minutes > 0.0:
            parts.append(f'{minutes:02.0f} min ')
        if seconds > 0.0:
            parts.append(f'{seconds:06.02f} sec')
        return ''.join(parts)

View File

@ -0,0 +1,12 @@
tests/*
.coverage
.env
.env.example
.report/*
.vscode/*
.pytest_cache/*
*/__pycache__/*
Dockerfile
pytest.ini
README.md
*.sql

View File

@ -0,0 +1,25 @@
#AWS
##Cognito
COGNITO_AUTH_DOMAIN=*******
COGNITO_IDENTITY_PROVIDER=*********
COGNITO_REDIRECT_URI=*****************
COGNITO_USER_POOL_ID=********************
COGNITO_CLIENT_ID=**********************
COGNITO_CLIENT_SECRET=******************************
##DynamoDB
SESSION_TABLE_NAME=***********************
##S3
BIO_ACCESS_LOG_BUCKET=*******************
#MySQL
DB_HOST=************
DB_PORT=************
DB_USERNAME=************
DB_PASSWORD=************
DB_SCHEMA=src05
#実装の設定(task_settingsに設定するパラメータ)
AWS_REGION=ap-northeast-1
AUTHORIZE_ENDPOINT=oauth2/authorize
TOKEN_ENDPOINT=oauth2/token
BIO_SEARCH_RESULT_MAX_COUNT=35000
SESSION_EXPIRE_MINUTE=20

6
ecs/jskult-webapp/.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
__pycache__
.vscode/settings.json
.env
src/data/*
!src/data/BioData_template.xlsx

17
ecs/jskult-webapp/.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,17 @@
{
// IntelliSense 使
//
// : https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch AP Server(FastAPI)",
"type": "python",
"request": "launch",
"module": "uvicorn",
"args": ["src.main:app","--reload", "--no-server-header"],
"justMyCode": true,
"envFile": "${workspaceFolder}/.env"
}
]
}

View File

@ -0,0 +1,18 @@
{
"[python]": {
"editor.defaultFormatter": null,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
//
"python.defaultInterpreterPath": "<pythonインタプリターのパス>",
"python.linting.lintOnSave": true,
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Args": ["--max-line-length=120"],
"python.formatting.provider": "autopep8",
"python.formatting.autopep8Args": ["--max-line-length", "120"]
}

View File

@ -0,0 +1,19 @@
# NOTE(review): consider pinning a more specific, slimmer tag (e.g. python:3.9-slim
# with a patch version or digest) for reproducible, smaller images.
FROM python:3.9

ENV TZ="Asia/Tokyo"

WORKDIR /usr/src/app

# Copy only the dependency manifests first so the install layer stays cached
# until Pipfile / Pipfile.lock actually change.
COPY Pipfile Pipfile.lock ./
RUN \
    # apt-get has a stable CLI for scripts (apt warns when used non-interactively)
    apt-get update -y && \
    # apply pending security updates for base-image packages only
    apt-get install -y --no-install-recommends unattended-upgrades && \
    unattended-upgrades && \
    # drop the apt package lists in the same layer so they never bloat the image
    rm -rf /var/lib/apt/lists/* && \
    pip install --no-cache-dir --upgrade pip wheel setuptools && \
    pip install pipenv --no-cache-dir && \
    pipenv install --system --deploy && \
    # pipenv is a build-time tool only; remove it from the runtime image
    pip uninstall -y pipenv virtualenv-clone virtualenv

COPY src ./src

# Documentation only (does not publish the port): gunicorn binds 0.0.0.0:80 below.
EXPOSE 80

# Exec form so gunicorn runs as PID 1 and receives SIGTERM from `docker stop`.
CMD ["gunicorn", "src.main:app", "-w", "4", "-k" ,"uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:80"]

34
ecs/jskult-webapp/Pipfile Normal file
View File

@ -0,0 +1,34 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[scripts]
app = "uvicorn src.main:app --reload --no-server-header"
[packages]
fastapi = "*"
uvicorn = "*"
gunicorn = "*"
boto3 = "*"
jinja2 = "*"
pyjwt = "*"
"pyjwt[crypto]" = "*"
requests = "*"
python-multipart = "*"
pynamodb = "*"
PyMySQL = "*"
pandas = "*"
openpyxl = "*"
xlrd = "*"
sqlalchemy = "*"
[dev-packages]
autopep8 = "*"
flake8 = "*"
[requires]
python_version = "3.9"
[pipenv]
# NOTE(review): allow_prereleases lets release candidates into the lockfile
# (e.g. pandas==2.0.0rc0 in Pipfile.lock) — confirm this is intended for production.
allow_prereleases = true

765
ecs/jskult-webapp/Pipfile.lock generated Normal file
View File

@ -0,0 +1,765 @@
{
"_meta": {
"hash": {
"sha256": "d78a6bf1a96aa14c45431185961cae6d54ca1da8ea0319e1976bad4c2bebd673"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"anyio": {
"hashes": [
"sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421",
"sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"
],
"markers": "python_full_version >= '3.6.2'",
"version": "==3.6.2"
},
"boto3": {
"hashes": [
"sha256:278d896e9090a976f41ec68da5c572bc4e5b7cb1e515f1898fee8cb2fadfb50d",
"sha256:3ce2225a61832d69831d669d912424ea3863268ca1cfa2a82203bb90952acefa"
],
"index": "pypi",
"version": "==1.26.91"
},
"botocore": {
"hashes": [
"sha256:4ed6a488aee1b42367eace71f7d0993dda05b02eebd7dcdd78db5c9ce3d80da5",
"sha256:a8a800a2a945da807758cace539fc5b5ec1d5082ce363799d3a3870c2c4ed6fc"
],
"markers": "python_version >= '3.7'",
"version": "==1.29.91"
},
"certifi": {
"hashes": [
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
],
"markers": "python_version >= '3.6'",
"version": "==2022.12.7"
},
"cffi": {
"hashes": [
"sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5",
"sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef",
"sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104",
"sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426",
"sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405",
"sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375",
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a",
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e",
"sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc",
"sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf",
"sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185",
"sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497",
"sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3",
"sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35",
"sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c",
"sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83",
"sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21",
"sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca",
"sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984",
"sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac",
"sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd",
"sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee",
"sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a",
"sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2",
"sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192",
"sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7",
"sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585",
"sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f",
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e",
"sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27",
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b",
"sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e",
"sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e",
"sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d",
"sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c",
"sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415",
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82",
"sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02",
"sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314",
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325",
"sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c",
"sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3",
"sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914",
"sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045",
"sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d",
"sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9",
"sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5",
"sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2",
"sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c",
"sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3",
"sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2",
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8",
"sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d",
"sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d",
"sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9",
"sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162",
"sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76",
"sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4",
"sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e",
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9",
"sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6",
"sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b",
"sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01",
"sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"
],
"version": "==1.15.1"
},
"charset-normalizer": {
"hashes": [
"sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6",
"sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1",
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e",
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373",
"sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62",
"sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230",
"sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be",
"sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c",
"sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0",
"sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448",
"sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f",
"sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649",
"sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d",
"sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0",
"sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706",
"sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a",
"sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59",
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23",
"sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5",
"sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb",
"sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e",
"sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e",
"sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c",
"sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28",
"sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d",
"sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41",
"sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974",
"sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce",
"sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f",
"sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1",
"sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d",
"sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8",
"sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017",
"sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31",
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7",
"sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8",
"sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e",
"sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14",
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd",
"sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d",
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795",
"sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b",
"sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b",
"sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b",
"sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203",
"sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f",
"sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19",
"sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1",
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a",
"sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac",
"sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9",
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0",
"sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137",
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f",
"sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6",
"sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5",
"sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909",
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f",
"sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0",
"sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324",
"sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755",
"sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb",
"sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854",
"sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c",
"sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60",
"sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84",
"sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0",
"sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b",
"sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1",
"sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531",
"sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1",
"sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11",
"sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326",
"sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df",
"sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"
],
"markers": "python_version >= '3.7'",
"version": "==3.1.0"
},
"click": {
"hashes": [
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
],
"markers": "python_version >= '3.7'",
"version": "==8.1.3"
},
"cryptography": {
"hashes": [
"sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1",
"sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7",
"sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06",
"sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84",
"sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915",
"sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074",
"sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5",
"sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3",
"sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9",
"sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3",
"sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011",
"sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536",
"sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a",
"sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f",
"sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480",
"sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac",
"sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0",
"sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108",
"sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828",
"sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354",
"sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612",
"sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3",
"sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"
],
"version": "==39.0.2"
},
"et-xmlfile": {
"hashes": [
"sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c",
"sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"
],
"markers": "python_version >= '3.6'",
"version": "==1.1.0"
},
"fastapi": {
"hashes": [
"sha256:451387550c2d25a972193f22e408a82e75a8e7867c834a03076704fe20df3256",
"sha256:4a75936dbf9eb74be5eb0d41a793adefe9f3fc6ba66dbdabd160120fd3c2d9cd"
],
"index": "pypi",
"version": "==0.94.1"
},
"greenlet": {
"hashes": [
"sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a",
"sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a",
"sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43",
"sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33",
"sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8",
"sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088",
"sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca",
"sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343",
"sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645",
"sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db",
"sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df",
"sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3",
"sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86",
"sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2",
"sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a",
"sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf",
"sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7",
"sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394",
"sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40",
"sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3",
"sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6",
"sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74",
"sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0",
"sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3",
"sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91",
"sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5",
"sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9",
"sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8",
"sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b",
"sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6",
"sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb",
"sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73",
"sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b",
"sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df",
"sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9",
"sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f",
"sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0",
"sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857",
"sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a",
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249",
"sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30",
"sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292",
"sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b",
"sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d",
"sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b",
"sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c",
"sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca",
"sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7",
"sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75",
"sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae",
"sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b",
"sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470",
"sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564",
"sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9",
"sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099",
"sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0",
"sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5",
"sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19",
"sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1",
"sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"
],
"markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))",
"version": "==2.0.2"
},
"gunicorn": {
"hashes": [
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e",
"sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"
],
"index": "pypi",
"version": "==20.1.0"
},
"h11": {
"hashes": [
"sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d",
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"
],
"markers": "python_version >= '3.7'",
"version": "==0.14.0"
},
"idna": {
"hashes": [
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
],
"markers": "python_version >= '3.5'",
"version": "==3.4"
},
"jinja2": {
"hashes": [
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
],
"index": "pypi",
"version": "==3.1.2"
},
"jmespath": {
"hashes": [
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
],
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
"markupsafe": {
"hashes": [
"sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed",
"sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc",
"sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2",
"sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460",
"sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7",
"sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0",
"sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1",
"sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa",
"sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03",
"sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323",
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65",
"sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013",
"sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036",
"sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f",
"sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4",
"sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419",
"sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2",
"sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619",
"sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a",
"sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a",
"sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd",
"sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7",
"sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666",
"sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65",
"sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859",
"sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625",
"sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff",
"sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156",
"sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd",
"sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba",
"sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f",
"sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1",
"sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094",
"sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a",
"sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513",
"sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed",
"sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d",
"sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3",
"sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147",
"sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c",
"sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603",
"sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601",
"sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a",
"sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1",
"sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d",
"sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3",
"sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54",
"sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2",
"sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6",
"sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"
],
"markers": "python_version >= '3.7'",
"version": "==2.1.2"
},
"numpy": {
"hashes": [
"sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22",
"sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f",
"sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9",
"sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96",
"sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0",
"sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a",
"sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281",
"sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04",
"sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468",
"sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253",
"sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756",
"sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a",
"sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb",
"sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d",
"sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0",
"sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910",
"sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978",
"sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5",
"sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f",
"sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a",
"sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5",
"sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2",
"sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d",
"sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95",
"sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5",
"sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d",
"sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780",
"sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"
],
"markers": "python_version < '3.10'",
"version": "==1.24.2"
},
"openpyxl": {
"hashes": [
"sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184",
"sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"
],
"index": "pypi",
"version": "==3.1.2"
},
"pandas": {
"hashes": [
"sha256:008aa9843e92753d1345353e643c51017d8a9e303041db3165b683fc16a4d380",
"sha256:1f060ae468cb24e1ab42c6344b097375b24a902d3cefb5524f93ef0cd0db5f4b",
"sha256:2379d66055592480aab24cda5b1543539302e0f85e9a33538e9e4fd309b3063e",
"sha256:26a507e14dc9a5ef29239b85d0ef5f01a7e308b88781b451a415d9d15e2d1a61",
"sha256:314bc00a0575151d3ec3124af23bf2ef7533b0e160fb138007a4ef1b3c6a0e63",
"sha256:3935c394e1b10d5c311bd9378018a468283adfe8469dc8084e21d55ca06be979",
"sha256:47f116fcb3aa533ab6661ca391136a643e25d1387dae989ed3e5b9248b98e2e9",
"sha256:4e99adf0a3b4e040fad8823567b52eacfd48db50d11024244a60197430ec74b8",
"sha256:67a5251a821b5af1c5aefe5a610a7758fae04693434fb98b2ebad10349cd727a",
"sha256:7bb2d670c1f7de9bcef0986ae9f832fbd99acc43db1d5fe22f2f06bda8a67d43",
"sha256:7fc7c85fcf27726633751d064f4d115dbccb202b0b6ea2909b6d89ca071115e3",
"sha256:8010e4c988c2c2ed1f5763a6e579448a13a7c87b810400124bb872121c9ca3f9",
"sha256:867fd5c3325c302e8feaaa7ec2d99c224be38551d8a9e1ae5d15be7e04424172",
"sha256:8cb4789c8b1f361d7b07a25002e871546b108519af9c176f8a5ca66316c09d90",
"sha256:8ce8603f8cf07044458914b81bb7445b6cc31d381657e0fac21b3eee40f404d0",
"sha256:adc1e91f282426d37830837f108747f0628e7635b1e83b2401b4f7e2a0068a82",
"sha256:b72ba4e9553645c0bfd688a4e89efe9694fb2936adb5c6295d31626233cb674a",
"sha256:c3c3be69e186d12a94004b0c76bb390e26b48e4b444f3adc86d2cf6506c71d99",
"sha256:cf960fc1f2545114b9ed1a0f025d6de63c891df31640e454e333e3b38504d36b",
"sha256:dc45eb7f23c92e0aa5278bb210fb30136e6e0b760636cf18874cdf2d6448df0f",
"sha256:e5ebb19a66d8c4a4563e6cb628a23ee6898dc50e5dfe8b73c692cd7ea81def0a",
"sha256:e817d97597be5c21b1a66cbecadd0d0242482b72f6f5b60129fce5cec329e274",
"sha256:e829b927b156f85432390580d8799dfee59db0be3954235cf5f5df8a42eaaacd",
"sha256:ebc301fb34185275d9ad57838f533d5413a02b434174d1be89785141f785b226",
"sha256:f082e075aeac904db0e69d8b8acc1d610362e3d823ace3af029622b24b105900"
],
"index": "pypi",
"version": "==2.0.0rc0"
},
"pycparser": {
"hashes": [
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
"sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
],
"version": "==2.21"
},
"pydantic": {
"hashes": [
"sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62",
"sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35",
"sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832",
"sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a",
"sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06",
"sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4",
"sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6",
"sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7",
"sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f",
"sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70",
"sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186",
"sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd",
"sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d",
"sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c",
"sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f",
"sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65",
"sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc",
"sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2",
"sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d",
"sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a",
"sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7",
"sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb",
"sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb",
"sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77",
"sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7",
"sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160",
"sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0",
"sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4",
"sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d",
"sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084",
"sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b",
"sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd",
"sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d",
"sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2",
"sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31",
"sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"
],
"markers": "python_version >= '3.7'",
"version": "==1.10.6"
},
"pyjwt": {
"extras": [
"crypto"
],
"hashes": [
"sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd",
"sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"
],
"index": "pypi",
"version": "==2.6.0"
},
"pymysql": {
"hashes": [
"sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641",
"sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"
],
"index": "pypi",
"version": "==1.0.2"
},
"pynamodb": {
"hashes": [
"sha256:3c4d10867d59e6d7a2b54ee4ae213f1021d6f50ff93145e3909784bfc2b7560e",
"sha256:e09c39880560e10251778185b3d0c7a97ee8f42ab363a940c674e9330b61bf9d"
],
"index": "pypi",
"version": "==5.4.1"
},
"python-dateutil": {
"hashes": [
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.2"
},
"python-multipart": {
"hashes": [
"sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132",
"sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"
],
"index": "pypi",
"version": "==0.0.6"
},
"pytz": {
"hashes": [
"sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0",
"sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"
],
"version": "==2022.7.1"
},
"requests": {
"hashes": [
"sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa",
"sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"
],
"index": "pypi",
"version": "==2.28.2"
},
"s3transfer": {
"hashes": [
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
],
"markers": "python_version >= '3.7'",
"version": "==0.6.0"
},
"setuptools": {
"hashes": [
"sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077",
"sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"
],
"markers": "python_version >= '3.7'",
"version": "==67.6.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sniffio": {
"hashes": [
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101",
"sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"
],
"markers": "python_version >= '3.7'",
"version": "==1.3.0"
},
"sqlalchemy": {
"hashes": [
"sha256:1df00f280fcf7628379c6838d47ac6abd2319848cb02984af313de9243994db8",
"sha256:1fd154847f2c77128e16757e3fd2028151aa8208dd3b9a5978918ea786a15312",
"sha256:20f36bff3b6c9fa94e40114fda4dc5048d40fd665390f5547b456a28e8059ee8",
"sha256:224c817e880359d344a462fc4dd94a233804f371aa290b024b6b976a2f5ade36",
"sha256:2ad44f45526411bebbf427cf858955a35f3a6bfd7db8f4314b12da4c0d1a4fd2",
"sha256:2c4c64f321080c83a3f0eed11cc9b73fe2a574f6b8339c402861274165c24cf6",
"sha256:3625a52fae744cff6f9beb6ed0775468b9eb7e6e8f6730676dfc49aa77d98b4e",
"sha256:3be54b3825512b3de5698ae04bf4aad6ea60442ac0f6b91ee4b8fa4db5c2dccd",
"sha256:4100c80070a66b042f1010b29b29a88d1d151c27a5e522c95ec07518b361a7a3",
"sha256:47e96be3e8c9c0f2c71ec87599be4bb8409d61841b66964a36b2447bec510b3b",
"sha256:483712fce53e2f7ec95ed7d106cd463f9fc122c28a7df4aaf2bc873d0d2a901f",
"sha256:48824b989a0e4340cd099dd4539702ddb1a5ce449f8a7355124e40a4935a95fa",
"sha256:4d653962da384a1d99795dbd8aac4a7516071b2f2984ed2aa25545fae670b808",
"sha256:5b067b2eaf3d97a49f3f6217981efa7b45d5726c2142f103712b020dd250fd98",
"sha256:5c35175b74cbcfe9af077bd13e87cfab13239e075c0e1e920095082f9377f0ed",
"sha256:61abff42e44e5daf17372cb8baa90e970dc647fc5f747e2caa9f9768acf17be8",
"sha256:6987f658389ad8bb6257db91551e7fde3e904974eef6f323856260907ef311d7",
"sha256:709f1ecb5dcea59f36fa0f485e09e41ff313b2d62c83a6f99b36870b0d6e42fa",
"sha256:7635cd38e3ea8522729b14451157104fce2117c44e7ba6a14684ed153d71b567",
"sha256:778db814cc21eff200c8bd42b4ffe976fa3378d10fb84d2c164d3c6a30bb38ee",
"sha256:81d4fc8f5c966677a3a2f39eb8e496442269d8c7d285b28145f7745fcc089d63",
"sha256:82691d3539023c3cee5ae055c47bf873728cd6b33bfaa7b916bea5a99b92f700",
"sha256:8ef7c56c74f4420b2c4a148d2531ba7f99b946cbf438a2bbcb2435fb4938a08d",
"sha256:9310666251385e4374c6f0bae6d69e62bc422021298ceb8669bf6ff56957ff37",
"sha256:ac6274dd530b684cca8cbb774e348afac6846f15d1694a56954413be6e2e8dcd",
"sha256:b7be0e6a4061d28b66ca4b4eb24558dd8c6386d3bcd2d6d7ef247be27cf1281b",
"sha256:bea2c1341abe9bc6f30071b8ada1a3c44f24ec0fe1b9418e9c1112ed32057c9e",
"sha256:bfcadfb8f0a9d26a76a5e2488cedd2e7cf8e70fe76d58aeb1c85eb83b33cbc5c",
"sha256:bfce790746d059af6d0bc68b578ba20d50a63c71a3db16edce7aa8eccdd73796",
"sha256:bfde1d7cf8b9aa6bbd0d53946cd508d76db7689afd442e2289642cdc8908b7b7",
"sha256:c343f0b546495f5d7a239c70bf50a99a48d7321c165b82afafa8483b9ebebf6e",
"sha256:c5d754665edea1ecdc79e3023659cb5594372e10776f3b3734d75c2c3ce95013",
"sha256:c76caced0c8e9129810895f71954c72f478e30bea7d0bba7130bade396be5048",
"sha256:ca147d9cde38b481085408e1d4277ee834cb88bcc31bc01933bc6513340071bc",
"sha256:d7bd001a40997f0c9a9ac10a57663a9397959966a5a365bb24a4d1a17aa60175",
"sha256:db91fe985f2264ab49b3450ab7e2a59c34f7eaf3bf283d6b9e2f9ee02b29e533",
"sha256:e0e270a4f5b42c67362d9c6af648cb86f6a00b20767553cfd734c914e1e2a5e0",
"sha256:ed714b864349704a7a719ec7199eec3f9cd15c190ecf6e10c34b5a0c549c5c18",
"sha256:edc16c8e24605d0a7925afaf99dbcbdc3f98a2cdda4622f1ea34482cb3b91940",
"sha256:f47709c98544384d390aed34046f0573df5725d22861c0cd0a5c151bc22eedff",
"sha256:ff10ad2d74a9a79c2984a2c709943e5362a1c898d8f3414815ea57515ae80c84"
],
"index": "pypi",
"version": "==2.0.6"
},
"starlette": {
"hashes": [
"sha256:41da799057ea8620e4667a3e69a5b1923ebd32b1819c8fa75634bbe8d8bea9bd",
"sha256:e87fce5d7cbdde34b76f0ac69013fd9d190d581d80681493016666e6f96c6d5e"
],
"markers": "python_version >= '3.7'",
"version": "==0.26.1"
},
"typing-extensions": {
"hashes": [
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
],
"markers": "python_version >= '3.7'",
"version": "==4.5.0"
},
"urllib3": {
"hashes": [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.26.15"
},
"uvicorn": {
"hashes": [
"sha256:8635a388062222082f4b06225b867b74a7e4ef942124453d4d1d1a5cb3750932",
"sha256:e69e955cb621ae7b75f5590a814a4fcbfb14cb8f44a36dfe3c5c75ab8aee3ad5"
],
"index": "pypi",
"version": "==0.21.0"
},
"xlrd": {
"hashes": [
"sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd",
"sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"
],
"index": "pypi",
"version": "==2.0.1"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1",
"sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c"
],
"index": "pypi",
"version": "==2.0.2"
},
"flake8": {
"hashes": [
"sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7",
"sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"
],
"index": "pypi",
"version": "==6.0.0"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"pycodestyle": {
"hashes": [
"sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053",
"sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"
],
"markers": "python_version >= '3.6'",
"version": "==2.10.0"
},
"pyflakes": {
"hashes": [
"sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf",
"sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"
],
"markers": "python_version >= '3.6'",
"version": "==3.0.1"
},
"tomli": {
"hashes": [
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
],
"markers": "python_version < '3.11'",
"version": "==2.0.1"
}
}
}

197
ecs/jskult-webapp/README.md Normal file
View File

@ -0,0 +1,197 @@
# 実消化&アルトマーク Webアプリケーション
## 概要
実消化&アルトマークのWebアプリケーション。
以下の機能を提供する
- アルトマークデータ照会(施設・医師)
- 生物由来データ照会
- マスターメンテナンス
## 環境情報
- Python 3.9
- MySQL 8.x
- FastAPI
- PythonのWebアプリケーションフレームワーク
- VSCode
## 環境構築
- Pythonの構築
- Merck_NewDWH開発2021のWiki、[Python環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照
- 「Pipenvの導入」までを行っておくこと
- 構築完了後、プロジェクト配下で以下のコマンドを実行し、Pythonの仮想環境を作成する
- `pipenv install --python <pyenvでインストールしたpythonバージョン>`
- この手順で出力される仮想環境のパスは、後述するVSCodeの設定手順で使用するため、控えておく
- MySQLの環境構築
- Windowsの場合、以下のリンクからダウンロードする
- <https://dev.mysql.com/downloads/installer/>
- Dockerを利用する場合、「newsdwh-tools」リポジトリのMySQL設定を使用すると便利
- 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行する
- `docker-compose up -d`
- Dockerの構築手順は、[Dockerのセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと
- データを投入する
- 立ち上げたデータベースに「src05」スキーマを作成する
- [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysqlコマンドを使用して復元する
- `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql`
- 環境変数の設定
- 「.env.example」ファイルをコピーし、「.env」ファイルを作成する
- 環境変数を設定する。設定内容はPRJメンバーより共有を受けてください
- VSCodeの設定
- 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する
- 「python.defaultInterpreterPath」を、Pythonの構築手順で作成した仮想環境のパスに変更する
## 実行
- VSCode上で「F5」キーを押下すると、Webアプリケーションのサーバーが起動する
- 「<http://localhost:8000/maintlogin>」にアクセスし、ログイン画面が表示されていれば成功
## フォルダ構成
```text
.
├── Dockerfile -- Dockerイメージを作成するためのファイル
├── Pipfile -- Pythonモジュールの依存関係を管理するファイル
├── Pipfile.lock -- Pythonモジュールの依存関係バージョン固定用ファイル
├── README.md -- 当ファイル
└── src -- ソースコードの保管場所
├── aws -- AWSリソース操作用のコード
│   ├── aws_api_client.py
│   ├── cognito.py
│   └── s3.py
├── controller -- ルーティング層。基本的に1画面1つ
│   ├── bio.py
│   ├── bio_download.py
│   ├── login.py
│   ├── logout.py
│   └── menu.py
├── core -- APサーバーのコア設定。
│   └── tasks.py -- 起動・終了時に実行するタスクを設定。
├── data -- 生物由来照会のエクセルファイルテンプレート。
│   └── BioData_template.xlsx
├── db -- データベース関連処理。
│   ├── database.py -- データベース接続、クエリ発行の共通モジュール。
│   ├── sql_condition.py -- SQLの条件式を組み立てるためのモジュール
│   └── tasks.py -- coreに渡すタスク。サーバー起動時にDBとの接続モジュールの初期化、終了時にインスタンス破棄を行っている。
├── depends -- FastAPIの依存性注入(DI)で使用するモジュールの置き場。Dependsで利用想定。
│   ├── auth.py -- セッション等の認証関連
│   ├── database.py -- リポジトリ層をコントローラーにDIするためのもの
│   └── services.py -- サービス層をコントローラーにDIするためのもの
├── error -- エラー処理関連のモジュール置き場
│   ├── exception_handler.py -- FastAPI内部でエラー発生時のハンドリング
│   └── exceptions.py -- カスタム例外クラス
├── main.py -- APサーバーのエントリーポイント。ここでルーターやハンドラーの登録を行う
├── model -- モデル層(MVCのM)
│   ├── db -- リポジトリから返されるDBレコードのモデル
│   │   ├── base_db_model.py
│   │   ├── bio_sales_view.py
│   │   ├── hdke_tbl.py
│   │   ├── pharmacy_product_master.py
│   │   ├── user_master.py
│   │   └── wholesaler_master.py
│   ├── jwt_token.py -- 認証用JWTトークンのモデル
│   ├── request -- 画面からのリクエストを受け付けるモデル
│   │   ├── bio.py
│   │   ├── bio_download.py
│   │   └── login.py
│   ├── session.py -- セッションデータのモデル
│   └── view -- ビューモデル。画面に対応したモデル。
│   ├── bio_disp_model.py
│   ├── bio_view_model.py
│   ├── logout_view_model.py
│   ├── mainte_login_view_model.py
│   ├── menu_view_model.py
│   └── user_view_model.py
├── repositories -- リポジトリ層。DB操作モジュール置き場。
│   ├── base_repository.py
│   ├── bio_sales_view_repository.py
│   ├── hdke_tbl_repository.py
│   ├── pharmacy_product_master_repository.py
│   ├── user_master_repository.py
│   └── wholesaler_master_repository.py
├── router -- コントローラー層の共通ルーティングの定義
│   └── session_router.py
├── services -- サービス層。ビジネスロジックはできる限りここに押し込む
│   ├── base_service.py
│   ├── batch_status_service.py
│   ├── bio_view_service.py
│   ├── login_service.py
│   └── session_service.py
├── static -- 静的ファイルの配信ルートディレクトリ
│   ├── css
│   │   ├── bioStyle.css
│   │   ├── datepicker.css
│   │   ├── menuStyle.css
│   │   └── pagenation.css
│   ├── function
│   │   └── businessLogicScript.js
│   ├── img
│   │   ├── icon_modal_confirm.png
│   │   └── icon_modal_error.png
│   ├── lib
│   │   └── fixed_midashi.js
│   ├── sample.css
│   └── sample.js
├── system_var -- システム変数
│   ├── constants.py -- 定数
│   └── environment.py -- 環境変数
├── templates -- ビューテンプレートエンジンの格納場所(Jinja2)
│   ├── _header.html -- 共通ヘッダー
│   ├── _modal.html -- モーダルの部品
│   ├── bioSearchList.html
│   ├── logout.html
│   ├── maintlogin.html
│   ├── menu.html
└── util -- ユーティリティ関数置き場
├── sanitize.py -- モデルクラスのサニタイズを行うデコレータ
└── string_util.py -- 文字列操作関連のユーティリティ
```
## (参考)ファイルの追いかけ方
- APサーバーそのものは「src/main」にある。
- ルーター、例外処理ハンドラ、開始終了タスクの設定、静的ファイルディレクトリのマウントを行っている
- URLパスに対する操作の実装は、「controller」フォルダを見る
- `@router.xxx``xxx`の部分がHTTPメソッドに相当する。このデコレータが付与された関数が、HTTPメソッドを処理するパスオペレーション関数となる
- パスオペレーション関数の引数には、リクエストで受け取るパラメータと、その関数内で使用できる依存関係を注入できる
- Request型と、Response型の引数は、その名の通り。
- str型, int型などの引数を指定した場合、その引数はクエリストリングを意味する
- Dependsで初期値が設定される引数は、Depends関数に渡した関数が処理されてから代入される
- たとえば、`get_service`関数にサービスクラスの型を渡すと、get_service関数でサービスクラスのインスタンスを作成して返してくれる
- サービスクラスはリポジトリクラスに依存しているので、自分でインスタンスを組み立てる手間が省ける
- formやリクエストボディのJSONを受け取る場合、リクエスト用のモデルクラスに「as_form」や「as_body」などの関数を実装し、リクエストを受け取れるようにする
- ビジネスロジックは基本的にサービスクラスに押し込む。コントローラーではそのサービスクラスをDependsで依存して利用すること
- ビジネスロジックに相当するサービスクラスは「services」フォルダに格納する。共通実装は以下。
- REPOSITORIES定数に、依存するリポジトリクラスを辞書形式で指定する
- CLIENTS定数に、依存するAWS APIクライアントクラスを辞書形式で指定する
- `__init__`コンストラクタ内で、2つの定数に指定したキーに紐付いたインスタンスが渡ってくるため、インスタンス変数として登録する
- あとは、ビジネスロジックにあたる関数を生やしていく
- DBへのアクセスを行うリポジトリクラスは「repositories」フォルダに格納する。
- SQL文を用意し、`_db`インスタンス変数のメソッドを利用してクエリを実行する。
- 必要に応じて条件設定をする。条件設定には`SQLCondition`クラスを使用する。
- `BioSalesViewRepository`のやり方が参考になる
- リポジトリクラスは、サービスクラスで利用するようにする(そのために、サービスクラス側に依存関係を書いている)
- モデルクラスは、「models」フォルダに格納する
- HTTPリクエスト用のモデルクラスは「request」フォルダへ
- ビュー表示用のモデルクラス(View Model)は「view」フォルダへ
- 画面への項目埋め込みや、非表示の制御を行うため、View Modelに値を詰めて、テンプレート側で操作するようにする
- DBから取得したレコードのモデルクラスは「db」フォルダへ
- 内部処理に利用するモデルクラスは「internal」フォルダへ
- 「static」フォルダは、静的ファイルの置き場所
- CSS, JS、画像ファイルを置く。
- 画面の細かな制御は「BusinessLogicScript.js」で行っている。現行からちょこちょこ変える必要がある。
- CSSも、現行は画面ごとに分かれているが、一つのベーススタイルにまとめたい
- 「templates」フォルダは、テンプレートエンジンを格納する
- 各画面1つのテンプレートエンジンを用意する
- 内部で使う変数は、コントローラーで「templates.TemplateResponse」に詰めて渡す
- テンプレート内でincludeする用途のテンプレートは先頭に「_」をつける
- `_header.html`は、`<head>`タグ内に記載する部品。共通的に読み込むCSS等のファイルを指定する。
- PHPでは分岐などをベタ書きしているが、View Modelに宣言的な関数を用意して、可読性を向上させている。
- コントローラーの共通処理は、「router」フォルダ内のモジュールで実装している
- コントローラーのrouter変数が、`router.route_class = AfterSetCookieSessionRoute`となっている場合、レスポンス時、クッキーにセッションキーを登録する動きをする
- コントローラーのrouter変数が、`router.route_class = Authenticate`となっている場合、以下の動きをする
- リクエスト到達時にセッションの有無をチェックする
- レスポンス時、クッキーにセッションキーを登録する

View File

View File

View File

@ -0,0 +1,5 @@
from abc import ABCMeta
class AWSAPIClient(metaclass=ABCMeta):
    """Abstract marker base class shared by the thin AWS API client wrappers
    (Cognito, S3, ...). Declares no abstract methods; it exists so services
    can type their AWS-client dependencies against a common base.
    """

View File

@ -0,0 +1,25 @@
import boto3
from src.aws.aws_api_client import AWSAPIClient
from src.system_var import environment
class CognitoClient(AWSAPIClient):
    """Thin wrapper around the Cognito IdP API (boto3 ``cognito-idp`` client)."""

    def __init__(self) -> None:
        # One boto3 client per CognitoClient instance.
        self.__idp_client = boto3.client('cognito-idp')

    def login_by_user_password_flow(self, username: str, password: str, secret_hash: str):
        """Authenticate via the ADMIN_USER_PASSWORD_AUTH flow.

        Returns a ``(IdToken, RefreshToken)`` tuple from the Cognito
        authentication result. Pool/client ids come from the environment module.
        """
        auth_parameters = {
            'USERNAME': username,
            'PASSWORD': password,
            'SECRET_HASH': secret_hash
        }
        auth_response = self.__idp_client.admin_initiate_auth(
            UserPoolId=environment.COGNITO_USER_POOL_ID,
            ClientId=environment.COGNITO_CLIENT_ID,
            AuthFlow='ADMIN_USER_PASSWORD_AUTH',
            AuthParameters=auth_parameters,
        )
        result = auth_response['AuthenticationResult']
        return result['IdToken'], result['RefreshToken']

View File

@ -0,0 +1,32 @@
from urllib.parse import quote
import boto3
from src.aws.aws_api_client import AWSAPIClient
class S3Client(AWSAPIClient):
    """Thin wrapper around the S3 API (upload + presigned download URLs)."""

    # Shared class-level client, created once at import time.
    __s3_client = boto3.client('s3')

    def upload_file(self, local_file_path: str, bucket_name: str, file_key: str):
        """Upload a local file to ``bucket_name`` under ``file_key``."""
        self.__s3_client.upload_file(
            local_file_path,
            Bucket=bucket_name,
            Key=file_key
        )

    def generate_presigned_url(self, bucket_name: str, file_key: str, download_filename: str=''):
        """Create a presigned GET URL for the object.

        ``download_filename`` overrides the browser-visible file name via
        Content-Disposition; non-ASCII characters must be percent-escaped
        with ``quote``. The URL expires after 20 minutes.
        """
        request_params = {
            'Bucket': bucket_name,
            'Key': file_key,
            # Rename-on-download trick; Unicode needs quote-escaping.
            'ResponseContentDisposition': f'attachment; filename="{quote(download_filename)}"'
        }
        url = self.__s3_client.generate_presigned_url(
            'get_object',
            Params=request_params,
            # 20-minute expiry
            ExpiresIn=1200
        )
        return url

View File

@ -0,0 +1,87 @@
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.exceptions import HTTPException
from starlette import status
from src.depends.services import get_service
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.view.bio_view_model import BioViewModel
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.bio_view_service import BioViewService
from src.services.session_service import set_session
from src.system_var import constants
from src.templates import templates
router = APIRouter()
router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
@router.get('/bio/BioSearchList')
def bio_view(
    request: Request,
    batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService)),
    bio_service: BioViewService=Depends(get_service(BioViewService))
):
    """Render the biological-origin data search screen (GET)."""
    session: UserSession = request.session
    # While a batch run is in progress the feature is locked out.
    if batch_status_service.is_batch_processing():
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BATCH_PROCESSING)
    # Build the view model carrying the search-form choices.
    view_model = bio_service.prepare_bio_view(session)
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    # The session key travels back in a response header so the route class
    # can re-issue the session cookie.
    return templates.TemplateResponse(
        'bioSearchList.html', {
            'request': request,
            'bio': view_model,
        },
        headers={'session_key': session_key}
    )
@router.post('/bio/BioSearchList')
def search_bio(
    request: Request,
    bio_form: Optional[BioModel] = Depends(BioModel.as_form),
    bio_service: BioViewService=Depends(get_service(BioViewService)),
    batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService))
):
    """Run a biological-origin data search and re-render the list screen (POST)."""
    # NOTE: the legacy PHP page wrote the user id to an error_log here;
    # that logging was not ported.
    session: UserSession = request.session
    # While a batch run is in progress the feature is locked out.
    if batch_status_service.is_batch_processing():
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BATCH_PROCESSING)
    # Execute the search first, then rebuild the form view model and attach
    # both the results and the submitted criteria for redisplay.
    search_result = bio_service.search_bio_data(bio_form)
    view_model: BioViewModel = bio_service.prepare_bio_view(session)
    view_model.bio_data = search_result
    view_model.form_data = bio_form
    # Refresh the session's last-access / expiration timestamps.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    session_key = set_session(session)
    return templates.TemplateResponse(
        'bioSearchList.html', {
            'request': request,
            'bio': view_model
        },
        headers={'session_key': session_key}
    )

View File

@ -0,0 +1,121 @@
"""生物由来ファイルダウンロード APIRoute"""
from datetime import datetime
from typing import Union
from fastapi import APIRouter, Depends, HTTPException
from fastapi.exceptions import HTTPException
from fastapi.responses import JSONResponse
from starlette import status
from src.depends.auth import verify_session
from src.depends.services import get_service
from src.error.exceptions import DBException
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.request.bio_download import BioDownloadModel
from src.services.batch_status_service import BatchStatusService
from src.services.bio_view_service import BioViewService
from src.services.session_service import set_session
from src.system_var import constants
router = APIRouter()
#########################
# APIs #
#########################
@router.post('/api/bio/download')
async def download_bio_data(
    search_param: BioModel=Depends(BioModel.as_body),
    download_param: BioDownloadModel=Depends(BioDownloadModel.as_body),
    bio_service: BioViewService = Depends(get_service(BioViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    session: Union[UserSession, None]=Depends(verify_session)
):
    """Search biological-origin data, write it to an xlsx/csv file, upload
    the file to S3 and return a presigned download URL.

    Registered on a dedicated router because its session/routing handling
    differs from the normal view routes. Returns JSON with ``status`` and,
    on success, ``download_url`` (``None`` when the search matched nothing).
    Raises HTTP 500 for DB/S3 failures and HTTP 400 for an unknown
    ``download_param.kind``.
    """
    # NOTE: the legacy PHP implementation dumped the request parameters to an
    # error_log here; that logging was intentionally not ported.
    # Timestamp embedded into the generated file name.
    now = datetime.now()
    if session is None:
        return {'status': 'session_expired'}
    # While a batch run is in progress the feature is locked out.
    if batch_status_service.is_batch_processing():
        return {'status': 'batch_processing'}
    try:
        search_result_df = bio_service.search_download_bio_data(search_param)
    except DBException as e:
        print('DB Error', e.args)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={'error': 'db_error', 'message': e.args}
        )
    if search_result_df.size < 1:
        # No rows matched: report success but omit the download URL.
        print('Bio data not found')
        return {'status': 'ok', 'download_url': None}
    # Keep only the columns written to the file; .copy() so the value
    # conversions below cannot hit pandas chained-assignment behavior.
    extract_df = search_result_df[constants.BIO_EXTRACT_COLUMNS].copy()
    # Map coded values to display names.
    # Data type -> full name
    extract_df.loc[:, 'slip_org_kbn'] = extract_df['slip_org_kbn'].apply(lambda key: constants.SLIP_ORG_KBN_FULL_NAME.get(key))
    # Data category -> Japanese name
    extract_df.loc[:, 'data_kbn'] = extract_df['data_kbn'].apply(lambda key: constants.DATA_KBN_JP_NAME.get(key))
    # Lot-number error flag -> Japanese name
    extract_df.loc[:, 'lot_no_err_flg'] = extract_df['lot_no_err_flg'].apply(lambda key: constants.LOT_NO_ERR_FLG_JP_NAME.get(key))
    # Correction timestamp / corrector are only meaningful on rows that carry
    # a pre-correction slip number; blank them out on all other rows.
    # BUGFIX: the original per-row lambda returned the WHOLE ins_dt/ins_usr
    # Series (not the row's value) when bef_slip_mgt_no was set — replaced
    # with a row-wise boolean mask.
    # NOTE(review): the original comment also mentions an "error detail type"
    # column being set here, but no such assignment existed — confirm whether
    # it was dropped intentionally.
    no_bef_slip = extract_df['bef_slip_mgt_no'].isnull()
    extract_df.loc[no_bef_slip, 'ins_dt'] = ''
    extract_df.loc[no_bef_slip, 'ins_usr'] = ''
    # Write the requested output format.
    if download_param.kind == 'xlsx':
        local_file_path = bio_service.write_excel_file(extract_df, download_param.user_id, timestamp=now)
    elif download_param.kind == 'csv':
        local_file_path = bio_service.write_csv_file(extract_df, download_param.user_id, header=constants.BIO_CSV_HEADER, timestamp=now)
    else:
        # Previously an unknown kind fell through with an empty path and
        # failed obscurely inside the S3 upload; reject it explicitly.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail={'error': 'invalid_kind', 'message': download_param.kind}
        )
    # Upload the local file to S3 and build the presigned download URL.
    try:
        bio_service.upload_bio_data_file(local_file_path)
        download_file_url = bio_service.generate_download_file_url(local_file_path, download_param.user_id, download_param.kind)
    except Exception as e:
        print('S3 access error', e.args)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={'error': 'aws_error', 'message': e.args}
        )
    # Refresh the session's last-access / expiration timestamps and persist.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    # Re-issue the session cookie alongside the JSON payload.
    json_response = JSONResponse(content={
        'status': 'ok',
        'download_url': download_file_url
    })
    json_response.set_cookie(
        key='session',
        value=session.session_key,
        max_age=20*60,
        secure=True,
        httponly=True
    )
    return json_response

View File

@ -0,0 +1,10 @@
from fastapi import APIRouter
router = APIRouter()


#########################
#         Views         #
#########################
@router.get('/healthcheck')
def healthcheck():
    """Liveness endpoint; returns a fixed OK payload."""
    payload = {'status': 'OK'}
    return payload

View File

@ -0,0 +1,150 @@
import os.path as path
import secrets
import urllib.parse as parse
from typing import Union
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import RedirectResponse
from starlette import status
from src.depends.auth import code_security
from src.depends.services import get_service
from src.error.exceptions import JWTTokenVerifyException, NotAuthorizeException
from src.model.internal.session import UserSession
from src.model.request.login import LoginModel
from src.model.view.mainte_login_view_model import MainteLoginViewModel
from src.router.session_router import AfterSetCookieSessionRoute
from src.services.login_service import LoginService
from src.services.session_service import set_session
from src.system_var import constants, environment
from src.templates import templates
router = APIRouter()
router.route_class = AfterSetCookieSessionRoute
#########################
# Views #
#########################
@router.get('/userlogin')
def login_user_redirect_view():
    """Redirect the browser to the Cognito hosted-UI authorize endpoint."""
    # Query parameters for the OAuth2 authorization-code flow.
    query_params = {
        'response_type': 'code',
        'identity_provider': environment.COGNITO_IDENTITY_PROVIDER,
        'client_id': environment.COGNITO_CLIENT_ID,
        'redirect_uri': environment.COGNITO_REDIRECT_URI
    }
    authorize_endpoint_url = '{}/{}?{}'.format(
        environment.COGNITO_AUTH_DOMAIN,
        environment.AUTHORIZE_ENDPOINT,
        parse.urlencode(query_params)
    )
    return RedirectResponse(url=authorize_endpoint_url, status_code=status.HTTP_303_SEE_OTHER)
@router.get('/maintlogin')
def login_maintenance_view(request: Request):
    """Render the maintenance-user login form."""
    context = {
        'request': request,
        'mainte_login': MainteLoginViewModel()
    }
    return templates.TemplateResponse('maintlogin.html', context)
#########################
# APIs #
#########################
@router.post('/login')
def password_login(
    response: Response,
    request: LoginModel = Depends(LoginModel.as_form),
    login_service: LoginService = Depends(get_service(LoginService))
):
    """Authenticate a maintenance user with username/password and start a session.

    Renamed from ``sso_authorize``: the GET /authorize handler later in this
    module reused that name and shadowed this function at module level
    (flake8 F811), so the rename changes no visible binding.

    Raises HTTP 401 when authentication fails or the user is unknown,
    disabled, or not a maintenance user; redirects to /menu on success.
    """
    try:
        jwt_token = login_service.login(request.username, request.password)
    except NotAuthorizeException as e:
        print(e)
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
    except JWTTokenVerifyException:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
    try:
        # BUGFIX: verify_token() can itself raise JWTTokenVerifyException; the
        # original called it outside the try, turning a bad token into a 500.
        verified_token = jwt_token.verify_token()
    except JWTTokenVerifyException:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
    # With plain (non-federated) auth the user id arrives in `cognito:username`.
    user_id = verified_token.user_id
    user_record = login_service.logged_in_user(user_id)
    # BUGFIX: check for a missing record BEFORE dereferencing it (the original
    # called is_enable_user() first, raising AttributeError for unknown users).
    if user_record is None or not user_record.is_enable_user():
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
    # Non-maintenance users are rejected as well.
    if not user_record.is_maintenance_user():
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
    # Generate a CSRF token and persist the session record (DynamoDB).
    csrf_token = secrets.token_urlsafe(32)
    session_model: UserSession = UserSession.new(
        user_id=user_id,
        id_token=verified_token.id_token,
        refresh_token=verified_token.refresh_token,
        csrf_token=csrf_token,
        bio_flg=user_record.auth_flg1,
        doc_flg=user_record.auth_flg2,
        inst_flg=user_record.auth_flg3,
        master_mainte_flg=user_record.auth_flg4,
        user_flg=user_record.mntuser_flg
    )
    session_key = set_session(session_model)
    # The session key rides back in a header so the route class can set the cookie.
    response = RedirectResponse(
        url='/menu',
        status_code=status.HTTP_303_SEE_OTHER,
        headers={'session_key': session_key}
    )
    return response
@router.get('/authorize')
def sso_authorize(
    code:Union[str, None]=Depends(code_security),
    login_service: LoginService=Depends(get_service(LoginService))
) -> Response:
    """Complete the SSO authorization-code flow and start a session.

    NOTE(review): this module defines two handlers named `sso_authorize`
    (/login and /authorize); FastAPI registers both routes, but the duplicate
    name shadows the first at module level — consider renaming one.

    Raises:
        HTTPException: 403 when no code was supplied; 401 when verification
            or the user-record checks fail.
    """
    if not code:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_NOT_LOGIN)
    # Exchange the authorization code for a token pair.
    jwt_token = login_service.login_with_security_code(code)
    try:
        # Verify the ID token signature and claims.
        verified_token = jwt_token.verify_token()
    except JWTTokenVerifyException:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
    # Extract the user id from the verified claims.
    user_id = verified_token.user_id
    user_record = login_service.logged_in_user(user_id)
    # BUGFIX: the None check must precede any attribute access — the original
    # called is_enable_user() first, which raises AttributeError on None.
    if user_record is None or not user_record.is_enable_user():
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
    # Only groupware (SSO) users may use this route.
    if not user_record.is_groupware_user():
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
    # Generate a CSRF token for the new session.
    csrf_token = secrets.token_urlsafe(32)
    # Persist the session record (DynamoDB-backed UserSession).
    session_model: UserSession = UserSession.new(
        user_id=user_id,
        id_token=verified_token.id_token,
        refresh_token=verified_token.refresh_token,
        csrf_token=csrf_token,
        bio_flg=user_record.auth_flg1,
        doc_flg=user_record.auth_flg2,
        inst_flg=user_record.auth_flg3,
        master_mainte_flg=user_record.auth_flg4,
        user_flg=user_record.mntuser_flg
    )
    session_key = set_session(session_model)
    response = RedirectResponse(
        url='/menu',
        status_code=status.HTTP_303_SEE_OTHER,
        headers={'session_key': session_key}
    )
    return response

View File

@ -0,0 +1,41 @@
from typing import Optional, Union
from fastapi import APIRouter, Depends, Request
from fastapi.responses import HTMLResponse
from src.depends.auth import verify_session
from src.model.internal.session import UserSession
from src.model.view.logout_view_model import LogoutViewModel
from src.system_var import constants
from src.templates import templates
router = APIRouter()
#########################
# Views #
#########################
@router.get('/logout', response_class=HTMLResponse)
def logout_view(
    request: Request,
    reason: Optional[str] = None,
    session: Union[UserSession, None]=Depends(verify_session)
):
    """Render the logout page and clear the session cookie.

    Maintenance users (user_flg == '1') are linked back to the maintenance
    login; everyone else is linked to the regular user login.
    """
    is_maintenance = session is not None and session.user_flg == '1'
    logout = LogoutViewModel()
    logout.redirect_to = '/maintlogin' if is_maintenance else '/userlogin'
    logout.link_text = 'Login画面に戻る' if is_maintenance else 'MeDaCA機能メニューへ'
    logout.reason = constants.LOGOUT_REASON_MESSAGE_MAP.get(reason, '')
    template_response = templates.TemplateResponse(
        'logout.html',
        {
            'request': request,
            'logout': logout,
        }
    )
    # Remove the cookie so the browser forgets the session key.
    template_response.delete_cookie('session')
    return template_response

View File

@ -0,0 +1,55 @@
from fastapi import APIRouter, Depends, Request
from fastapi.responses import HTMLResponse
from src.depends.services import get_service
from src.model.internal.session import UserSession
from src.model.view.menu_view_model import MenuViewModel
from src.model.view.user_view_model import UserViewModel
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.session_service import set_session
from src.templates import templates
router = APIRouter()
router.route_class = AuthenticatedRoute
#########################
# Views #
#########################
@router.get('/menu', response_class=HTMLResponse)
def menu_view(
    request: Request,
    batch_status_service:BatchStatusService=Depends(get_service(BatchStatusService))
):
    """Render the function menu, refreshing the session access/expiry times."""
    session: UserSession = request.session
    # The date master (HDKE table) carries the batch-active flag.
    batch_status = batch_status_service.hdke_table_record.bch_actf
    menu = MenuViewModel(
        batch_status=batch_status,
        user_model=UserViewModel(
            doc_flg=session.doc_flg,
            inst_flg=session.inst_flg,
            bio_flg=session.bio_flg,
            master_mainte_flg=session.master_mainte_flg
        )
    )
    # Slide the session window forward on every menu access.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    return templates.TemplateResponse(
        'menu.html',
        {
            'request': request,
            'menu': menu
        },
        headers={'session_key': session.session_key}
    )

View File

View File

@ -0,0 +1,21 @@
"""FastAPIサーバーの起動・終了イベントのラッパー"""
from typing import Callable
from fastapi import FastAPI
from src.db.tasks import close_db, init_db
def create_start_app_handler(app: FastAPI) -> Callable:
    """Build the startup callback that initializes the DB on the given app."""
    def _on_startup() -> None:
        init_db(app)
    return _on_startup
def create_stop_app_handler(app: FastAPI) -> Callable:
    """Build the shutdown callback that releases the DB on the given app."""
    def _on_shutdown() -> None:
        close_db(app)
    return _on_shutdown

Binary file not shown.

View File

View File

@ -0,0 +1,147 @@
from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
create_engine, text)
from sqlalchemy.engine.create import create_engine
from sqlalchemy.engine.url import URL
from src.error.exceptions import DBException
from src.system_var import environment
class Database:
    """Thin wrapper over a SQLAlchemy engine/connection for MySQL access.

    One engine (QueuePool, AUTOCOMMIT isolation) is created per instance;
    connect()/disconnect() manage a single Connection used by all queries.
    """
    # Class-level None placeholders; __init__ assigns instance attributes.
    __connection: Connection = None
    __engine: Engine = None
    __host: str = None
    __port: int = None
    __username: str = None
    __password: str = None
    __schema: str = None
    __connection_string: URL = None
    def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
        """Initialize a new instance and build the engine.

        Args:
            username (str): DB user name
            password (str): DB password
            host (str): DB host name
            port (int): DB port (coerced with int(), so str is tolerated)
            schema (str): DB schema (database) name
        """
        self.__username = username
        self.__password = password
        self.__host = host
        self.__port = int(port)
        self.__schema = schema
        self.__connection_string = URL.create(
            drivername='mysql+pymysql',
            username=self.__username,
            password=self.__password,
            host=self.__host,
            port=self.__port,
            database=self.__schema,
            query={"charset": "utf8mb4"}
        )
        self.__engine = create_engine(
            self.__connection_string,
            pool_timeout=5,
            poolclass=QueuePool,
            isolation_level="AUTOCOMMIT"
        )
    @classmethod
    def get_instance(cls):
        """Build an instance from the environment configuration.

        Returns:
            Database: DB access instance configured from src.system_var.environment
        """
        return cls(
            username=environment.DB_USERNAME,
            password=environment.DB_PASSWORD,
            host=environment.DB_HOST,
            port=environment.DB_PORT,
            schema=environment.DB_SCHEMA
        )
    @property
    def connection(self):
        """The current Connection, or None before connect() / after disconnect()."""
        return self.__connection
    def connect(self):
        """Open a connection from the engine's pool.

        NOTE(review): the original docstring claimed this retries and raises
        DBException on failure — no retry logic exists here; SQLAlchemy's own
        exception propagates on the first failure.
        """
        self.__connection = self.__engine.connect()
    def execute_select(self, select_query: str, parameters=None) -> list[dict]:
        """Run a SELECT query.

        Args:
            select_query (str): SELECT statement (text() placeholders allowed)
            parameters (dict, optional): values bound to the query placeholders. Defaults to None.
        Raises:
            DBException: not connected, or the underlying execute failed
        Returns:
            list[dict]: one {column name: value} mapping per row
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(select_query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        result_rows = result.mappings().all()
        return result_rows
    def execute(self, query: str, parameters=None) -> CursorResult:
        """Run an arbitrary SQL statement.

        Args:
            query (str): SQL statement (text() placeholders allowed)
            parameters (dict, optional): values bound to the query placeholders. Defaults to None.
        Raises:
            DBException: not connected, or the underlying execute failed
        Returns:
            CursorResult: raw SQLAlchemy result
        """
        if self.__connection is None:
            raise DBException('DBに接続していません')
        try:
            result = self.__connection.execute(text(query), parameters=parameters)
        except Exception as e:
            raise DBException(e)
        return result
    def begin(self):
        """Begin a transaction (no-op when one is already open)."""
        if not self.__connection.in_transaction():
            self.__connection.begin()
    def commit(self):
        """Commit the open transaction (no-op when none is open)."""
        if self.__connection.in_transaction():
            self.__connection.commit()
    def rollback(self):
        """Roll back the open transaction (no-op when none is open)."""
        if self.__connection.in_transaction():
            self.__connection.rollback()
    def disconnect(self):
        """Close and clear the connection (safe to call when not connected)."""
        if self.__connection is not None:
            self.__connection.close()
            self.__connection = None

View File

@ -0,0 +1,35 @@
class SQLCondition:
    """One WHERE-clause fragment rendered as ` <column> <operator> <param>`."""
    column: str
    operator: str
    param: str
    literal: bool
    def __init__(self, column: str, operator: str, param: str, literal=False) -> None:
        """
        Args:
            column (str): column name
            operator (str): comparison operator
            param (str): parameter (placeholder name, or a literal value)
            literal (bool, optional): embed `param` verbatim instead of as a
                placeholder. Only ever pass fixed values here — embedding
                request input would open up SQL injection.
        """
        self.column = column
        self.operator = operator
        self.param = param
        self.literal = literal
    def apply(self):
        """Render this condition as a SQL fragment (note the leading space)."""
        # literal=False -> named placeholder `:param`; otherwise embed verbatim.
        rendered = self.param if self.literal is not False else f':{self.param}'
        return f' {self.column} {self.operator} {rendered}'
# Comparison-operator constants for use as SQLCondition.operator.
EQ = '='
NE = '<>'
GT = '>'
LT = '<'
GE = '>='
LE = '<='
LIKE = 'LIKE'
IS = 'IS'
IS_NOT = 'IS NOT'

View File

@ -0,0 +1,14 @@
from fastapi import FastAPI
from src.db.database import Database
def init_db(app: FastAPI) -> None:
    """Create the shared Database instance and expose it on the app state."""
    # Stored on app.state so request handlers can retrieve it via dependencies.
    app.state._db = Database.get_instance()
def close_db(app: FastAPI) -> None:
    """Drop the app-level Database reference (no explicit disconnect here)."""
    app.state._db = None

View File

@ -0,0 +1,48 @@
import datetime
from typing import Union
from fastapi import Depends
from fastapi.security import APIKeyCookie, APIKeyQuery
from src.error.exceptions import JWTTokenVerifyException
from src.model.internal.jwt_token import JWTToken
from src.model.internal.session import UserSession
from src.services.session_service import get_session
from src.system_var import environment
cookie_security = APIKeyCookie(name='session', auto_error=False)
code_security = APIKeyQuery(name='code', auto_error=False)
def get_current_session(session_key=Depends(cookie_security)):
    """Resolve the session record for the cookie-supplied key, or None."""
    if session_key is None:
        return None
    # get_session returns None when no record exists for the key.
    return get_session(session_key)
def check_session_expired(session:Union[UserSession, None]=Depends(get_current_session)):
    """Return the session only if its last access is within the expiry window.

    Expiry window = last_access_time + SESSION_EXPIRE_MINUTE; a session whose
    window has passed resolves to None (treated as logged out).
    """
    if session is None:
        return None
    expiry = datetime.datetime.fromtimestamp(session.last_access_time) \
        + datetime.timedelta(minutes=environment.SESSION_EXPIRE_MINUTE)
    # Exactly-at-expiry still counts as valid, matching the original `<` check.
    return session if expiry >= datetime.datetime.now() else None
def verify_session(session:Union[UserSession, None]=Depends(check_session_expired)):
    """Return the session only when its JWT still verifies; otherwise None."""
    if session is None:
        return None
    try:
        JWTToken(session.id_token, session.refresh_token).verify_token()
    except JWTTokenVerifyException as err:
        print(err)
        return None
    return session

View File

@ -0,0 +1,17 @@
from typing import Callable, Type
from fastapi import Depends
from starlette.requests import Request
from src.db.database import Database
from src.repositories.base_repository import BaseRepository
def get_database(request: Request) -> Database:
    """Fetch the app-wide Database instance stored on app.state by init_db()."""
    return request.app.state._db
def get_repository(Repo_type: Type[BaseRepository]) -> Callable:
    """Build a FastAPI dependency that instantiates `Repo_type` with the app DB."""
    def _resolve(db: Database = Depends(get_database)) -> Type[BaseRepository]:
        return Repo_type(db)
    return _resolve

View File

@ -0,0 +1,16 @@
from typing import Callable, Type
from fastapi import Depends
from starlette.requests import Request
from src.db.database import Database
from src.depends.database import get_database
from src.services.base_service import BaseService
def get_service(Service_type: Type[BaseService]) -> Callable:
    """Build a FastAPI dependency that assembles a service with its collaborators."""
    # Inner function renamed so it no longer shadows this factory's own name.
    def _resolve(db: Database=Depends(get_database)) -> Type[BaseService]:
        # Each declared repository is constructed with the shared DB;
        # each declared client is constructed with no arguments.
        repositories = {key: repo_cls(db) for key, repo_cls in Service_type.REPOSITORIES.items()}
        clients = {key: client_cls() for key, client_cls in Service_type.CLIENTS.items()}
        return Service_type(repositories=repositories, clients=clients)
    return _resolve

View File

View File

@ -0,0 +1,15 @@
from urllib import parse
from fastapi import Request
from fastapi.exceptions import HTTPException
from fastapi.responses import RedirectResponse
from starlette import status
def http_exception_handler(request: Request, exc: HTTPException):
# 非同期API呼び出しの場合、detailにdictが入ってくるため、そのまま流す
if hasattr(exc, 'detail') is True and type(exc.detail) == dict:
raise exc
error_detail = exc.detail if hasattr(exc, 'detail') else ''
reason = parse.quote(error_detail)
return RedirectResponse(f'/logout?reason={reason}', status_code=status.HTTP_303_SEE_OTHER)

View File

@ -0,0 +1,33 @@
from typing import Union
from starlette import status
class MeDaCaException(Exception):
    """Base exception shared across the web app."""
    pass
class NotAuthorizeException(MeDaCaException):
    """Raised when authentication fails."""
    pass
class JWTTokenVerifyException(MeDaCaException):
    """Raised when JWT token verification fails."""
    pass
class DBException(MeDaCaException):
    """Raised for database-related errors."""
    pass
class UnexpectedException(MeDaCaException):
    """Unexpected server-side error routed to the global exception handler.

    Attributes:
        status_code: HTTP status to report (the 500 default).
        detail: message or structured payload describing the error.
    """
    # Default to HTTP 500.
    default_status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    def __init__(
        self,
        detail: Union[str, dict],
    ) -> None:
        # Forward the detail to Exception so str(exc)/logging carry the
        # message (the original dropped it).
        super().__init__(detail)
        # Use the declared class default instead of re-stating the constant
        # (the original hard-coded 500 twice, leaving default_status_code unused).
        self.status_code = self.default_status_code
        self.detail = detail

View File

@ -0,0 +1,40 @@
import os.path as path
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from starlette import status
import src.static as static
from src.controller import bio, bio_download, healthcheck, login, logout, menu
from src.core import tasks
from src.error.exception_handler import http_exception_handler
from src.error.exceptions import UnexpectedException
app = FastAPI()
# Mount static assets.
app.mount('/static', StaticFiles(directory=path.dirname(static.__file__)), name='static')
# Login routes
app.include_router(login.router)
# Logout routes
app.include_router(logout.router)
# Menu screen routes
app.include_router(menu.router)
# Biological-origin (bio) search routes
app.include_router(bio.router)
# Bio download API router.
# Called asynchronously from the client, so handled apart from the common routers.
app.include_router(bio_download.router)
# Health-check routes
app.include_router(healthcheck.router)
# Handlers that redirect to the logout screen on auth errors.
app.add_exception_handler(status.HTTP_401_UNAUTHORIZED, http_exception_handler)
app.add_exception_handler(status.HTTP_403_FORBIDDEN, http_exception_handler)
# Server-error handler; not an HTTPException, so it is registered separately.
app.add_exception_handler(UnexpectedException, http_exception_handler)
# Register server startup/shutdown events.
app.add_event_handler('startup', tasks.create_start_app_handler(app))
app.add_event_handler('shutdown', tasks.create_stop_app_handler(app))

View File

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class BaseDBModel(BaseModel):
    """Common base class for all DB row models (pydantic)."""
    pass

View File

@ -0,0 +1,74 @@
from datetime import date, datetime
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class BioSalesViewModel(BaseDBModel):
    """Row model for the biological-origin sales view.

    Column meanings are not visible in this module; names suggest received
    (rec_*) slip data, warehouse (whs_*), institution (inst/fcl) and lot
    fields — TODO confirm against the view's SQL definition.
    """
    conv_cd: Optional[int]
    rec_data: Optional[str]
    rec_whs_cd: Optional[str]
    rec_whs_sub_cd: Optional[str]
    rec_whs_org_cd: Optional[str]
    rec_cust_cd: Optional[str]
    rec_comm_cd: Optional[str]
    rec_tran_kbn: Optional[str]
    rev_hsdnymd_wrk: Optional[str]
    rev_hsdnymd_srk: Optional[str]
    rec_urag_no: Optional[str]
    rec_comm_nm: Optional[str]
    rec_nnskfcl_nm: Optional[str]
    rec_nnsk_fcl_addr: Optional[str]
    rec_lot_num: Optional[str]
    rec_amt: Optional[str]
    rec_ymd: Optional[str]
    sale_data_cat: Optional[str]
    slip_file_nm: Optional[str]
    slip_mgt_no: Optional[str]
    row_num: Optional[int]
    hsdn_ymd: Optional[str]
    exec_dt: Optional[str]
    v_tran_cd: Optional[int]
    tran_kbn_nm: Optional[str]
    whs_org_cd: Optional[str]
    v_whsorg_cd: Optional[str]
    whs_org_nm: Optional[str]
    whs_org_kn: Optional[str]
    v_whs_cd: Optional[int]
    whs_nm: Optional[str]
    nnsk_cd: Optional[str]
    v_inst_cd: Optional[str]
    v_inst_kn: Optional[str]
    v_inst_nm: Optional[str]
    v_inst_addr: Optional[str]
    comm_cd: Optional[str]
    comm_nm: Optional[str]
    whs_rep_comm_nm: Optional[str]
    whs_rep_nnskfcl_nm: Optional[str]
    whs_rep_nnsk_fcl_addr: Optional[str]
    mkr_inf_1: Optional[str]
    mkr_cd: Optional[str]
    htdnymd_err_kbn: Optional[str]
    prd_exis_kbn: Optional[str]
    fcl_exis_kbn: Optional[str]
    amt: Optional[int]
    slip_org_kbn: Optional[str]
    bef_slip_mgt_no: Optional[str]
    lot_no_err_flg: Optional[str]
    iko_flg: Optional[str]
    kjyo_ym: Optional[str]
    tksnbk_kbn: Optional[str]
    fcl_exec_kbn: Optional[str]
    rec_sts_kbn: Optional[str]
    # Correction audit columns: blanked by BisDisplayModel when no
    # pre-correction slip number exists.
    ins_dt: Optional[datetime]
    ins_usr: Optional[str]
    dcf_inst_cd: Optional[str]
    inst_cd: Optional[str]
    inst_name_form: Optional[str]
    address: Optional[str]
    tel_no: Optional[str]
    data_kbn: Optional[str]
    ser_no: Optional[str]
    lot_num: Optional[str]
    expr_dt: Optional[date]
    amt_fugo: Optional[str]

View File

@ -0,0 +1,8 @@
from datetime import datetime
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class HdkeTblModel(BaseDBModel):
    """Date-master (HDKE) record; only the batch-active flag is mapped."""
    # '1' appears to mean the nightly batch is running
    # (see MenuViewModel.is_batch_processing) — TODO confirm
    bch_actf: Optional[str]

View File

@ -0,0 +1,5 @@
from src.model.db.base_db_model import BaseDBModel
class PharmacyProductMasterModel(BaseDBModel):
    """Pharmacy product master row; only the maker code/name column is mapped."""
    mkr_cd_nm: str

View File

@ -0,0 +1,36 @@
from datetime import datetime
from typing import Optional
from src.model.db.base_db_model import BaseDBModel
class UserMasterModel(BaseDBModel):
    """User master row with permission flags and user-type helpers."""
    user_id: Optional[str]
    mail_adr: Optional[str]
    user_nm: Optional[str]
    auth_flg1: Optional[str]  # bio permission (copied to session bio_flg at login)
    auth_flg2: Optional[str]  # doctor permission (session doc_flg)
    auth_flg3: Optional[str]  # institution permission (session inst_flg)
    auth_flg4: Optional[str]  # master-maintenance permission (session master_mainte_flg)
    auth_flg5: Optional[str]  # meaning not visible in this module
    auth_flg6: Optional[str]
    auth_flg7: Optional[str]
    auth_flg8: Optional[str]
    auth_flg9: Optional[str]
    auth_flg10: Optional[str]
    pwd: Optional[str]
    enabled_flg: Optional[str]  # 'Y' = account enabled
    creater: Optional[str]
    create_date: Optional[datetime]
    updater: Optional[str]
    update_date: Optional[datetime]
    mntuser_flg: Optional[str]  # '1' = maintenance user, '0' = groupware (SSO) user
    def is_enable_user(self):
        """True when the account is enabled ('Y')."""
        return self.enabled_flg == 'Y'
    def is_maintenance_user(self):
        """True for maintenance (ID/password) users."""
        return self.mntuser_flg == '1'
    def is_groupware_user(self):
        """True for groupware (SSO) users."""
        return self.mntuser_flg == '0'

View File

@ -0,0 +1,8 @@
from src.model.db.base_db_model import BaseDBModel
class WholesalerMasterModel(BaseDBModel):
    """Wholesaler master row; displayed as `<cd>-<sub_cd>:<nm>` in the bio screen."""
    rec_whs_cd: str
    rec_whs_sub_cd: str
    nm: str
    whs_nm: str

View File

@ -0,0 +1,152 @@
import base64
import json
from typing import Optional
import jwt
import requests
from starlette import status
from src.error.exceptions import JWTTokenVerifyException
from src.system_var import environment
class JWTToken:
    """Cognito-issued JWT pair (ID token + refresh token) with verification helpers."""
    id_token: str
    refresh_token: str
    verified_jwt: Optional[dict]
    def __init__(self, id_token: str, refresh_token: str, verified_jwt: dict=None) -> None:
        self.id_token = id_token
        self.refresh_token = refresh_token
        self.verified_jwt = verified_jwt
    @property
    def verified_token(self):
        """The decoded claims dict; only available after verify_token() succeeded.

        Raises:
            JWTTokenVerifyException: the token has not been verified yet
        """
        if self.verified_jwt is None:
            raise JWTTokenVerifyException('検証されていないトークン')
        return self.verified_jwt
    @property
    def user_id(self):
        """Extract the user id from the verified claims.

        SSO users carry an `identities` claim whose userId is formatted
        `<employee-no>@<domain>`; maintenance users use `cognito:username`.
        """
        verified_token = self.verified_token
        user_id: str = None
        identities: dict = verified_token.get('identities')
        if identities is not None:
            # General-user (SSO) login.
            user_id = identities[0]['userId']
            # Formatted `<employee no>@<domain>`, so split on '@'.
            user_id = user_id.split('@')[0]
        else:
            # Maintenance-user login.
            user_id = verified_token.get('cognito:username')
        return user_id
    @classmethod
    def __post_token_request(cls, request_params: dict) -> dict:
        """POST to the Cognito token endpoint and return the parsed JSON response.

        Shared by request()/refresh(), which previously duplicated this code.

        NOTE(review): parameters are sent as a query string (`params=`) even
        though the Content-Type header announces a form body; OAuth2 token
        endpoints normally expect body parameters (`data=`) — confirm against
        the Cognito deployment before changing.

        Raises:
            JWTTokenVerifyException: any non-200 response
        """
        token_url = f'{environment.COGNITO_AUTH_DOMAIN}/{environment.TOKEN_ENDPOINT}'
        message = bytes(f'{environment.COGNITO_CLIENT_ID}:{environment.COGNITO_CLIENT_SECRET}', 'utf8')
        auth_header_value = base64.b64encode(message).decode()
        request_headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Authorization': f'Basic {auth_header_value}'
        }
        res = requests.post(token_url, params=request_params, headers=request_headers)
        if res.status_code != status.HTTP_200_OK:
            raise JWTTokenVerifyException(res.text)
        return json.loads(res.text)
    @classmethod
    def request(cls, code: str):
        """Exchange an authorization code for a new token pair.

        Args:
            code (str): authorization (security) code
        Raises:
            JWTTokenVerifyException: the token endpoint rejected the request
        Returns:
            JWTToken: unverified token pair
        """
        token_response = cls.__post_token_request({
            'grant_type': 'authorization_code',
            'client_id': environment.COGNITO_CLIENT_ID,
            'code': code,
            'redirect_uri': environment.COGNITO_REDIRECT_URI
        })
        return cls(id_token=token_response['id_token'], refresh_token=token_response['refresh_token'])
    @classmethod
    def refresh(cls, refresh_token: str):
        """Obtain a fresh ID token using a refresh token.

        Args:
            refresh_token (str): refresh token
        Raises:
            JWTTokenVerifyException: the token endpoint rejected the request
        Returns:
            JWTToken: unverified token pair (the refresh token is reused;
            this grant does not return a new one)
        """
        token_response = cls.__post_token_request({
            'grant_type': 'refresh_token',
            'client_id': environment.COGNITO_CLIENT_ID,
            'refresh_token': refresh_token,
            'redirect_uri': environment.COGNITO_REDIRECT_URI
        })
        return cls(id_token=token_response['id_token'], refresh_token=refresh_token)
    def verify_token(self):
        """Verify the ID token's signature and claims against the user pool.

        On an expired signature the pair is refreshed and re-verified.

        Raises:
            JWTTokenVerifyException: missing token, invalid claims, or a
                non-ID token was supplied
        Returns:
            JWTToken: a new instance carrying the verified claims
        """
        if self.id_token is None:
            # Raise the domain exception so callers catching
            # JWTTokenVerifyException handle this case too (was a bare Exception).
            raise JWTTokenVerifyException('アクセストークンがない')
        issuer = f'https://cognito-idp.{environment.AWS_REGION}.amazonaws.com/{environment.COGNITO_USER_POOL_ID}'
        jwks_url = f'{issuer}/.well-known/jwks.json'
        jwks_client = jwt.PyJWKClient(jwks_url)
        signing_key = jwks_client.get_signing_key_from_jwt(self.id_token)
        try:
            verified_jwt = jwt.decode(
                self.id_token,
                signing_key.key,
                algorithms=['RS256'],
                audience=environment.COGNITO_CLIENT_ID,
                issuer=issuer,
                # Tolerate clock skew against Cognito: skip issued-at validation.
                options={'verify_iat': False}
            )
        except jwt.ExpiredSignatureError:
            # Token expired: refresh the pair and verify the refreshed token.
            # BUGFIX: the original returned `refreshed.verified_token()` — a
            # property (not callable) that is also unset on a fresh instance,
            # so every refresh attempt raised instead of recovering.
            refreshed_jwt_token = JWTToken.refresh(self.refresh_token)
            return refreshed_jwt_token.verify_token()
        except jwt.InvalidTokenError as e:
            # Any failure other than expiry is fatal.
            raise JWTTokenVerifyException('Invalid token', e)
        # Ensure an ID token (not an access token) was presented.
        if verified_jwt['token_use'] != 'id':
            raise JWTTokenVerifyException('Invalid `token_use` claim, should be `id`.')
        return JWTToken(
            id_token=self.id_token,
            refresh_token=self.refresh_token,
            verified_jwt=verified_jwt
        )

View File

@ -0,0 +1,53 @@
import datetime
import uuid
from pynamodb.attributes import NumberAttribute, UnicodeAttribute
from pynamodb.models import Model as DynamoDBTableModel
from src.system_var import environment
class UserSession(DynamoDBTableModel):
    """DynamoDB-backed session record, keyed by a random UUID session key."""
    class Meta:
        # Table name and region come from environment config at import time.
        table_name = environment.SESSION_TABLE_NAME
        region = environment.AWS_REGION
    session_key = UnicodeAttribute(hash_key=True)  # random UUID4 string
    user_id = UnicodeAttribute()
    id_token = UnicodeAttribute()  # Cognito ID token (JWT)
    doc_flg = UnicodeAttribute()  # doctor permission (user master auth_flg2)
    inst_flg = UnicodeAttribute()  # institution permission (auth_flg3)
    bio_flg = UnicodeAttribute()  # bio permission (auth_flg1)
    master_mainte_flg = UnicodeAttribute()  # master-maintenance permission (auth_flg4)
    user_flg = UnicodeAttribute()  # mntuser_flg: '1' maintenance user
    refresh_token = UnicodeAttribute()
    csrf_token = UnicodeAttribute()
    last_access_time = NumberAttribute()  # POSIX timestamp of the last request
    record_expiration_time = NumberAttribute()  # POSIX timestamp when the record expires
    @classmethod
    def new_last_access_time(cls):
        """Current wall-clock time as a POSIX timestamp."""
        return datetime.datetime.now().timestamp()
    @classmethod
    def new_record_expiration_time(cls, expire=environment.SESSION_EXPIRE_MINUTE):
        """Timestamp `expire` minutes from now.

        NOTE: the default is bound from the environment at import time.
        """
        last_access_time = datetime.datetime.fromtimestamp(cls.new_last_access_time())
        return (last_access_time + datetime.timedelta(minutes=expire)).timestamp()
    @classmethod
    def new(
        cls, user_id, id_token, refresh_token, csrf_token, doc_flg, inst_flg, bio_flg, master_mainte_flg, user_flg
    ):
        """Build a fresh session with a random key and fresh timestamps."""
        return cls(
            session_key=str(uuid.uuid4()),
            user_id=user_id,
            id_token=id_token,
            refresh_token=refresh_token,
            csrf_token=csrf_token,
            doc_flg=doc_flg,
            inst_flg=inst_flg,
            bio_flg=bio_flg,
            master_mainte_flg=master_mainte_flg,
            user_flg=user_flg,
            last_access_time=cls.new_last_access_time(),
            record_expiration_time=cls.new_record_expiration_time()
        )

View File

@ -0,0 +1,137 @@
from typing import Optional
from fastapi import Body, Form
from pydantic import BaseModel
from src.util.sanitize import sanitize
from src.util.string_util import is_not_empty
@sanitize
class BioModel(BaseModel):
    """Search-condition model for the biological-origin (bio) screen.

    Built from either HTML form fields (as_form) or a JSON body (as_body);
    both funnel through the same normalization helper.
    """
    wholesaler_code: Optional[str]
    wholesaler_sub_code: Optional[str]
    wholesaler_name: Optional[str]
    org_kbn: Optional[str]
    rec_ymd_from: Optional[str]
    rec_ymd_to: Optional[str]
    rec_lot_num: Optional[str]
    data_kbn: Optional[str]
    maker_cd: Optional[str]
    rev_hsdnymd_srk_from: Optional[str]
    rev_hsdnymd_srk_to: Optional[str]
    ikoFlg: Optional[str]
    @classmethod
    def as_form(
        cls,
        ctrl_wholesaler: str = Form(None),
        ctrl_org_kbn: str = Form(None),
        ctrl_rec_ymd_from: str = Form(None),
        ctrl_rec_ymd_to: str = Form(None),
        ctrl_rec_lot_num: str = Form(None),
        ctrl_data_kbn: str = Form(None),
        ctrl_maker_cd: str = Form(None),
        ctrl_rev_hsdnymd_srk_from: str = Form(None),
        ctrl_rev_hsdnymd_srk_to: str = Form(None),
        ikoFlg: str = Form(None)
    ):
        """Build the model from HTML form fields (FastAPI dependency)."""
        # FIX: __convert_request_param is now a proper @classmethod, so `cls`
        # no longer has to be passed explicitly as a positional argument.
        return cls.__convert_request_param(
            ctrl_wholesaler,
            ctrl_org_kbn,
            ctrl_rec_ymd_from,
            ctrl_rec_ymd_to,
            ctrl_rec_lot_num,
            ctrl_data_kbn,
            ctrl_maker_cd,
            ctrl_rev_hsdnymd_srk_from,
            ctrl_rev_hsdnymd_srk_to,
            ikoFlg
        )
    @classmethod
    def as_body(
        cls,
        ctrl_wholesaler: str = Body(None),
        ctrl_org_kbn: str = Body(None),
        ctrl_rec_ymd_from: str = Body(None),
        ctrl_rec_ymd_to: str = Body(None),
        ctrl_rec_lot_num: str = Body(None),
        ctrl_data_kbn: str = Body(None),
        ctrl_maker_cd: str = Body(None),
        ctrl_rev_hsdnymd_srk_from: str = Body(None),
        ctrl_rev_hsdnymd_srk_to: str = Body(None),
        ikoFlg: str = Body(None)
    ):
        """Build the model from a JSON request body (FastAPI dependency)."""
        return cls.__convert_request_param(
            ctrl_wholesaler,
            ctrl_org_kbn,
            ctrl_rec_ymd_from,
            ctrl_rec_ymd_to,
            ctrl_rec_lot_num,
            ctrl_data_kbn,
            ctrl_maker_cd,
            ctrl_rev_hsdnymd_srk_from,
            ctrl_rev_hsdnymd_srk_to,
            ikoFlg
        )
    @classmethod
    def __convert_request_param(
        cls,
        ctrl_wholesaler: str,
        ctrl_org_kbn: str,
        ctrl_rec_ymd_from: str,
        ctrl_rec_ymd_to: str,
        ctrl_rec_lot_num: str,
        ctrl_data_kbn: str,
        ctrl_maker_cd: str,
        ctrl_rev_hsdnymd_srk_from: str,
        ctrl_rev_hsdnymd_srk_to: str,
        ikoFlg: str
    ):
        """Normalize raw request values into model fields.

        The wholesaler control arrives as `<code>-<sub_code>:<name>` and is
        split into its three parts; date fields arrive as `YYYY/MM/DD` and are
        stripped to `YYYYMMDD`.
        """
        wholesaler_code = None
        wholesaler_sub_code = None
        wholesaler_name = None
        if is_not_empty(ctrl_wholesaler):
            # e.g. `020-01:WholesalerName` -> code '020', sub code '01', name.
            name_parts = ctrl_wholesaler.split(':')
            wholesaler_name = name_parts[1]
            code_parts = name_parts[0].split('-')
            wholesaler_code = code_parts[0]
            wholesaler_sub_code = code_parts[1]
        # Processing date range: drop the '/' separators.
        rec_ymd_from = None
        rec_ymd_to = None
        if is_not_empty(ctrl_rec_ymd_from):
            rec_ymd_from = ctrl_rec_ymd_from.replace('/', '')
        if is_not_empty(ctrl_rec_ymd_to):
            rec_ymd_to = ctrl_rec_ymd_to.replace('/', '')
        # Slip issue date range: drop the '/' separators.
        rev_hsdnymd_srk_from = None
        rev_hsdnymd_srk_to = None
        if is_not_empty(ctrl_rev_hsdnymd_srk_from):
            rev_hsdnymd_srk_from = ctrl_rev_hsdnymd_srk_from.replace('/', '')
        if is_not_empty(ctrl_rev_hsdnymd_srk_to):
            rev_hsdnymd_srk_to = ctrl_rev_hsdnymd_srk_to.replace('/', '')
        return cls(
            wholesaler_code=wholesaler_code,
            wholesaler_sub_code=wholesaler_sub_code,
            wholesaler_name=wholesaler_name,
            org_kbn=ctrl_org_kbn,
            rec_ymd_from=rec_ymd_from,
            rec_ymd_to=rec_ymd_to,
            rec_lot_num=ctrl_rec_lot_num,
            data_kbn=ctrl_data_kbn,
            maker_cd=ctrl_maker_cd,
            rev_hsdnymd_srk_from=rev_hsdnymd_srk_from,
            rev_hsdnymd_srk_to=rev_hsdnymd_srk_to,
            ikoFlg=ikoFlg
        )

View File

@ -0,0 +1,20 @@
from typing import Optional
from fastapi import Body
from pydantic import BaseModel
class BioDownloadModel(BaseModel):
    """Request model for the async bio-download API."""
    user_id: str
    # presumably a download-type discriminator; meaning not visible here — TODO confirm
    kind: str
    @classmethod
    def as_body(
        cls,
        user_id: str = Body(),
        kind: str = Body()
    ):
        """Build the model from a JSON request body (FastAPI dependency)."""
        return cls(
            user_id=user_id,
            kind=kind
        )

View File

@ -0,0 +1,15 @@
from fastapi import Form
from pydantic import BaseModel
class LoginModel(BaseModel):
    """Credentials submitted by the maintenance login form."""
    username: str
    password: str
    @classmethod
    def as_form(
        cls,
        ctrl_username: str = Form(),
        ctrl_password: str = Form()
    ):
        """Build the model from the login form fields (FastAPI dependency)."""
        return cls(username=ctrl_username, password=ctrl_password)

View File

@ -0,0 +1,19 @@
from src.model.db.bio_sales_view import BioSalesViewModel
from src.system_var import constants
from src.util.sanitize import sanitize
@sanitize
class BisDisplayModel(BioSalesViewModel):
def __init__(self, param: BioSalesViewModel) -> None:
super().__init__(**param.dict())
# 区分・フラグの正式名称を設定
self.slip_org_kbn = constants.SLIP_ORG_KBN_FULL_NAME.get(self.slip_org_kbn)
self.data_kbn = constants.DATA_KBN_JP_NAME.get(self.data_kbn)
self.lot_no_err_flg = constants.LOT_NO_ERR_FLG_JP_NAME.get(self.lot_no_err_flg)
# 訂正前伝票管理番号がセットされているときのみ修正日時、修正者、エラー詳細種別をセット
if (self.bef_slip_mgt_no is None):
self.ins_dt = ""
self.ins_usr = ""

View File

@ -0,0 +1,137 @@
import json
from collections import OrderedDict
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
from src.model.db.pharmacy_product_master import PharmacyProductMasterModel
from src.model.db.wholesaler_master import WholesalerMasterModel
from src.model.request.bio import BioModel
from src.model.view.bio_disp_model import BisDisplayModel
from src.system_var import environment
class BioViewModel(BaseModel):
    """View model backing the bio search screen (template helpers included).

    The is_selected_* / is_input_* helpers echo the submitted form values back
    into the controls; they all return '' until a form has been submitted.
    """
    subtitle: str = '生物由来検索一覧'
    user_id: Optional[str]
    batch_status: Optional[str]
    whs_models: list[WholesalerMasterModel]
    phm_models: list[PharmacyProductMasterModel]
    bio_data: Optional[list[BisDisplayModel]] = []
    form_data: Optional[BioModel]
    def display_wholesaler_names(self):
        """Wholesaler options formatted as `<cd>-<sub_cd>:<name>`."""
        display_names = [
            f'{whs_model.rec_whs_cd}-{whs_model.rec_whs_sub_cd}:{whs_model.nm}'
            for whs_model in self.whs_models
        ]
        return display_names
    def display_org_kbn(self):
        """Origin-kbn code -> label map (ordered for the select control)."""
        return OrderedDict(
            {
                '': '',
                'J': 'JD-NET',
                'N': 'NHI',
                'H': '手入力'
            }
        )
    def display_data_kbn(self):
        """Data-kbn code -> label map (ordered for the select control)."""
        return OrderedDict(
            {
                '' : '',
                '0': '正常',
                '1': 'ロットエラー',
                '3': 'ロット不明',
                '9': 'エラー(解消済)',
                '2': '除外'
            }
        )
    def bio_data_json_str(self):
        """Serialize the search results to JSON (datetimes as ISO strings)."""
        def date_handler(obj):
            return obj.isoformat() if hasattr(obj, 'isoformat') else obj
        return json.dumps([model.dict() for model in self.bio_data], ensure_ascii=False, default=date_handler)
    def is_selected_whs_name(self, selected_wholesaler):
        """'selected' when the option matches the submitted wholesaler."""
        if not self.is_form_submitted():
            return ''
        form_wholesaler_full_name = f'{self.form_data.wholesaler_code}-{self.form_data.wholesaler_sub_code}:{self.form_data.wholesaler_name}'
        return self._selected_value(form_wholesaler_full_name, selected_wholesaler)
    def is_selected_org_kbn(self, selected_org_kbn):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.org_kbn, selected_org_kbn)
    def is_input_rec_ymd_from(self):
        if not self.is_form_submitted():
            return ''
        return self._format_date_string(self.form_data.rec_ymd_from)
    def is_input_rec_ymd_to(self):
        if not self.is_form_submitted():
            return ''
        return self._format_date_string(self.form_data.rec_ymd_to)
    def is_input_lot_num(self):
        if not self.is_form_submitted():
            return ''
        return self.form_data.rec_lot_num or ''
    def is_selected_data_kbn(self, selected_data_kbn):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.data_kbn, selected_data_kbn)
    def is_selected_maker_cd(self, selected_maker_cd):
        if not self.is_form_submitted():
            return ''
        return self._selected_value(self.form_data.maker_cd, selected_maker_cd)
    def is_input_rev_hsdnymd_srk_from(self):
        if not self.is_form_submitted():
            return ''
        return self._format_date_string(self.form_data.rev_hsdnymd_srk_from)
    def is_input_rev_hsdnymd_srk_to(self):
        if not self.is_form_submitted():
            return ''
        return self._format_date_string(self.form_data.rev_hsdnymd_srk_to)
    def is_checked_iko_flg(self):
        """'checked' when the migration flag was submitted truthy."""
        if not self.is_form_submitted():
            return ''
        return 'checked' if self.form_data.ikoFlg else ''
    def disabled_button(self):
        """'disabled' when there is nothing to download or too much data."""
        return 'disabled' if self.is_data_empty() or self.is_data_overflow_max_length() else ''
    def is_form_submitted(self):
        return self.form_data is not None
    def is_data_empty(self):
        return len(self.bio_data) == 0
    def is_data_overflow_max_length(self):
        # Capped by the configured maximum search-result count.
        return len(self.bio_data) >= environment.BIO_SEARCH_RESULT_MAX_COUNT
    def _format_date_string(self, date_string):
        """Re-format 'YYYYMMDD' to 'YYYY/MM/DD' ('' for None)."""
        if date_string is None:
            return ''
        date = datetime.strptime(date_string, '%Y%m%d')
        return date.strftime('%Y/%m/%d')
    def _selected_value(self, form_value: str, current_value: str):
        return 'selected' if form_value == current_value else ''

View File

@ -0,0 +1,10 @@
from typing import Optional
from pydantic import BaseModel
class LogoutViewModel(BaseModel):
    """View model for the logout page."""
    subtitle: str = 'MeDaCA Logout'
    redirect_to: Optional[str]  # login route to link back to
    reason: Optional[str]  # human-readable logout reason message
    link_text:Optional[str]  # label for the back link

View File

@ -0,0 +1,5 @@
from pydantic import BaseModel
class MainteLoginViewModel(BaseModel):
    """View model for the maintenance login page."""
    subtitle: str = 'MeDaCA Mainte Login'

View File

@ -0,0 +1,26 @@
from typing import Optional
from pydantic import BaseModel
from src.model.view.user_view_model import UserViewModel
class MenuViewModel(BaseModel):
    """View model for the function menu; delegates permission checks to the user."""
    subtitle: str = 'MeDaCA 機能メニュー'
    batch_status: Optional[str]  # HDKE bch_actf; '1' while the batch runs
    user_model: UserViewModel
    def is_batch_processing(self):
        """True while the nightly batch is marked active."""
        return self.batch_status == '1'
    def is_available_ult_doctor_menu(self):
        return self.user_model.has_ult_doctor_permission()
    def is_available_ult_inst_menu(self):
        return self.user_model.has_ult_inst_permission()
    def is_available_bio_menu(self):
        return self.user_model.has_bio_permission()
    def is_available_master_maintenance_menu(self):
        return self.user_model.has_master_maintenance_permission()

View File

@ -0,0 +1,26 @@
from typing import Optional
from pydantic import BaseModel
class UserViewModel(BaseModel):
    """Holds the authority flags of the logged-in user; a flag value of '1' grants the authority."""
    bio_flg: str  # AUTH_FLG1
    doc_flg: str  # AUTH_FLG2
    inst_flg: str  # AUTH_FLG3
    master_mainte_flg: str  # AUTH_FLG4
    user_flg: Optional[str]  # MNTUSER_FLG

    def has_ult_doctor_permission(self):
        """AUTH_FLG2 grants the ULT doctor function."""
        return '1' == self.doc_flg

    def has_ult_inst_permission(self):
        """AUTH_FLG3 grants the ULT institution function."""
        return '1' == self.inst_flg

    def has_bio_permission(self):
        """AUTH_FLG1 grants the bio function."""
        return '1' == self.bio_flg

    def has_master_maintenance_permission(self):
        """AUTH_FLG4 grants master maintenance."""
        return '1' == self.master_mainte_flg

    def is_maintenance_user(self):
        """MNTUSER_FLG marks a maintenance user."""
        return '1' == self.user_flg

View File

@ -0,0 +1,44 @@
from abc import ABCMeta
import pandas as pd
from sqlalchemy import text
from src.db.database import Database
from src.model.db.base_db_model import BaseDBModel
class BaseRepository(metaclass=ABCMeta):
    """Abstract base class for database repositories.

    Subclasses override the `fetch_*` hooks they support; `_to_data_frame`
    gives them a shared way to materialize a query result as a DataFrame.
    """
    _database: Database

    def __init__(self, db: Database) -> None:
        self._database = db

    def fetch_all(self) -> list[BaseDBModel]:
        """Fetch every record. Override in subclasses that support it."""
        pass

    def fetch_one(self, parameter: dict) -> BaseDBModel:
        """Fetch a single record matching `parameter`. Override in subclasses."""
        pass

    def fetch_many(self, parameter: dict) -> list[BaseDBModel]:
        """Fetch the records matching `parameter`. Override in subclasses."""
        pass

    def fetch_as_data_frame(self, parameter: dict) -> pd.DataFrame:
        """Fetch the records matching `parameter` as a pandas DataFrame. Override in subclasses."""
        pass

    def _to_data_frame(self, query, parameter: BaseDBModel):
        """Run `query` with `parameter` bound on the current connection and return the result as a DataFrame."""
        # Fixed: was `params = params=parameter.dict()`, a redundant chained assignment.
        params = parameter.dict()
        sql_query = pd.read_sql(
            text(query),
            con=self._database.connection,
            params=params)
        df = pd.DataFrame(sql_query, index=None)
        return df

View File

@ -0,0 +1,122 @@
from src.db import sql_condition as condition
from src.db.sql_condition import SQLCondition
from src.model.db.bio_sales_view import BioSalesViewModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
from src.util.string_util import is_not_empty
class BioSalesViewRepository(BaseRepository):
    """Repository over src05.bio_sales_view (biological-product wholesale sales data)."""

    # Joins the sales view with the lot-number master and signs the amount:
    # credit transactions (v_tran_cd beginning with 2) are rendered negative.
    FETCH_SQL = """\
SELECT
    (
        CASE
            WHEN LEFT(bs.v_tran_cd, 1) = 2
            AND bs.amt >= 1 THEN CONCAT('-', bs.amt)
            ELSE bs.amt
        END
    ) AS amt_fugo,
    bs.*,
    ln.ser_no,
    ln.lot_num,
    ln.expr_dt
FROM
    src05.bio_sales_view bs
    LEFT OUTER JOIN
        src05.lot_num_mst ln
    ON bs.mkr_cd = ln.ser_no
    AND bs.rec_lot_num = ln.lot_num
WHERE
    {where_clause}
ORDER BY
    bs.rec_whs_cd,
    bs.rec_whs_sub_cd,
    bs.rev_hsdnymd_srk,
    bs.slip_mgt_no
ASC\
"""

    def fetch_many(self, parameter: BioModel) -> list[BioSalesViewModel]:
        """Search the sales view with the given form conditions and return model instances.

        Raises:
            Exception: re-raised after logging when the query fails.
        """
        try:
            self._database.connect()
            where_clause = self.__build_condition(parameter)
            query = self.FETCH_SQL.format(where_clause=where_clause)
            result = self._database.execute_select(query, parameter.dict())
            return [BioSalesViewModel(**r) for r in result]
        except Exception as e:
            # TODO: route error output through a logger instead of stdout
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise
        finally:
            self._database.disconnect()

    def fetch_as_data_frame(self, parameter: BioModel):
        """Search the sales view and return the result as a pandas DataFrame (used for file download)."""
        try:
            self._database.connect()
            where_clause = self.__build_condition(parameter)
            query = self.FETCH_SQL.format(where_clause=where_clause)
            return self._to_data_frame(query, parameter)
        except Exception as e:
            # TODO: route error output through a logger instead of stdout
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise
        finally:
            self._database.disconnect()

    def __build_condition(self, parameter: BioModel):
        """Assemble the WHERE clause from the optional search-form fields."""
        where_clauses: list[SQLCondition] = []
        # Wholesaler (code / sub-code): both must be present to filter.
        if is_not_empty(parameter.wholesaler_code) and is_not_empty(parameter.wholesaler_sub_code):
            where_clauses.append(SQLCondition('rec_whs_cd', condition.EQ, 'wholesaler_code'))
            where_clauses.append(SQLCondition('rec_whs_sub_cd', condition.EQ, 'wholesaler_sub_code'))
        # Data origin kind
        if is_not_empty(parameter.org_kbn):
            where_clauses.append(SQLCondition('slip_org_kbn', condition.EQ, 'org_kbn'))
        # Processing date, start
        if is_not_empty(parameter.rec_ymd_from):
            where_clauses.append(SQLCondition('rec_ymd', condition.GE, 'rec_ymd_from'))
        # Processing date, end
        if is_not_empty(parameter.rec_ymd_to):
            where_clauses.append(SQLCondition('rec_ymd', condition.LE, 'rec_ymd_to'))
        # Lot number: use LIKE when the value contains the wildcard '%', '=' otherwise.
        # BUGFIX: the containment test was reversed (`rec_lot_num in '%'`), so a
        # wildcard search only triggered when the whole value was exactly '%'.
        if is_not_empty(parameter.rec_lot_num):
            rec_lot_num = parameter.rec_lot_num
            rec_lot_num_comparator = condition.LIKE if '%' in rec_lot_num else condition.EQ
            where_clauses.append(SQLCondition('rec_lot_num', rec_lot_num_comparator, 'rec_lot_num'))
        # Data kind
        if is_not_empty(parameter.data_kbn):
            where_clauses.append(SQLCondition('data_kbn', condition.EQ, 'data_kbn'))
        # Product (maker code)
        if is_not_empty(parameter.maker_cd):
            where_clauses.append(SQLCondition('mkr_cd', condition.EQ, 'maker_cd'))
        # Slip issue date, start
        if is_not_empty(parameter.rev_hsdnymd_srk_from):
            where_clauses.append(SQLCondition('rev_hsdnymd_srk', condition.GE, 'rev_hsdnymd_srk_from'))
        # Slip issue date, end
        if is_not_empty(parameter.rev_hsdnymd_srk_to):
            where_clauses.append(SQLCondition('rev_hsdnymd_srk', condition.LE, 'rev_hsdnymd_srk_to'))
        # Migration flag: when the checkbox is off, exclude migrated rows (IKO_FLG = '*').
        if parameter.ikoFlg is None:
            where_clauses.append(SQLCondition('iko_flg', condition.IS, 'NULL', literal=True))
        # Fixed condition: only rows whose lot number is non-blank.
        where_clauses.append(SQLCondition('LENGTH(TRIM(rec_lot_num))', condition.GT, '0', literal=True))
        # Use a loop name distinct from the imported `condition` module to avoid shadowing it.
        return ' AND '.join(clause.apply() for clause in where_clauses)

View File

@ -0,0 +1,21 @@
from src.model.db.hdke_tbl import HdkeTblModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
class HdkeTblRepository(BaseRepository):
    """Repository for the date/status table (src05.hdke_tbl)."""

    FETCH_SQL = "SELECT bch_actf FROM src05.hdke_tbl"

    def fetch_all(self) -> list[HdkeTblModel]:
        """Return every row of the date table as model instances."""
        try:
            self._database.connect()
            rows = self._database.execute_select(self.FETCH_SQL)
            return [HdkeTblModel(**row) for row in rows]
        except Exception as e:
            # TODO: write error output through a logger, not print
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,39 @@
from src.model.db.pharmacy_product_master import PharmacyProductMasterModel
from src.repositories.base_repository import BaseRepository
class PharmacyProductMasterRepository(BaseRepository):
    """Repository for the pharmacy product master view (src05.phm_prd_mst_v)."""

    # Picks the latest (max sub_no) non-deleted row per product and yields
    # "maker_cd maker_name" strings for the product drop-down.
    FETCH_SQL = """\
SELECT
    CONCAT(IFNULL(mkr_cd, ''), ' ', IFNULL(mkr_inf_1, '')) AS mkr_cd_nm
FROM
    src05.phm_prd_mst_v t1
    INNER JOIN
    (
        SELECT
            prd_cd,MAX(sub_no) AS sno
        FROM
            src05.phm_prd_mst_v
        WHERE rec_sts_kbn <> '9'
        GROUP BY prd_cd
    ) fmv2
    ON t1.prd_cd = fmv2.prd_cd AND t1.sub_no = fmv2.sno
WHERE
    mkr_cd IS NOT NULL
ORDER BY mkr_cd
"""

    def fetch_all(self) -> list[PharmacyProductMasterModel]:
        """Return the maker code/name list.

        Raises:
            Exception: re-raised after logging when the query fails.
        """
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_SQL)
            return [PharmacyProductMasterModel(**r) for r in result]
        except Exception as e:
            # TODO: write error output through a logger
            # Message unified with the other repositories (the original was a
            # copy-paste of the wholesaler repository's "getOroshiData" text).
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,30 @@
from src.model.db.user_master import UserMasterModel
from src.model.request.bio import BioModel
from src.repositories.base_repository import BaseRepository
class UserMasterRepository(BaseRepository):
    """Repository for the user master table (src05.user_mst)."""

    FETCH_SQL = """\
SELECT
    *
FROM
    src05.user_mst
WHERE
    user_id = :user_id\
"""

    def fetch_one(self, parameter: dict) -> UserMasterModel:
        """Look up a single user by `user_id`; return None when no row matches."""
        try:
            self._database.connect()
            rows = self._database.execute_select(self.FETCH_SQL, parameter)
            records = [UserMasterModel(**row) for row in rows]
            return records[0] if records else None
        except Exception as e:
            # TODO: write error output through a logger, not print
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

@ -0,0 +1,38 @@
from src.model.db.wholesaler_master import WholesalerMasterModel
from src.repositories.base_repository import BaseRepository
class WholesalerMasterRepository(BaseRepository):
    """Repository listing the wholesalers that appear in the biological sales data."""

    # Distinct wholesaler code/sub-code pairs joined with the wholesaler master
    # name valid on the current processing date (hdke_tbl.syor_date).
    FETCH_SQL = """\
SELECT DISTINCT
    b.rec_whs_cd,
    b.rec_whs_sub_cd,
    v2.nm,
    b.whs_nm
FROM src05.bio_sales b
LEFT OUTER JOIN
    (
        SELECT sub_no, nm, v_whs_cd, rec_sts_kbn
        FROM src05.whs_mst_v
        WHERE (SELECT STR_TO_DATE(syor_date, '%Y%m%d') FROM src05.hdke_tbl) BETWEEN start_date AND end_date
    ) v2
    ON b.v_whs_cd = v2.v_whs_cd
    AND v2.rec_sts_kbn <> '9'
ORDER BY b.rec_whs_cd, b.rec_whs_sub_cd , b.whs_nm DESC
"""

    def fetch_all(self) -> list[WholesalerMasterModel]:
        """Return the distinct wholesaler list for the search drop-down.

        Raises:
            Exception: re-raised after logging when the query fails.
        """
        try:
            self._database.connect()
            result = self._database.execute_select(self.FETCH_SQL)
            # Removed a redundant pass-through list comprehension over `result`.
            return [WholesalerMasterModel(**r) for r in result]
        except Exception as e:
            # TODO: write error output through a logger
            # Message unified with the other repositories.
            print(f"[ERROR] DB Error : Exception={e.args}")
            raise e
        finally:
            self._database.disconnect()

View File

View File

@ -0,0 +1,125 @@
import logging
from typing import Callable
from fastapi import Request, Response
from fastapi.exceptions import HTTPException
from fastapi.routing import APIRoute
from starlette import status
from src.depends.auth import (check_session_expired, get_current_session,
verify_session)
from src.error.exceptions import UnexpectedException
from src.system_var import constants, environment
logger = logging.getLogger('uvicorn')
class MeDaCaRoute(APIRoute):
    """Application-wide custom router that wraps every route with pre/post hooks.

    Args:
        APIRoute (APIRoute): FastAPI's standard APIRoute.
    """
    def get_route_handler(self) -> Callable:
        """Return a route handler that adds pre- and post-processing.

        Raises:
            e: HTTPException (re-raised untouched).
            UnexpectedException: wraps any non-HTTPException error.

        Returns:
            Callable: the custom route handler.
        """
        original_route_handler = super().get_route_handler()
        # Define the handler to return; it must be an async function.
        async def custom_route_handler(request: Request) -> Response:
            try:
                logger.info('pre routing process')
                # Pre-processing hook
                request = await self.pre_process_route(request)
                # The original routing work
                logger.info('routing process')
                response = await original_route_handler(request)
                # Post-processing hook
                logger.info('post routing process')
                return await self.post_process_route(request, response)
            except HTTPException as e:
                raise e
            except Exception as e:
                logger.exception(e)
                raise UnexpectedException(detail=constants.LOGOUT_REASON_UNEXPECTED)
        return custom_route_handler
    async def pre_process_route(self, request: Request) -> Request:
        """Hook run before routing; identity by default.

        Args:
            request (Request): FastAPI request instance.

        Returns:
            Request: the (possibly replaced) request instance.
        """
        return request
    async def post_process_route(self, request: Request, response: Response) -> Response:
        """Hook run after routing; identity by default.

        Args:
            request (Request): FastAPI request instance.
            response (Response): response produced by original_route_handler.

        Returns:
            Response: the (possibly replaced) response instance.
        """
        return response
class BeforeCheckSessionRoute(MeDaCaRoute):
    """Route handler whose pre-processing validates the session cookie.

    Args:
        MeDaCaRoute (MeDaCaRoute): the shared base route handler.
    """
    async def pre_process_route(self, request: Request):
        request = await super().pre_process_route(request)
        # Resolve the session from the cookie and validate it.
        session_key = request.cookies.get('session')
        current_session = get_current_session(session_key)
        checked_session = check_session_expired(current_session)
        verified_session = verify_session(checked_session)
        # Reject the request when the session is not valid.
        if verified_session is None:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
        # Rebuild the request with the verified session stored in its scope.
        scope = request.scope
        scope['session'] = verified_session
        session_request = Request(receive=request.receive, scope=scope)
        return session_request
class AfterSetCookieSessionRoute(MeDaCaRoute):
    """Route handler whose post-processing moves the `session_key` response header into the session cookie.

    Args:
        MeDaCaRoute (MeDaCaRoute): the shared base route handler.
    """
    async def post_process_route(self, request: Request, response: Response):
        response = await super().post_process_route(request, response)
        session_key = response.headers.get('session_key', None)
        # Nothing to set when the handler produced no session key.
        if session_key is None:
            return response
        # Remove the raw header so the key is not leaked to the client.
        del response.headers['session_key']
        # Store the session key in a secure, HTTP-only cookie.
        response.set_cookie(
            key='session',
            value=session_key,
            max_age=environment.SESSION_EXPIRE_MINUTE * 60,  # cookie max-age is in seconds, hence * 60
            secure=True,
            httponly=True
        )
        return response
class AuthenticatedRoute(BeforeCheckSessionRoute, AfterSetCookieSessionRoute):
    """Route handler for authenticated endpoints: session check before routing,
    session-cookie refresh after routing.

    The behavior is composed entirely through the MRO
    (BeforeCheckSessionRoute -> AfterSetCookieSessionRoute -> MeDaCaRoute);
    the previous pass-through overrides that only delegated to super() were
    redundant and have been removed.
    """

View File

@ -0,0 +1,13 @@
from abc import ABCMeta
from src.aws.aws_api_client import AWSAPIClient
from src.repositories.base_repository import BaseRepository
class BaseService(metaclass=ABCMeta):
    """Abstract base class for application services.

    Subclasses declare the repository / AWS-client classes they depend on in
    REPOSITORIES and CLIENTS and receive matching instances via __init__.
    """
    # Map of repository classes each service depends on (name -> class).
    REPOSITORIES: dict[str, BaseRepository] = {}
    # Map of AWS API client classes each service depends on (name -> class).
    CLIENTS: dict[str, AWSAPIClient] = {}
    def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
        # Intentionally a no-op: subclasses extract the instances they need
        # from `repositories` / `clients` after calling super().__init__().
        pass

View File

@ -0,0 +1,41 @@
from src.aws.aws_api_client import AWSAPIClient
from src.error.exceptions import DBException
from src.model.db.hdke_tbl import HdkeTblModel
from src.repositories.base_repository import BaseRepository
from src.repositories.hdke_tbl_repository import HdkeTblRepository
from src.services.base_service import BaseService
class BatchStatusService(BaseService):
    """Service exposing the batch status kept in the date table (hdke_tbl)."""

    REPOSITORIES = {
        'hdke_table_repository': HdkeTblRepository
    }

    hdke_table_repository: HdkeTblRepository
    # Cached date-table rows; the table is expected to hold exactly one record.
    # NOTE: the shared mutable class-level default `= []` was removed; the cache
    # is now initialized per instance in __init__.
    __hdke_table_record: list[HdkeTblModel]

    def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
        super().__init__(repositories, clients)
        self.hdke_table_repository = repositories['hdke_table_repository']
        self.__hdke_table_record = []
        # Load the date table when the service is created; failure is fatal.
        try:
            self.__hdke_table_record = self.hdke_table_repository.fetch_all()
        except Exception as e:
            raise DBException(e)

    @property
    def hdke_table_record(self) -> HdkeTblModel:
        """The single date-table record; raises DBException when missing."""
        self.__assert_record_exists()
        # The date table always holds exactly one row.
        return self.__hdke_table_record[0]

    def is_batch_processing(self):
        """True while the batch-active flag (bch_actf) is '1'."""
        self.__assert_record_exists()
        return self.hdke_table_record.bch_actf == '1'  # TODO: extract '1' as a named constant

    def __assert_record_exists(self):
        # The date table must contain a record; treat absence as a DB error.
        if len(self.__hdke_table_record) == 0:
            raise DBException('日付テーブルのレコードが存在しません')

View File

@ -0,0 +1,119 @@
import os.path as path
import shutil
from datetime import datetime
import pandas as pd
from src.aws.aws_api_client import AWSAPIClient
from src.aws.s3 import S3Client
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.view.bio_disp_model import BisDisplayModel
from src.model.view.bio_view_model import BioViewModel
from src.repositories.base_repository import BaseRepository
from src.repositories.bio_sales_view_repository import BioSalesViewRepository
from src.repositories.pharmacy_product_master_repository import \
PharmacyProductMasterRepository
from src.repositories.wholesaler_master_repository import \
WholesalerMasterRepository
from src.services.base_service import BaseService
from src.system_var import constants, environment
class BioViewService(BaseService):
    """Service behind the biological-product sales search screen: builds the
    view model, runs searches, and produces downloadable result files.
    """

    REPOSITORIES = {
        'whs_repository': WholesalerMasterRepository,
        'phm_repository': PharmacyProductMasterRepository,
        'bio_sales_repository': BioSalesViewRepository
    }
    CLIENTS = {
        's3_client': S3Client
    }

    whs_repository: WholesalerMasterRepository
    phm_repository: PharmacyProductMasterRepository
    bio_sales_repository: BioSalesViewRepository
    s3_client: S3Client

    def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
        super().__init__(repositories, clients)
        self.whs_repository = repositories['whs_repository']
        self.phm_repository = repositories['phm_repository']
        self.bio_sales_repository = repositories['bio_sales_repository']
        self.s3_client = clients['s3_client']

    def prepare_bio_view(
        self,
        session: UserSession
    ) -> BioViewModel:
        """Build the initial search-page view model (wholesaler and product lists)."""
        # Fetch the wholesaler list
        wholesalers = self.whs_repository.fetch_all()
        # Fetch the product list
        products = self.phm_repository.fetch_all()
        bio = BioViewModel(
            whs_models=wholesalers,
            phm_models=products,
            user_id=session.user_id
        )
        return bio

    def search_bio_data(self, search_params: BioModel):
        """Search the biological sales data and convert each row to its display model."""
        bio_sales_view_data = self.bio_sales_repository.fetch_many(parameter=search_params)
        display_bio_data: list[BisDisplayModel] = [BisDisplayModel(data) for data in bio_sales_view_data]
        return display_bio_data

    def search_download_bio_data(self, search_params: BioModel):
        """Search the biological sales data as a DataFrame, ready to be written to a download file."""
        bio_sales_data_frame = self.bio_sales_repository.fetch_as_data_frame(parameter=search_params)
        return bio_sales_data_frame

    def write_excel_file(self, data_frame: pd.DataFrame, user_id: str, timestamp: datetime):
        """Write the search result into a copy of the Excel template and return the output path."""
        output_file_path = path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, f'Result_{user_id}_{timestamp:%Y%m%d%H%M%S%f}.xlsx')
        # Copy the template so the output file starts with the expected layout.
        shutil.copyfile(
            src=constants.BIO_EXCEL_TEMPLATE_FILE_PATH,
            dst=output_file_path
        )
        # Open with ExcelWriter in append mode (mode='a'); engine='openpyxl' is
        # required for appending (xlsx only, which is all we emit), and
        # if_sheet_exists='overlay' writes into the existing sheet.
        with pd.ExcelWriter(output_file_path, engine='openpyxl', mode='a', if_sheet_exists='overlay') as writer:
            # Default sheet_name targets 'Sheet1'. header/index are suppressed
            # (the template carries them); startrow/startcol position the data
            # just below the template's header row (A1 is row 0 / col 0).
            data_frame.to_excel(writer, header=False, index=False, startrow=1, startcol=0)
        return output_file_path

    def write_csv_file(self, data_frame: pd.DataFrame, user_id: str, header: list[str], timestamp: datetime):
        """Write the search result to CSV (header emitted as a data row) and return the output path."""
        output_file_path = path.join(constants.BIO_TEMPORARY_FILE_DIR_PATH, f'Result_{user_id}_{timestamp:%Y%m%d%H%M%S%f}.csv')
        # Build a one-row DataFrame holding the display header labels.
        header_data = {}
        for df_column, header_column in zip(data_frame.columns, header):
            header_data[df_column] = header_column
        header_df = pd.DataFrame([header_data], index=None)
        output_df = pd.concat([header_df, data_frame])
        # Emit the header as an ordinary first record, not as a CSV header row.
        output_df.to_csv(output_file_path, index=False, header=False)
        return output_file_path

    def upload_bio_data_file(self, local_file_path: str) -> None:
        """Upload the generated file to the S3 bucket used for downloads."""
        bucket_name = environment.BIO_ACCESS_LOG_BUCKET
        # TODO: move to a dedicated folder/prefix
        file_key = f'bio/{path.basename(local_file_path)}'
        self.s3_client.upload_file(local_file_path, bucket_name, file_key)

    def generate_download_file_url(self, local_file_path: str, user_id: str, kind: str) -> str:
        """Return a presigned URL that downloads the uploaded file under a user-facing name."""
        bucket_name = environment.BIO_ACCESS_LOG_BUCKET
        # TODO: move to a dedicated folder/prefix
        file_key = f'bio/{path.basename(local_file_path)}'
        download_filename = f'{user_id}_生物由来卸販売データ.{kind}'
        return self.s3_client.generate_presigned_url(bucket_name, file_key, download_filename)

View File

@ -0,0 +1,57 @@
import base64
import hashlib
import hmac
from src.aws.aws_api_client import AWSAPIClient
from src.aws.cognito import CognitoClient
from src.error.exceptions import NotAuthorizeException
from src.model.db.user_master import UserMasterModel
from src.model.internal.jwt_token import JWTToken
from src.repositories.base_repository import BaseRepository
from src.repositories.user_master_repository import UserMasterRepository
from src.services.base_service import BaseService
from src.system_var import environment
class LoginService(BaseService):
    """Service handling Cognito authentication and user-master lookup."""

    REPOSITORIES = {
        'user_repository': UserMasterRepository
    }
    CLIENTS = {
        'cognito_client': CognitoClient
    }

    def __init__(self, repositories: dict[str, BaseRepository], clients: dict[str, AWSAPIClient]) -> None:
        super().__init__(repositories, clients)
        self.user_repository = repositories['user_repository']
        self.cognito_client = clients['cognito_client']

    def login(self, username: str, password: str) -> JWTToken:
        """Authenticate against Cognito with the user/password flow.

        Raises:
            NotAuthorizeException: when Cognito rejects the credentials.
            Exception: any other error is re-raised untouched.
        """
        try:
            id_token, refresh_token = self.cognito_client.login_by_user_password_flow(
                username,
                password,
                self.__secret_hash(username)
            )
        except Exception as e:
            # Only botocore ClientError carries `.response`; use getattr so an
            # unrelated exception is not masked by an AttributeError here.
            error_code = getattr(e, 'response', {}).get('Error', {}).get('Code')
            if error_code == 'NotAuthorizedException':
                raise NotAuthorizeException(e)
            else:
                raise e
        return JWTToken(id_token, refresh_token)

    def login_with_security_code(self, code: str) -> JWTToken:
        """Exchange a security code for a token set."""
        return JWTToken.request(code)

    def logged_in_user(self, user_id):
        """Fetch the user-master record for the logged-in user (None when absent)."""
        user_record: UserMasterModel = self.user_repository.fetch_one({'user_id': user_id})
        return user_record

    def __secret_hash(self, username: str):
        """Compute the Cognito SECRET_HASH: base64(HMAC-SHA256(client_secret, username + client_id))."""
        # see - https://aws.amazon.com/jp/premiumsupport/knowledge-center/cognito-unable-to-verify-secret-hash/ # noqa
        message = bytes(username + environment.COGNITO_CLIENT_ID, 'utf-8')
        key = bytes(environment.COGNITO_CLIENT_SECRET, 'utf-8')
        digest = hmac.new(key, message, digestmod=hashlib.sha256).digest()
        return base64.b64encode(digest).decode()

View File

@ -0,0 +1,15 @@
from src.model.internal.session import UserSession
def set_session(session: UserSession) -> str:
    """Persist the session record to the store and return its lookup key."""
    session.save()
    key = session.session_key
    return key
def get_session(key: str) -> UserSession:
    """Load a session by key with a consistent read; None when it does not exist."""
    try:
        return UserSession.get(hash_key=key, consistent_read=True)
    except UserSession.DoesNotExist as e:
        print(e)
        return None

View File

View File

@ -0,0 +1,291 @@
/* ---- Page scaffolding ---- */
body {
    white-space: nowrap;
    background-color: LightCyan;
    font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, " Pゴシック", "MS PGothic", sans-serif;
}
h1 {
    font-size: 155%;
    margin-left: 2%;
    margin-top: 0%;
    margin-bottom: 0%;
}
.title {
    width: 800px;
}
table{
    border-collapse : collapse;
}

/* ---- Search form area ---- */
.search_table {
    margin-bottom: 30px;
    padding-bottom: 15px;
    border-bottom: solid 1px gray;
    width: 1132px;
}
._form {
    width: 1132px;
    margin-left: 10px;
    margin-right: 20px;
}
.back_bt {
    padding-bottom: 10px;
}
._form input[type=text] {
    width: 193px;
    height: 25px;
}
._form input[type=checkbox] {
    width: 13px;
    height: 13px;
}
._form select {
    width: 193px;
    height: 25px;
}
.result_info {
    text-align: right;
}
.search_tb {
    padding-right: 25px;
}
.search_bt {
    /* width: 60px; */
    margin-left: 10px;
}
.clear_bt{
    margin-left: 120px;
    /* width: 60px */
}
.search_dropdown {
    width: 175px;
}

/* ---- Result list (scrollable) ---- */
.bioScroll_div {
    overflow: auto;
    padding-top: 10px;
    height: 250px;
    width: 1132px;
}
.noLine{
    text-decoration: none;
}
.resultAreaMsg {
    margin-top: 5%;
    text-align: center;
    font-size: 150%;
}
.search_btTd {
    text-align: right;
}
.selection {
    display: none;
}
#page-1 {
    display: block;
}
.search_middleTd {
    padding-right: 25px;
    width : 450px;
}
.docSearchScroll_div {
    overflow: auto;
    height: 200px;
    width: 1132px;
}
.transition{
    text-align: right;
    margin-right: 60px;
}
.transition_bt{
    width: 110px;
    height: 40px;
    margin-left: 15px;
    margin-right: 15px;
}

/* ---- Institution detail table ---- */
.instutionInfo_table{
    width: 1132px;
    margin-bottom: 50px;
}
.institution_column {
    width : 160px;
    background : rgb(225, 233, 250);
    border : solid 1px;
}
.institution_data {
    background : rgb(244, 244, 244);
    border : solid 1px;
    padding-left : 0.5em;
    padding-right : 0.5em;
}
.data_width_long {
    width : 500px;
}
.data_width_middle {
    width : 300px;
}
.data_width_short {
    width : 100px;
}
.checkbox_margin {
    margin-left : 20px;
}
.border_top_none {
    border-top-style:none;
}
.border_bottom_none {
    border-bottom-style:none;
}
.textbox_margin {
    margin-left : 20px;
}
.textbox_margin_short {
    margin-left : 5px;
}
.label_margin {
    margin-left: 10px;
    margin-right: 10px;
}
.trt_course{
    width: 70px;
}
.small_tb{
    width: 100px;
}
.docBelongScroll_div {
    overflow: auto;
    height: 100px;
    width: 500px;
    margin: 0px 30px 0px 30px;
}
.rightPadding_table{
    padding-right: 50px;
}
.verticalBar_td{
    width: 1px;
    height: 150px;
    background-color: gray;
}
.docPlaceScroll_div {
    overflow: auto;
    height: 150px;
    width: 700px;
    margin: 0px 30px 0px 30px;
}
.result_tr{
    overflow-y: scroll;
    overflow-x: scroll;
}
.result_data{
    overflow-y: scroll;
    overflow-x: scroll;
    width: 50px;
}

/* ---- tablesorter plugin styling ---- */
table.tablesorter {
    font-family:arial;
    background-color: #CDCDCD;
    font-size: 12pt;
    text-align: left;
}
table.tablesorter thead tr th, table.tablesorter tfoot tr th {
    background-color: #e6EEEE;
    border: 0.1px solid silver;
    font-size: 12pt;
    padding: 4px;
    padding-right: 20px;
}
table.tablesorter thead tr .header {
    background-image: url(bg.gif);
    background-repeat: no-repeat;
    background-position: center right;
    cursor: pointer;
}
table.tablesorter tbody td {
    color: #3D3D3D;
    padding: 4px;
    background-color: #FFF;
    border: 0.1px solid silver;
    vertical-align: top;
}
table.tablesorter tbody td div{
    float: right;
}
table.tablesorter tbody tr.odd td {
    background-color:#F0F0F6;
}
table.tablesorter thead tr .headerSortUp {
    background-image: url(asc.gif);
}
table.tablesorter thead tr .headerSortDown {
    background-image: url(desc.gif);
}
table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp {
    background-color: #8dbdd8;
}

/* ---- Full-screen loading overlay ---- */
#loading {
    z-index: 10000;
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: #FFF;
    overflow-x: hidden;
    overflow-y: auto;
    outline: 0;
    text-align: center;
    display: none;
    opacity: 0.7;
}
#loading_content {
    position: absolute;
    top: 50%;
    left: 50%;
}

View File

@ -0,0 +1,11 @@
/* Sundays: red (1st column of the 7-day grid) */
.flatpickr-calendar .flatpickr-innerContainer .flatpickr-weekdays .flatpickr-weekday:nth-child(7n + 1),
.flatpickr-calendar .flatpickr-innerContainer .flatpickr-days .flatpickr-day:not(.flatpickr-disabled):not(.prevMonthDay):not(.nextMonthDay):nth-child(7n + 1) {
    color: red;
}

/* Saturdays: blue (7th column of the 7-day grid) */
.flatpickr-calendar .flatpickr-innerContainer .flatpickr-weekdays .flatpickr-weekday:nth-child(7),
.flatpickr-calendar .flatpickr-innerContainer .flatpickr-days .flatpickr-day:not(.flatpickr-disabled):not(.prevMonthDay):not(.nextMonthDay):nth-child(7n) {
    color: blue;
}

View File

@ -0,0 +1,49 @@
/* Login / logout page styles */
body{
    background-color: LightCyan;
    background-size: 220%,220%;
    font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, " Pゴシック", "MS PGothic", sans-serif;
}
/* Rounded white card holding the login form */
.background{
    margin-top: 5%;
    padding: 2%;
    background-color: white;
    width: 40%;
    border-radius: 25px;
    box-shadow: 5px 5px rgba(0,0,0,0.4); /* fixed stray double semicolon */
}
.btn_width {
    width: 80%;
}
.form_login{
    width: 80%;
    font-size: 180%;
    margin: 1%;
}
/* Placeholder color, per-engine prefixes */
.form_login::-webkit-input-placeholder{
    color: gray;
}
.form_login:-ms-input-placeholder{
    color: gray;
}
.form_login::-moz-placeholder{
    color: gray;
}
.logout_p{
    font-size: 160%;
}
.notUseBioMsg{
    font-size: 143%;
    color: red;
}
.batchMsg{
    color: red;
    font-size: 120%;
    text-align: center;
}

Some files were not shown because too many files have changed in this diff Show More