Merge pull request #44 NEWDWH2021-547 into develop-6crm
This commit is contained in:
commit
0ab9bfdebb
2
.gitignore
vendored
2
.gitignore
vendored
@ -4,4 +4,4 @@ lambda/mbj-newdwh2021-staging-PublishFromLog/package-lock.json
|
||||
lambda/mbj-newdwh2021-staging-PublishFromLog/node_modules/*
|
||||
__pycache__/
|
||||
.env
|
||||
settings.json
|
||||
**/.vscode/settings.json
|
||||
|
||||
16
ecs/crm-datafetch/.vscode/launch.json
vendored
Normal file
16
ecs/crm-datafetch/.vscode/launch.json
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Current File",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
// エントリーポイントのファイルに変更すること
|
||||
"program": "main.py",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true,
|
||||
// 環境変数が必要な場合に読み込む環境変数ファイル
|
||||
"envFile": "${workspaceFolder}/.env",
|
||||
}
|
||||
]
|
||||
}
|
||||
16
ecs/crm-datafetch/.vscode/recommend_settings.json
vendored
Normal file
16
ecs/crm-datafetch/.vscode/recommend_settings.json
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": null,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
}
|
||||
},
|
||||
"python.linting.lintOnSave": true,
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.pylintEnabled": false,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.flake8Args": ["--max-line-length=150", "--ignore=F541"],
|
||||
"python.formatting.provider": "autopep8",
|
||||
"python.formatting.autopep8Args": ["--max-line-length", "150"]
|
||||
}
|
||||
19
ecs/crm-datafetch/Dockerfile
Normal file
19
ecs/crm-datafetch/Dockerfile
Normal file
@ -0,0 +1,19 @@
|
||||
FROM python:3.8
|
||||
|
||||
ENV TZ="Asia/Tokyo"
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY Pipfile Pipfile.lock ./
|
||||
RUN \
|
||||
apt update -y && \
|
||||
# パッケージのセキュリティアップデートのみを適用するコマンド
|
||||
apt install -y unattended-upgrades && \
|
||||
unattended-upgrades && \
|
||||
pip install pipenv --no-cache-dir && \
|
||||
pipenv install --system --deploy && \
|
||||
pip uninstall -y pipenv virtualenv-clone virtualenv
|
||||
|
||||
COPY main.py ./
|
||||
COPY src ./
|
||||
|
||||
CMD [ "python", "./main.py" ]
|
||||
16
ecs/crm-datafetch/Pipfile
Normal file
16
ecs/crm-datafetch/Pipfile
Normal file
@ -0,0 +1,16 @@
|
||||
[[source]]
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
boto3 = "*"
|
||||
simple-salesforce = "*"
|
||||
tenacity = "*"
|
||||
|
||||
[dev-packages]
|
||||
autopep8 = "*"
|
||||
flake8 = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.8"
|
||||
419
ecs/crm-datafetch/Pipfile.lock
generated
Normal file
419
ecs/crm-datafetch/Pipfile.lock
generated
Normal file
@ -0,0 +1,419 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "ec1d83143aff859500979be73f67196dcfe2298ad3553a7d81ad0605a277d672"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.8"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
"sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4",
|
||||
"sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
||||
"version": "==21.4.0"
|
||||
},
|
||||
"authlib": {
|
||||
"hashes": [
|
||||
"sha256:1286e2d5ef5bfe5a11cc2d0a0d1031f0393f6ce4d61f5121cfe87fa0054e98bd",
|
||||
"sha256:6e74a4846ac36dfc882b3cc2fbd3d9eb410a627f2f2dc11771276655345223b1"
|
||||
],
|
||||
"version": "==1.0.1"
|
||||
},
|
||||
"boto3": {
|
||||
"hashes": [
|
||||
"sha256:5c775dcb12ca5d6be3f5aa3c49d77783faa64eb30fd3f4af93ff116bb42f9ffb",
|
||||
"sha256:5d9bcc355cf6edd7f3849fedac4252e12a0aa2b436cdbc0d4371b16a0f852a30"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.24.34"
|
||||
},
|
||||
"botocore": {
|
||||
"hashes": [
|
||||
"sha256:0d824a5315f5f5c3bea53c14107a69695ef43190edf647f1281bac8f172ca77c",
|
||||
"sha256:9c695d47f1f1212f3e306e51f7bacdf67e58055194ddcf7d8296660b124cf135"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==1.27.34"
|
||||
},
|
||||
"cached-property": {
|
||||
"hashes": [
|
||||
"sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130",
|
||||
"sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"
|
||||
],
|
||||
"version": "==1.5.2"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d",
|
||||
"sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2022.6.15"
|
||||
},
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5",
|
||||
"sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef",
|
||||
"sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104",
|
||||
"sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426",
|
||||
"sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405",
|
||||
"sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375",
|
||||
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a",
|
||||
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e",
|
||||
"sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc",
|
||||
"sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf",
|
||||
"sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185",
|
||||
"sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497",
|
||||
"sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3",
|
||||
"sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35",
|
||||
"sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c",
|
||||
"sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83",
|
||||
"sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21",
|
||||
"sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca",
|
||||
"sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984",
|
||||
"sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac",
|
||||
"sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd",
|
||||
"sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee",
|
||||
"sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a",
|
||||
"sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2",
|
||||
"sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192",
|
||||
"sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7",
|
||||
"sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585",
|
||||
"sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f",
|
||||
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e",
|
||||
"sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27",
|
||||
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b",
|
||||
"sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e",
|
||||
"sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e",
|
||||
"sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d",
|
||||
"sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c",
|
||||
"sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415",
|
||||
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82",
|
||||
"sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02",
|
||||
"sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314",
|
||||
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325",
|
||||
"sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c",
|
||||
"sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3",
|
||||
"sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914",
|
||||
"sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045",
|
||||
"sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d",
|
||||
"sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9",
|
||||
"sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5",
|
||||
"sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2",
|
||||
"sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c",
|
||||
"sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3",
|
||||
"sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2",
|
||||
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8",
|
||||
"sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d",
|
||||
"sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d",
|
||||
"sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9",
|
||||
"sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162",
|
||||
"sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76",
|
||||
"sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4",
|
||||
"sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e",
|
||||
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9",
|
||||
"sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6",
|
||||
"sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b",
|
||||
"sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01",
|
||||
"sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"
|
||||
],
|
||||
"version": "==1.15.1"
|
||||
},
|
||||
"charset-normalizer": {
|
||||
"hashes": [
|
||||
"sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5",
|
||||
"sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2.1.0"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59",
|
||||
"sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596",
|
||||
"sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3",
|
||||
"sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5",
|
||||
"sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab",
|
||||
"sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884",
|
||||
"sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82",
|
||||
"sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b",
|
||||
"sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441",
|
||||
"sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa",
|
||||
"sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d",
|
||||
"sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b",
|
||||
"sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a",
|
||||
"sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6",
|
||||
"sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157",
|
||||
"sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280",
|
||||
"sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282",
|
||||
"sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67",
|
||||
"sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8",
|
||||
"sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046",
|
||||
"sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327",
|
||||
"sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==37.0.4"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff",
|
||||
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
|
||||
],
|
||||
"markers": "python_version >= '3.5'",
|
||||
"version": "==3.3"
|
||||
},
|
||||
"isodate": {
|
||||
"hashes": [
|
||||
"sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96",
|
||||
"sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"
|
||||
],
|
||||
"version": "==0.6.1"
|
||||
},
|
||||
"jmespath": {
|
||||
"hashes": [
|
||||
"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980",
|
||||
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==1.0.1"
|
||||
},
|
||||
"lxml": {
|
||||
"hashes": [
|
||||
"sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318",
|
||||
"sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c",
|
||||
"sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b",
|
||||
"sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000",
|
||||
"sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73",
|
||||
"sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d",
|
||||
"sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb",
|
||||
"sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8",
|
||||
"sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2",
|
||||
"sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345",
|
||||
"sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94",
|
||||
"sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e",
|
||||
"sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b",
|
||||
"sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc",
|
||||
"sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a",
|
||||
"sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9",
|
||||
"sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc",
|
||||
"sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387",
|
||||
"sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb",
|
||||
"sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7",
|
||||
"sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4",
|
||||
"sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97",
|
||||
"sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67",
|
||||
"sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627",
|
||||
"sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7",
|
||||
"sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd",
|
||||
"sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3",
|
||||
"sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7",
|
||||
"sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130",
|
||||
"sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b",
|
||||
"sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036",
|
||||
"sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785",
|
||||
"sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca",
|
||||
"sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91",
|
||||
"sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc",
|
||||
"sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536",
|
||||
"sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391",
|
||||
"sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3",
|
||||
"sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d",
|
||||
"sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21",
|
||||
"sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3",
|
||||
"sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d",
|
||||
"sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29",
|
||||
"sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715",
|
||||
"sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed",
|
||||
"sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25",
|
||||
"sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c",
|
||||
"sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785",
|
||||
"sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837",
|
||||
"sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4",
|
||||
"sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b",
|
||||
"sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2",
|
||||
"sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067",
|
||||
"sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448",
|
||||
"sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d",
|
||||
"sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2",
|
||||
"sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc",
|
||||
"sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c",
|
||||
"sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5",
|
||||
"sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84",
|
||||
"sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8",
|
||||
"sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf",
|
||||
"sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7",
|
||||
"sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e",
|
||||
"sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb",
|
||||
"sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b",
|
||||
"sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3",
|
||||
"sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad",
|
||||
"sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8",
|
||||
"sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
||||
"version": "==4.9.1"
|
||||
},
|
||||
"platformdirs": {
|
||||
"hashes": [
|
||||
"sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788",
|
||||
"sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.5.2"
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
|
||||
"sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
|
||||
],
|
||||
"version": "==2.21"
|
||||
},
|
||||
"python-dateutil": {
|
||||
"hashes": [
|
||||
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
|
||||
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.8.2"
|
||||
},
|
||||
"pytz": {
|
||||
"hashes": [
|
||||
"sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7",
|
||||
"sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"
|
||||
],
|
||||
"version": "==2022.1"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
|
||||
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
|
||||
],
|
||||
"markers": "python_version >= '3.7' and python_version < '4'",
|
||||
"version": "==2.28.1"
|
||||
},
|
||||
"requests-file": {
|
||||
"hashes": [
|
||||
"sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e",
|
||||
"sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"
|
||||
],
|
||||
"version": "==1.5.1"
|
||||
},
|
||||
"requests-toolbelt": {
|
||||
"hashes": [
|
||||
"sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
|
||||
"sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
|
||||
],
|
||||
"version": "==0.9.1"
|
||||
},
|
||||
"s3transfer": {
|
||||
"hashes": [
|
||||
"sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
|
||||
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==0.6.0"
|
||||
},
|
||||
"simple-salesforce": {
|
||||
"hashes": [
|
||||
"sha256:15d6943e52252c9cc28e1779803354f2a36c88b72056499e07eb06cd652f149c",
|
||||
"sha256:7931038081c445e9459ddc014aaf7f540b1131a31596956cb5d7c0e7b7e0c4cb"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.12.1"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.16.0"
|
||||
},
|
||||
"tenacity": {
|
||||
"hashes": [
|
||||
"sha256:43242a20e3e73291a28bcbcacfd6e000b02d3857a9a9fff56b297a27afdc932f",
|
||||
"sha256:f78f4ea81b0fabc06728c11dc2a8c01277bfc5181b321a4770471902e3eb844a"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==8.0.1"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec",
|
||||
"sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'",
|
||||
"version": "==1.26.10"
|
||||
},
|
||||
"zeep": {
|
||||
"hashes": [
|
||||
"sha256:5867f2eadd6b028d9751f4155af590d3aaf9280e3a0ed5e15a53343921c956e5",
|
||||
"sha256:81c491092b71f5b276de8c63dfd452be3f322622c48a54f3a497cf913bdfb2f4"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==4.1.0"
|
||||
}
|
||||
},
|
||||
"develop": {
|
||||
"autopep8": {
|
||||
"hashes": [
|
||||
"sha256:44f0932855039d2c15c4510d6df665e4730f2b8582704fa48f9c55bd3e17d979",
|
||||
"sha256:ed77137193bbac52d029a52c59bec1b0629b5a186c495f1eb21b126ac466083f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.6.0"
|
||||
},
|
||||
"flake8": {
|
||||
"hashes": [
|
||||
"sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d",
|
||||
"sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.0.1"
|
||||
},
|
||||
"mccabe": {
|
||||
"hashes": [
|
||||
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
|
||||
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
|
||||
],
|
||||
"version": "==0.6.1"
|
||||
},
|
||||
"pycodestyle": {
|
||||
"hashes": [
|
||||
"sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20",
|
||||
"sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
||||
"version": "==2.8.0"
|
||||
},
|
||||
"pyflakes": {
|
||||
"hashes": [
|
||||
"sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c",
|
||||
"sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.4.0"
|
||||
},
|
||||
"toml": {
|
||||
"hashes": [
|
||||
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
|
||||
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
|
||||
],
|
||||
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==0.10.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
69
ecs/crm-datafetch/README.md
Normal file
69
ecs/crm-datafetch/README.md
Normal file
@ -0,0 +1,69 @@
|
||||
# CRMデータ連携 データ取得処理 ECSタスク
|
||||
|
||||
## 前提事項
|
||||
|
||||
### ツールのバージョン
|
||||
|
||||
- Python 3.8.x
|
||||
- PipEnv(Pythonの依存関係管理用モジュール)
|
||||
|
||||
### 開発環境
|
||||
|
||||
- Visual Studio Code
|
||||
|
||||
## 開発環境構築
|
||||
|
||||
※下記の操作は基本的にVSCode上で行います。
|
||||
|
||||
- [ファイル]-[フォルダーを開く]から、当フォルダを選択して開く
|
||||
|
||||
- [Wiki | Pythonの環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)にて、pyenvの導入まで完了させる
|
||||
- **pyenvの導入はマストではないが、Pythonのバージョンが前提のバージョンと同一であることを確認して開発を進めてください**
|
||||
- **確認しながら開発するのは煩わしいため、導入を強く推奨します。**
|
||||
|
||||
- ローカルのPythonでPipEnvをインストールする
|
||||
|
||||
```sh
|
||||
pip install pipenv
|
||||
```
|
||||
|
||||
- pipenvの仮想環境と依存パッケージをインストール。このとき、初回実行にはpythonの仮想環境のパスがターミナルに表示されるため、控えておく
|
||||
|
||||
```sh
|
||||
# 開発用パッケージも含めてインストール
|
||||
pipenv install --dev
|
||||
```
|
||||
|
||||
- VSCodeのコマンドパレットを[表示]-[コマンドパレット]から開き、`Python: Select interpreter`を選択して実行する
|
||||
- Pythonの実行環境を聞かれるため、先に控えたパスと一致するものを選択する
|
||||
- 出てこない場合、一度VSCodeを閉じて再度開き直す
|
||||
|
||||
- 当フォルダ直下の`.vscode`フォルダ内にある`recommend_settings.json`をコピーし、同フォルダ内に`settings.json`を作成する
|
||||
|
||||
## ローカルでの起動方法
|
||||
|
||||
- 当フォルダ直下の`.vscode`フォルダ内に`launch.json`を作成する
|
||||
- 以下のJSONを入力して保存する
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Current File",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
// エントリーポイントのファイルに変更すること
|
||||
"program": "<エントリーポイントになるファイル>",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true,
|
||||
// 環境変数が必要な場合に読み込む環境変数ファイル
|
||||
"envFile": "${workspaceFolder}/.env",
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
- 環境変数が必要な場合、直接設定するか、上記JSONの`"envFile"`に設定されたパスに`.env`ファイルを作成し、環境変数を入力する
|
||||
- キーボードの「F5」キーを押して起動する
|
||||
- デバッグモードで実行されるため、適当なところにブレークポイントを置いてデバッグすることができる
|
||||
5
ecs/crm-datafetch/main.py
Normal file
5
ecs/crm-datafetch/main.py
Normal file
@ -0,0 +1,5 @@
|
||||
from src.controller import controller
|
||||
|
||||
"""CRMデータ取得処理のエントリーポイント"""
|
||||
if __name__ == '__main__':
|
||||
controller()
|
||||
0
ecs/crm-datafetch/src/__init__.py
Normal file
0
ecs/crm-datafetch/src/__init__.py
Normal file
0
ecs/crm-datafetch/src/aws/__init__.py
Normal file
0
ecs/crm-datafetch/src/aws/__init__.py
Normal file
101
ecs/crm-datafetch/src/aws/s3.py
Normal file
101
ecs/crm-datafetch/src/aws/s3.py
Normal file
@ -0,0 +1,101 @@
|
||||
import json
|
||||
|
||||
import boto3
|
||||
from src.system_var.constants import (AWS_RESOURCE_S3, S3_CHAR_CODE,
|
||||
S3_RESPONSE_BODY)
|
||||
from src.system_var.environments import (CRM_BACKUP_BUCKET, CRM_CONFIG_BUCKET,
|
||||
CRM_IMPORT_DATA_BACKUP_FOLDER,
|
||||
CRM_IMPORT_DATA_FOLDER,
|
||||
IMPORT_DATA_BUCKET,
|
||||
LAST_FETCH_DATE_FOLDER,
|
||||
OBJECT_INFO_FILENAME,
|
||||
OBJECT_INFO_FOLDER,
|
||||
PROCESS_RESULT_FOLDER,
|
||||
RESPONSE_JSON_BACKUP_FOLDER)
|
||||
|
||||
|
||||
class S3Resource:
|
||||
def __init__(self, bucket_name: str) -> None:
|
||||
self.__s3_resource = boto3.resource(AWS_RESOURCE_S3)
|
||||
self.__s3_bucket = self.__s3_resource.Bucket(bucket_name)
|
||||
|
||||
def get_object(self, object_key: str) -> str:
|
||||
response = self.__s3_bucket.Object(object_key).get()
|
||||
body = response[S3_RESPONSE_BODY].read()
|
||||
return body.decode(S3_CHAR_CODE)
|
||||
|
||||
def put_object(self, object_key: str, data: str) -> None:
|
||||
s3_object = self.__s3_bucket.Object(object_key)
|
||||
s3_object.put(Body=data.encode(S3_CHAR_CODE), ContentEncoding=S3_CHAR_CODE)
|
||||
return
|
||||
|
||||
def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None:
|
||||
copy_source = {'Bucket': src_bucket, 'Key': src_key}
|
||||
self.__s3_resource.meta.client.copy(copy_source, dest_bucket, dest_key)
|
||||
return
|
||||
|
||||
|
||||
class ConfigBucket:
|
||||
__s3_resource: S3Resource = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.__s3_resource = S3Resource(CRM_CONFIG_BUCKET)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return CRM_CONFIG_BUCKET
|
||||
|
||||
def get_object_info_file(self) -> str:
|
||||
return self.__s3_resource.get_object(f'{OBJECT_INFO_FOLDER}/{OBJECT_INFO_FILENAME}')
|
||||
|
||||
def get_last_fetch_datetime_file(self, file_path: str) -> str:
|
||||
return self.__s3_resource.get_object(f'{LAST_FETCH_DATE_FOLDER}/{file_path}')
|
||||
|
||||
def put_last_fetch_datetime_file(self, file_path: str, data: str) -> None:
|
||||
self.__s3_resource.put_object(
|
||||
f'{LAST_FETCH_DATE_FOLDER}/{file_path}', data)
|
||||
return
|
||||
|
||||
|
||||
class DataBucket:
|
||||
__s3_resource: S3Resource = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.__s3_resource = S3Resource(IMPORT_DATA_BUCKET)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return IMPORT_DATA_BUCKET
|
||||
|
||||
def put_csv(self, file_path: str, data: str) -> None:
|
||||
object_key = f'{CRM_IMPORT_DATA_FOLDER}/{file_path}'
|
||||
self.__s3_resource.put_object(object_key, data)
|
||||
return
|
||||
|
||||
def put_csv_from(self, src_bucket: str, src_key: str):
|
||||
dest_filename = src_key.split('/')[-1]
|
||||
self.__s3_resource.copy(src_bucket, src_key, str(self), f'{CRM_IMPORT_DATA_FOLDER}/{dest_filename}')
|
||||
return
|
||||
|
||||
|
||||
class BackupBucket:
|
||||
__s3_resource: S3Resource = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.__s3_resource = S3Resource(CRM_BACKUP_BUCKET)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return CRM_BACKUP_BUCKET
|
||||
|
||||
def put_response_json(self, file_path: str, data: dict) -> None:
|
||||
object_key = f'{RESPONSE_JSON_BACKUP_FOLDER}/{file_path}'
|
||||
self.__s3_resource.put_object(object_key, json.dumps(data))
|
||||
return
|
||||
|
||||
def put_csv(self, file_path: str, data: str) -> None:
|
||||
object_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{file_path}'
|
||||
self.__s3_resource.put_object(object_key, data)
|
||||
return
|
||||
|
||||
def put_result_json(self, file_path: str, data: dict) -> None:
|
||||
object_key = f'{PROCESS_RESULT_FOLDER}/{file_path}'
|
||||
self.__s3_resource.put_object(object_key, json.dumps(data))
|
||||
return
|
||||
47
ecs/crm-datafetch/src/backup_crm_csv_data_process.py
Normal file
47
ecs/crm-datafetch/src/backup_crm_csv_data_process.py
Normal file
@ -0,0 +1,47 @@
|
||||
from src.aws.s3 import BackupBucket
|
||||
from src.config.objects import TargetObject
|
||||
from src.error.exceptions import FileUploadException
|
||||
from src.system_var.constants import CSVBK_JP_NAME
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def backup_crm_csv_data_process(target_object: TargetObject, execute_datetime: ExecuteDateTime, csv_string: str):
    """CSV backup step: store the converted CSV in the CRM backup bucket.

    Args:
        target_object (TargetObject): metadata for the object being fetched.
        execute_datetime (ExecuteDateTime): run-timestamp helper used for the key prefix.
        csv_string (str): CSV payload to back up.

    Raises:
        FileUploadException: when the S3 upload fails.
    """
    # Step 1: announce the start of the CSV backup for this object.
    target_object_name = target_object.object_name
    upload_file_name = target_object.upload_file_name
    logger.info(
        f'I-CSVBK-01 [{target_object_name}] のCSVデータのバックアップ処理を開始します ファイル名:[{upload_file_name}.csv]')

    try:
        # Step 2: write the converted CSV into the backup bucket under a
        # per-run key prefix.
        backup_key = f'{execute_datetime.to_path()}/{upload_file_name}.csv'
        BackupBucket().put_csv(backup_key, csv_string)
        logger.debug(
            f'D-CSVBK-02 [{target_object_name}] のCSVデータバックアップ 正常終了')
    except Exception as e:
        raise FileUploadException(
            'E-CSVBK-01',
            CSVBK_JP_NAME, f'[{target_object_name}] CSVデータのバックアップに失敗しました ファイル名:[{upload_file_name}.csv] エラー内容:[{e}]')

    # Step 3: announce completion, then hand control back to the caller.
    logger.info(
        f'I-CSVBK-03 [{target_object_name}] のCSVデータのバックアップ処理を終了します')
    return
40
ecs/crm-datafetch/src/backup_crm_data_process.py
Normal file
40
ecs/crm-datafetch/src/backup_crm_data_process.py
Normal file
@ -0,0 +1,40 @@
|
||||
from src.aws.s3 import BackupBucket
|
||||
from src.error.exceptions import FileUploadException
|
||||
from src.system_var.constants import RESBK_JP_NAME
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def backup_crm_data_process(object_name: str, sf_object_dict: dict, execute_datetime: ExecuteDateTime):
    """Raw-response backup step: store the fetched JSON in the backup bucket.

    Args:
        object_name (str): name of the fetched object.
        sf_object_dict (dict): Salesforce object data.
        execute_datetime (ExecuteDateTime): run-timestamp helper used for the key prefix.

    Raises:
        FileUploadException: when the S3 upload fails.
    """
    # Step 1: announce the start of the raw-response backup.
    logger.info(f'I-RESBK-01 [{object_name}] のCRM電文データバックアップ処理を開始します')

    try:
        # Step 2: store the raw response JSON under a per-run key prefix.
        file_name = f'{execute_datetime.to_path()}/{object_name}.json'
        BackupBucket().put_response_json(file_name, sf_object_dict)
        logger.debug(f'D-RESBK-02 [{object_name}] のJSONデータバックアップ 正常終了')
    except Exception as e:
        raise FileUploadException(
            'E-RESBK-01', RESBK_JP_NAME, f'[{object_name}] 電文データのバックアップに失敗しました ファイル名:[{object_name}.json] エラー内容:[{e}]')

    # Step 3: announce completion, then hand control back to the caller.
    logger.info(f'I-RESBK-03 [{object_name}] のCRM電文データバックアップ処理を終了します')
    return
37
ecs/crm-datafetch/src/check_object_info_process.py
Normal file
37
ecs/crm-datafetch/src/check_object_info_process.py
Normal file
@ -0,0 +1,37 @@
|
||||
from src.config.objects import TargetObject
|
||||
from src.error.exceptions import InvalidConfigException
|
||||
from src.system_var.constants import CHK_JP_NAME
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def check_object_info_process(object_info: dict, execute_datetime: ExecuteDateTime):
    """Object-info validation step: wrap the raw entry in a TargetObject.

    Args:
        object_info (dict): raw target-object entry from the object-info file.
        execute_datetime (ExecuteDateTime): run-timestamp helper.

    Raises:
        InvalidConfigException: when the entry fails validation.

    Returns:
        TargetObject: validated wrapper around the entry.
    """
    # Step 1: announce the start of the validation step.
    logger.info('I-CHK-01 オブジェクト情報形式チェック処理を開始します')

    try:
        # Step 2: TargetObject validates required/optional keys in its constructor.
        target_object = TargetObject(object_info, execute_datetime)
    except Exception as e:
        raise InvalidConfigException(
            'E-CHK-01', CHK_JP_NAME, f'オブジェクト情報形式チェック処理が失敗しました エラー内容:[{e}]')

    # Step 3: announce completion and return the wrapper.
    logger.info('I-CHK-02 オブジェクト情報形式チェック処理を終了します')
    return target_object
0
ecs/crm-datafetch/src/config/__init__.py
Normal file
0
ecs/crm-datafetch/src/config/__init__.py
Normal file
139
ecs/crm-datafetch/src/config/objects.py
Normal file
139
ecs/crm-datafetch/src/config/objects.py
Normal file
@ -0,0 +1,139 @@
|
||||
from src.system_var.constants import (COLUMNS_KEY, COLUMNS_TYPE,
|
||||
DATE_PATTERN_YYYYMMDDTHHMMSSTZ,
|
||||
DATETIME_COLUMN_DEFAULT_VALUE,
|
||||
DATETIME_COLUMN_KEY,
|
||||
DATETIME_COLUMN_TYPE, IS_SKIP_KEY,
|
||||
IS_SKIP_TYPE,
|
||||
IS_UPDATE_LAST_FETCH_DATETIME_KEY,
|
||||
IS_UPDATE_LAST_FETCH_DATETIME_TYPE,
|
||||
LAST_FETCH_DATETIME_FILE_NAME_KEY,
|
||||
LAST_FETCH_DATETIME_FILE_NAME_TYPE,
|
||||
LAST_FETCH_DATETIME_FROM_KEY,
|
||||
LAST_FETCH_DATETIME_TO_KEY,
|
||||
OBJECT_NAME_KEY, OBJECT_NAME_TYPE,
|
||||
OBJECTS_KEY, OBJECTS_TYPE,
|
||||
UPLOAD_FILE_NAME_KEY,
|
||||
UPLOAD_FILE_NAME_TYPE)
|
||||
from src.util.dict_checker import DictChecker
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
|
||||
|
||||
class FetchTargetObjects():
    """Iterable wrapper around the object-info file content.

    Validates that the OBJECTS_KEY entry exists and has the expected type,
    then yields each target-object dict in file order.
    """

    def __init__(self, object_info_file_dict) -> None:
        self.__objects = object_info_file_dict
        self.__dict_checker = DictChecker(self.__objects)
        self.validate()
        # Iteration cursor into the OBJECTS_KEY list.
        self.__i = 0

    def __iter__(self):
        # BUGFIX: reset the cursor so the collection can be iterated more than
        # once; the original one-shot iterator silently yielded nothing on any
        # second pass.
        self.__i = 0
        return self

    def __next__(self):
        items = self.__objects[OBJECTS_KEY]
        if self.__i >= len(items):
            raise StopIteration()
        value = items[self.__i]
        self.__i += 1
        return value

    def validate(self) -> None:
        """Assert (via DictChecker) that OBJECTS_KEY exists and has the expected type."""
        self.__dict_checker.assert_key_exist(OBJECTS_KEY)
        self.__dict_checker.assert_data_type(OBJECTS_KEY, OBJECTS_TYPE)
class TargetObject():
    """Validated view over one entry of the object-info file.

    Required keys are checked at construction time; optional keys fall back
    to defaults through the read-only properties below.
    """

    def __init__(self, object_info, execute_datetime: ExecuteDateTime) -> None:
        self.__dict_checker = DictChecker(object_info)
        self.__object_info = object_info
        self.__execute_datetime = execute_datetime
        self.__validate()

    def __validate(self) -> None:
        # Required keys first, then type checks for any optional key present.
        self.__validate_required_properties()
        self.__validate_optional_properties()

    def __validate_required_properties(self) -> None:
        # Object name and column list are mandatory.
        for key, expected_type in ((OBJECT_NAME_KEY, OBJECT_NAME_TYPE),
                                   (COLUMNS_KEY, COLUMNS_TYPE)):
            self.__dict_checker.assert_key_exist(key)
            self.__dict_checker.assert_data_type(key, expected_type)

    def __validate_optional_properties(self) -> None:
        # Optional keys are only type-checked when present.
        optional_checks = (
            (IS_SKIP_KEY, IS_SKIP_TYPE),
            (IS_UPDATE_LAST_FETCH_DATETIME_KEY, IS_UPDATE_LAST_FETCH_DATETIME_TYPE),
            (LAST_FETCH_DATETIME_FILE_NAME_KEY, LAST_FETCH_DATETIME_FILE_NAME_TYPE),
            (UPLOAD_FILE_NAME_KEY, UPLOAD_FILE_NAME_TYPE),
            (DATETIME_COLUMN_KEY, DATETIME_COLUMN_TYPE),
        )
        for key, expected_type in optional_checks:
            if self.__dict_checker.check_key_exist(key):
                self.__dict_checker.assert_data_type(key, expected_type)

    @property
    def object_name(self) -> str:
        return self.__object_info[OBJECT_NAME_KEY]

    @property
    def columns(self) -> list:
        return self.__object_info[COLUMNS_KEY]

    @property
    def is_skip(self) -> bool:
        # Defaults to False when the key is absent.
        if self.__dict_checker.check_key_exist(IS_SKIP_KEY):
            return self.__object_info[IS_SKIP_KEY]
        return False

    @property
    def is_update_last_fetch_datetime(self) -> bool:
        # Defaults to False when the key is absent.
        return self.__object_info[IS_UPDATE_LAST_FETCH_DATETIME_KEY] if self.__dict_checker.check_key_exist(IS_UPDATE_LAST_FETCH_DATETIME_KEY) else False

    @property
    def last_fetch_datetime_file_name(self) -> str:
        # Defaults to "<object name>.json" when not configured.
        if self.__dict_checker.check_key_exist(LAST_FETCH_DATETIME_FILE_NAME_KEY):
            return self.__object_info[LAST_FETCH_DATETIME_FILE_NAME_KEY]
        return f'{self.__object_info[OBJECT_NAME_KEY]}.json'

    @property
    def upload_file_name(self) -> str:
        # The configured template may embed the execution date; otherwise the
        # name falls back to "<object name>_<date>".
        if self.__dict_checker.check_key_exist(UPLOAD_FILE_NAME_KEY):
            return self.__object_info[UPLOAD_FILE_NAME_KEY].format(execute_datetime=self.__execute_datetime.format_date())
        return f'{self.__object_info[OBJECT_NAME_KEY]}_{self.__execute_datetime.format_date()}'

    @property
    def datetime_column(self) -> str:
        # Defaults to DATETIME_COLUMN_DEFAULT_VALUE when the key is absent.
        if self.__dict_checker.check_key_exist(DATETIME_COLUMN_KEY):
            return self.__object_info[DATETIME_COLUMN_KEY]
        return DATETIME_COLUMN_DEFAULT_VALUE
||||
|
||||
class LastFetchDatetime():
    """Fetch-period boundaries loaded from the last-fetch-datetime file."""

    def __init__(self, last_fetch_datetime_file_dict, execute_datetime) -> None:
        self.__dict_checker = DictChecker(last_fetch_datetime_file_dict)
        self.__execute_datetime = execute_datetime
        self.__last_fetch_datetime_file_dict = last_fetch_datetime_file_dict
        self.__validate()

    def __validate(self) -> None:
        # Pattern-check each boundary only when it is present in the file.
        for key in (LAST_FETCH_DATETIME_FROM_KEY, LAST_FETCH_DATETIME_TO_KEY):
            if self.__dict_checker.check_key_exist(key):
                self.__dict_checker.assert_match_pattern(key, DATE_PATTERN_YYYYMMDDTHHMMSSTZ)

    @property
    def last_fetch_datetime_from(self) -> str:
        # NOTE(review): unlike the "to" boundary there is no fallback here, so
        # a missing "from" key raises KeyError — confirm the file always
        # contains it before relying on this property.
        return self.__last_fetch_datetime_file_dict[LAST_FETCH_DATETIME_FROM_KEY]

    @property
    def last_fetch_datetime_to(self) -> str:
        # Falls back to the current execution datetime when no explicit "to"
        # boundary is configured.
        if self.__dict_checker.check_key_exist(LAST_FETCH_DATETIME_TO_KEY):
            return self.__last_fetch_datetime_file_dict[LAST_FETCH_DATETIME_TO_KEY]
        return self.__execute_datetime
176
ecs/crm-datafetch/src/controller.py
Normal file
176
ecs/crm-datafetch/src/controller.py
Normal file
@ -0,0 +1,176 @@
|
||||
import gc
|
||||
|
||||
from src.backup_crm_csv_data_process import backup_crm_csv_data_process
|
||||
from src.backup_crm_data_process import backup_crm_data_process
|
||||
from src.check_object_info_process import check_object_info_process
|
||||
from src.config.objects import FetchTargetObjects
|
||||
from src.convert_crm_csv_data_process import convert_crm_csv_data_process
|
||||
from src.copy_crm_csv_data_process import copy_crm_csv_data_process
|
||||
from src.error.exceptions import MeDaCaCRMDataFetchException
|
||||
from src.fetch_crm_data_process import fetch_crm_data_process
|
||||
from src.prepare_data_fetch_process import prepare_data_fetch_process
|
||||
from src.set_datetime_period_process import set_datetime_period_process
|
||||
from src.system_var.constants import OBJECT_NAME_KEY
|
||||
from src.upload_last_fetch_datetime_process import \
|
||||
upload_last_fetch_datetime_process
|
||||
from src.upload_result_data_process import upload_result_data_process
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def controller() -> None:
    """Top-level control flow for one CRM data-fetch run.

    Prepares the run, loops over all target objects, uploads the per-object
    result file, and logs the overall outcome. Exceptions are logged and the
    process exits.
    """
    try:
        # 1. Announce the start of the run.
        logger.info('I-CTRL-01 CRMデータ取得処理を開始します')

        # 2. Prepare: load config, timestamps and the (empty) result map.
        logger.info('I-CTRL-02 データ取得準備処理呼び出し')
        fetch_target_objects, execute_datetime, process_result = prepare_data_fetch_process()

        # 3. Loop over every entry of the object-info "objects" list.
        logger.info('I-CTRL-03 取得対象オブジェクトのループ処理開始')
        process_result = fetch_crm_data(fetch_target_objects, execute_datetime, process_result)

        # 4. Log that all objects are done, with the per-object results.
        logger.info(f'I-CTRL-17 すべてのオブジェクトの処理が終了しました 実行結果:[{process_result}]')

        # 5. Upload the process-result file.
        logger.info('I-CTRL-18 CRM_取得処理実施結果ファイルアップロード処理開始')
        upload_result_data_process(process_result, execute_datetime)

        # 6. Summarize success/failure across all objects.
        if not all(v == 'success' for v in process_result.values()):
            logger.error('E-CTRL-01 一部のデータ取得に失敗しています 詳細はログをご確認ください')
        else:
            logger.info('I-CTRL-19 すべてのデータの取得に成功しました')

        # 7. Announce the end of the run.
        logger.info('I-CTRL-20 CRMデータ取得処理を終了します')

        # NOTE(review): every path exits with status 0, even on error —
        # confirm that is intentional for the surrounding task scheduler.
        return exit(0)

    except MeDaCaCRMDataFetchException as e:
        logger.error(f'E-ERR-01 [{e.func_name}]でエラーが発生したため、処理を終了します')
        logger.exception(f'{e.error_id} {e}')
        return exit(0)

    except Exception:
        # BUGFIX: the exception must not be passed as an extra positional
        # argument — the message has no %-placeholder, so logging itself would
        # raise a formatting error. logger.exception already records the
        # active traceback.
        logger.exception('E-ERR-02 予期せぬエラーが発生したため、処理を終了します')
        return exit(0)
def fetch_crm_data(fetch_target_objects: FetchTargetObjects, execute_datetime: ExecuteDateTime, process_result: dict):
    """Loop over the target objects and fetch each one, recording the outcome.

    Args:
        fetch_target_objects (FetchTargetObjects): iterable of target-object dicts.
        execute_datetime (ExecuteDateTime): run-timestamp helper.
        process_result (dict): per-object result map, mutated in place.

    Returns:
        dict: process_result with each object marked 'success' or 'fail'.
    """
    for object_info in fetch_target_objects:
        object_name = object_info.get(OBJECT_NAME_KEY)
        try:
            # Mark as failed up front; flip to success only once the whole
            # per-object pipeline completes.
            process_result[object_name] = 'fail'
            fetch_crm_data_per_object(object_info, execute_datetime)
            process_result[object_name] = 'success'

        except MeDaCaCRMDataFetchException as e:
            # Known failure: log and continue with the next object.
            logger.info(f'{e.error_id} {e}')
            logger.info(
                f'I-ERR-03 [{object_name}] の[{e.func_name}]でエラーが発生しました 次のオブジェクトの処理に移行します', exc_info=True)
            continue

        except Exception:
            # BUGFIX: the original passed the exception as a stray %-format
            # argument with no placeholder in the message, which makes the
            # logging call itself error; exc_info=True already records it.
            logger.info(
                f'I-ERR-04 [{object_name}] の処理中に予期せぬエラーが発生しました 次のオブジェクトの処理に移行します', exc_info=True)
            continue

    return process_result
def fetch_crm_data_per_object(object_info: dict, execute_datetime: ExecuteDateTime) -> None:
    """Run the full fetch pipeline for a single CRM object.

    Args:
        object_info (dict): raw target-object entry from the object-info file.
        execute_datetime (ExecuteDateTime): run-timestamp helper.
    """
    # 1. Dump the raw object info for traceability.
    logger.debug(f'D-CTRL-04 対象のオブジェクト情報を出力します オブジェクト情報:[{object_info}]')

    # 2. Validate the entry's shape and wrap it in a TargetObject.
    logger.info('I-CTRL-05 オブジェクト情報形式チェック処理呼び出し')
    target_object = check_object_info_process(object_info, execute_datetime)
    target_object_name = target_object.object_name

    # 3. Announce which object is being processed.
    logger.info(f'I-CTRL-06 [{target_object_name}]のデータ取得を開始します')

    # 4. Honor the per-object skip flag.
    if target_object.is_skip is True:
        logger.info(f'I-CTRL-07 [{target_object_name}]のデータ取得処理をスキップします')
        return

    # 5. Resolve the fetch period for this object.
    logger.info(f'I-CTRL-08 [{target_object_name}]のデータ取得期間設定処理呼び出し')
    last_fetch_datetime = set_datetime_period_process(target_object, execute_datetime)

    # 6. Fetch the CRM records.
    logger.info(f'I-CTRL-09 [{target_object_name}]のデータ取得処理呼び出し')
    crm_data_response = fetch_crm_data_process(target_object, last_fetch_datetime)

    # 7. Announce the output file name.
    logger.info(f'I-CTRL-10 [{target_object_name}] の出力ファイル名は [{target_object.upload_file_name}] となります')

    # 8. Back up the raw response JSON.
    logger.info(f'I-CTRL-11 [{target_object_name}] CRM電文データバックアップ処理呼び出し')
    backup_crm_data_process(target_object_name, crm_data_response, execute_datetime)

    # 9. Convert the response to CSV.
    logger.info(f'I-CTRL-12 [{target_object.object_name}] CSV変換処理呼び出し')
    csv_string = convert_crm_csv_data_process(target_object, crm_data_response)

    # 10. Back up the converted CSV.
    logger.info(f'I-CTRL-13 [{target_object_name}] CSVデータバックアップ処理呼び出し')
    backup_crm_csv_data_process(target_object, execute_datetime, csv_string)

    # 11. Copy the CSV into the import bucket.
    logger.info(f'I-CTRL-14 [{target_object_name}] CSVデータアップロード処理呼び出し')
    copy_crm_csv_data_process(target_object, execute_datetime)

    # 12. Release the large payloads before the next object is processed.
    del crm_data_response
    del csv_string
    gc.collect()

    # 13. Persist the new last-fetch datetime.
    logger.info(f'I-CTRL-15 [{target_object_name}] 前回取得日時ファイル更新処理呼び出し')
    upload_last_fetch_datetime_process(target_object, last_fetch_datetime)

    # 14. Done for this object.
    logger.info(f'I-CTRL-16 [{target_object_name}] 処理正常終了')
    return
42
ecs/crm-datafetch/src/convert_crm_csv_data_process.py
Normal file
42
ecs/crm-datafetch/src/convert_crm_csv_data_process.py
Normal file
@ -0,0 +1,42 @@
|
||||
from src.config.objects import TargetObject
|
||||
from src.converter.converter import CSVStringConverter
|
||||
from src.error.exceptions import DataConvertException
|
||||
from src.system_var.constants import CONV_JP_NAME
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def convert_crm_csv_data_process(target_object: TargetObject, crm_data_response: dict):
    """CSV conversion step: turn the Salesforce response into a CSV string.

    Args:
        target_object (TargetObject): metadata for the object being fetched.
        crm_data_response (dict): Salesforce object data.

    Raises:
        DataConvertException: when the conversion fails.

    Returns:
        str: the converted CSV text.
    """
    # Step 1: announce the start of the conversion.
    target_object_name = target_object.object_name
    logger.info(f'I-CONV-01 [{target_object_name}] のCSV変換処理を開始します')

    try:
        # Step 2: delegate the record→CSV work to the converter.
        csv_string = CSVStringConverter(target_object, crm_data_response).convert()
        logger.debug(f'D-CONV-02 [{target_object_name}] のCSV変換処理 正常終了')
    except Exception as e:
        raise DataConvertException(
            'E-CONV-01', CONV_JP_NAME, f'[{target_object_name}] CSV変換に失敗しました エラー内容:[{e}]')

    # Step 3: announce completion and return the CSV.
    logger.info(f'I-CONV-03 [{target_object_name}] のCSV変換処理を終了します')
    return csv_string
0
ecs/crm-datafetch/src/converter/__init__.py
Normal file
0
ecs/crm-datafetch/src/converter/__init__.py
Normal file
63
ecs/crm-datafetch/src/converter/convert_strategy.py
Normal file
63
ecs/crm-datafetch/src/converter/convert_strategy.py
Normal file
@ -0,0 +1,63 @@
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
from dateutil.tz import gettz
|
||||
from src.system_var.constants import (CRM_DATETIME_FORMAT, CSV_FALSE_VALUE,
|
||||
CSV_TRUE_VALUE,
|
||||
DATE_PATTERN_YYYYMMDDHHMMSSFFF_UTC,
|
||||
YYYYMMDDHHMMSS)
|
||||
from src.system_var.environments import CONVERT_TZ
|
||||
|
||||
|
||||
class ConvertStrategyFactory:
    """Picks the value-conversion strategy matching a raw Salesforce value."""

    def __init__(self) -> None:
        # One shared instance per strategy; they hold no per-value state.
        self.__none_value_convert_strategy = NoneValueConvertStrategy()
        self.__float_convert_strategy = FloatConvertStrategy()
        self.__boolean_convert_strategy = BooleanConvertStrategy()
        self.__datetime_convert_strategy = DatetimeConvertStrategy()
        self.__non_convert_strategy = NonConvertStrategy()

    def create(self, value):
        """Return the strategy for ``value``.

        The check order matters: None, exact float, exact bool, then strings
        that fully match the CRM datetime pattern; everything else passes
        through unchanged.
        """
        if value is None:
            return self.__none_value_convert_strategy
        if type(value) == float:
            return self.__float_convert_strategy
        if type(value) == bool:
            return self.__boolean_convert_strategy
        if type(value) == str and re.fullmatch(DATE_PATTERN_YYYYMMDDHHMMSSFFF_UTC, value):
            return self.__datetime_convert_strategy
        return self.__non_convert_strategy
class NoneValueConvertStrategy:
    """Maps a missing (None) value to an empty CSV cell."""

    def convert_value(self, convert_value: None) -> str:
        """Return the empty string regardless of input."""
        return ''
class BooleanConvertStrategy:
    """Maps Python booleans to the configured CSV true/false tokens."""

    def convert_value(self, convert_value: str) -> bool:
        # NOTE(review): the "-> bool" annotation looks wrong — the returned
        # CSV_TRUE_VALUE / CSV_FALSE_VALUE constants are CSV cell tokens;
        # confirm their actual type before relying on the annotation.
        if convert_value is True:
            return CSV_TRUE_VALUE
        return CSV_FALSE_VALUE
class DatetimeConvertStrategy:
    """Reformats a CRM datetime string for CSV output."""

    def convert_value(self, convert_value: str) -> str:
        # The downstream registration step treats values as JST, so convert
        # the CRM timestamp into the CONVERT_TZ zone before formatting.
        parsed = datetime.strptime(convert_value, CRM_DATETIME_FORMAT)
        return parsed.astimezone(gettz(CONVERT_TZ)).strftime(YYYYMMDDHHMMSS)
class FloatConvertStrategy:
    """Converts float values to int for CSV output.

    Note int() truncates toward zero, so any fractional part is dropped —
    presumably the source fields are integral values delivered as floats;
    confirm before applying to true decimals.
    """

    def convert_value(self, convert_value: str) -> int:
        return int(convert_value)
class NonConvertStrategy:
    """Pass-through strategy for values that need no conversion."""

    def convert_value(self, convert_value: str):
        """Return the input unchanged."""
        return convert_value
79
ecs/crm-datafetch/src/converter/converter.py
Normal file
79
ecs/crm-datafetch/src/converter/converter.py
Normal file
@ -0,0 +1,79 @@
|
||||
import csv
|
||||
import io
|
||||
|
||||
from src.config.objects import TargetObject
|
||||
from src.converter.convert_strategy import ConvertStrategyFactory
|
||||
|
||||
|
||||
class CSVStringConverter:
    """Converts a list of Salesforce record dicts into one CSV string.

    Pipeline: drop the 'attributes' metadata and uppercase every key, map each
    configured column through a type-specific strategy, then serialize with
    csv.writer (all fields quoted, CRLF line endings).
    """

    def __init__(self, target_object: TargetObject, sf_object_jsons: dict) -> None:
        self.__target_object = target_object
        self.__sf_object_jsons = sf_object_jsons
        self.__convert_strategy_factory = ConvertStrategyFactory()

    def convert(self) -> str:
        """Run the full record→CSV pipeline and return the CSV text."""
        extracted_sf_object_jsons = self.__extract_sf_object_jsons()
        csv_data = self.__convert_to_csv(extracted_sf_object_jsons)
        return self.__write_csv_string(csv_data)

    def __extract_sf_object_jsons(self) -> list:
        # Normalize every record up front so conversion can assume uppercase keys.
        try:
            return [self.__extract_necessary_props_from(record)
                    for record in self.__sf_object_jsons]
        except Exception as e:
            raise Exception('必要なjsonのデータ抽出に失敗しました', e)

    def __extract_necessary_props_from(self, sf_object_json) -> dict:
        # Drop the Salesforce 'attributes' metadata and uppercase the keys.
        try:
            clone_sf_object = {**sf_object_json}
            del clone_sf_object['attributes']
            return {k.upper(): v for k, v in clone_sf_object.items()}
        except Exception as e:
            raise Exception('必要なjsonのデータ成形に失敗しました', e)

    def __convert_to_csv(self, extracted_sf_object_jsons) -> list:
        # BUGFIX: initialize the variables referenced by the error message so a
        # failure before the loops bind them (e.g. while reading columns)
        # cannot raise a NameError inside the except handler and mask the real
        # error.
        row_number = 0
        column = None
        try:
            columns = self.__target_object.columns
            csv_data = []
            for row_number, json_object in enumerate(extracted_sf_object_jsons, 1):
                csv_row = []
                for column in columns:
                    raw_value = json_object[column.upper()]
                    strategy = self.__convert_strategy_factory.create(raw_value)
                    csv_row.append(strategy.convert_value(raw_value))
                csv_data.append(csv_row)
            return csv_data
        except Exception as e:
            raise Exception(
                f'CSV変換に失敗しました カラム名:[{column}] 行番号: [{row_number}] エラー内容:[{e}]')

    def __write_csv_string(self, csv_data) -> str:
        # Header row first (the configured column names), then the converted rows.
        try:
            with io.StringIO(newline='') as string_stream:
                writer = csv.writer(string_stream, delimiter=',', lineterminator='\r\n',
                                    doublequote=True, quotechar='"', quoting=csv.QUOTE_ALL, strict=True)
                writer.writerow(self.__target_object.columns)
                writer.writerows(csv_data)
                return string_stream.getvalue()
        except Exception as e:
            raise Exception('csvデータの取得に失敗しました', e)
46
ecs/crm-datafetch/src/copy_crm_csv_data_process.py
Normal file
46
ecs/crm-datafetch/src/copy_crm_csv_data_process.py
Normal file
@ -0,0 +1,46 @@
|
||||
from src.aws.s3 import BackupBucket, DataBucket
|
||||
from src.config.objects import TargetObject
|
||||
from src.error.exceptions import FileUploadException
|
||||
from src.system_var.constants import UPLD_JP_NAME
|
||||
from src.system_var.environments import CRM_IMPORT_DATA_BACKUP_FOLDER
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def copy_crm_csv_data_process(target_object: TargetObject, execute_datetime: ExecuteDateTime):
    """CSV upload step: copy the backed-up CSV into the data-import bucket.

    Args:
        target_object (TargetObject): metadata for the object being fetched.
        execute_datetime (ExecuteDateTime): run-timestamp helper used for the key prefix.

    Raises:
        FileUploadException: when the S3 copy fails.
    """
    # Step 1: announce the start of the upload for this object.
    target_object_name = target_object.object_name
    upload_file_name = target_object.upload_file_name
    logger.info(
        f'I-UPLD-01 [{target_object_name}] のCSVデータアップロード処理を開始します ファイル名:[{upload_file_name}.csv]')

    try:
        # Step 2: copy the CSV already written to the backup bucket into the
        # data-import bucket (server-side copy instead of re-uploading).
        source_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{execute_datetime.to_path()}/{upload_file_name}.csv'
        DataBucket().put_csv_from(str(BackupBucket()), source_key)
        logger.debug(f'D-UPLD-02 [{target_object_name}] のCSVデータアップロード 正常終了')
    except Exception as e:
        raise FileUploadException(
            'E-UPLD-01', UPLD_JP_NAME, f'[{target_object_name}] CSVデータのアップロードに失敗しました ファイル名:[{upload_file_name}.csv] エラー内容:[{e}]')

    # Step 3: announce completion, then hand control back to the caller.
    logger.info(
        f'I-UPLD-03 [{target_object_name}] のCSVデータのアップロード処理を終了します')
    return
0
ecs/crm-datafetch/src/error/__init__.py
Normal file
0
ecs/crm-datafetch/src/error/__init__.py
Normal file
35
ecs/crm-datafetch/src/error/exceptions.py
Normal file
35
ecs/crm-datafetch/src/error/exceptions.py
Normal file
@ -0,0 +1,35 @@
|
||||
from abc import ABCMeta
|
||||
|
||||
|
||||
class MeDaCaCRMDataFetchException(Exception, metaclass=ABCMeta):
    """Base class for all MeDaCa-specific errors in the CRM data-fetch service.

    Carries a stable error id and the display name of the failing processing
    step alongside the human-readable message.
    """

    def __init__(self, error_id: str, func_name: str, message: str) -> None:
        super().__init__(message)
        # Display name of the processing step that raised the error.
        self.func_name = func_name
        # Stable log/error identifier (e.g. 'E-UPLD-01').
        self.error_id = error_id


class FileNotFoundException(MeDaCaCRMDataFetchException):
    """Raised when an expected S3 object is missing."""


class FileUploadException(MeDaCaCRMDataFetchException):
    """Raised when an S3 upload fails."""


class InvalidConfigException(MeDaCaCRMDataFetchException):
    """Raised when config validation fails."""


class DataConvertException(MeDaCaCRMDataFetchException):
    """Raised when data conversion fails."""


class SalesforceAPIException(MeDaCaCRMDataFetchException):
    """Raised when a Salesforce API call fails."""
147
ecs/crm-datafetch/src/fetch_crm_data_process.py
Normal file
147
ecs/crm-datafetch/src/fetch_crm_data_process.py
Normal file
@ -0,0 +1,147 @@
|
||||
from requests.exceptions import ConnectTimeout, ReadTimeout
|
||||
from tenacity import retry, stop_after_attempt
|
||||
from tenacity.wait import wait_exponential
|
||||
|
||||
from src.config.objects import LastFetchDatetime, TargetObject
|
||||
from src.error.exceptions import SalesforceAPIException
|
||||
from src.salesforce.salesforce_api import SalesforceApiClient
|
||||
from src.salesforce.soql_builder import SOQLBuilder
|
||||
from src.system_var.constants import FETCH_JP_NAME
|
||||
from src.system_var.environments import (
|
||||
CRM_AUTH_TIMEOUT, CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT,
|
||||
CRM_FETCH_RECORD_RETRY_INTERVAL, CRM_FETCH_RECORD_RETRY_MAX_INTERVAL,
|
||||
CRM_FETCH_RECORD_RETRY_MIN_INTERVAL, CRM_FETCH_RECORD_TIMEOUT,
|
||||
CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT,
|
||||
CRM_GET_RECORD_COUNT_RETRY_INTERVAL,
|
||||
CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL,
|
||||
CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL, CRM_GET_RECORD_COUNT_TIMEOUT)
|
||||
from src.util.counter_object import CounterObject
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def fetch_crm_data_process(target_object: TargetObject, last_fetch_datetime: LastFetchDatetime):
    """CRM data-fetch step: count, then fetch the records for one object.

    Args:
        target_object (TargetObject): metadata for the object being fetched.
        last_fetch_datetime (LastFetchDatetime): fetch-period boundaries.

    Raises:
        SalesforceAPIException: when either Salesforce call ultimately fails.

    Returns:
        Salesforce object data returned by the fetch SOQL.
    """
    # Step 1: announce the start of the fetch for this object.
    logger.info(
        f'I-FETCH-01 [{target_object.object_name}] のCRMからのデータ取得処理を開始します')

    target_object_name = target_object.object_name

    # Counter objects let the @retry-decorated helpers detect whether another
    # attempt remains, so the final failure does not log a "will retry" line.
    # (Plain ints would not survive across retry invocations.)
    count_counter = CounterObject(1)
    data_counter = CounterObject(1)

    try:
        # Step 2: count the records inside the fetch period.
        logger.info(f'I-FETCH-02 [{target_object_name}] の件数取得を開始します')

        soql_builder = SOQLBuilder(target_object, last_fetch_datetime)
        record_count = fetch_record_count_retry(
            soql_builder.create_count_soql(), target_object_name, count_counter)

        logger.info(f'I-FETCH-03 [{target_object_name}] の件数:[{record_count}]')

    except Exception as e:
        raise SalesforceAPIException(
            'E-FETCH-01', FETCH_JP_NAME, f'[{target_object_name}] の件数取得に失敗しました エラー内容:[{e}]')

    try:
        # Step 3: fetch the records themselves.
        logger.info(f'I-FETCH-04 [{target_object_name}] のレコード取得を開始します')

        crm_data_response = fetch_sf_data_retry(
            soql_builder.create_fetch_soql(), target_object_name, data_counter)

        logger.info(f'I-FETCH-05 [{target_object_name}] のレコード取得が成功しました')

    except Exception as e:
        raise SalesforceAPIException(
            'E-FETCH-02', FETCH_JP_NAME, f'[{target_object_name}] のレコード取得に失敗しました エラー内容:[{e}]')

    # Steps 4-5: announce completion and return the response.
    logger.info(f'I-FETCH-06 [{target_object_name}] のCRMからのデータ取得処理を終了します')
    return crm_data_response
@retry(
    wait=wait_exponential(multiplier=CRM_GET_RECORD_COUNT_RETRY_INTERVAL,
                          min=CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL, max=CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL),
    stop=stop_after_attempt(CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT))
def fetch_record_count_retry(soql: str, target_object_name: str, count_counter: CounterObject):
    """Run the COUNT SOQL with exponential-backoff retries.

    The counter tracks how many attempts have already logged a "will retry"
    message so the final attempt does not log one just before failing for good.
    """
    try:
        return SalesforceApiClient().fetch_sf_count(soql)

    except ConnectTimeout as e:
        # Only log "will retry" while another attempt actually remains.
        if count_counter.describe() < CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT:
            count_counter.increment(1)
            logger.warning(f'W-FETCH-01 CRMの接続処理がタイムアウトしため、リトライします:[{CRM_AUTH_TIMEOUT}] エラー内容:[{e}]')
        raise e

    except ReadTimeout as e:
        # Only log "will retry" while another attempt actually remains.
        if count_counter.describe() < CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT:
            count_counter.increment(1)
            logger.warning(
                f'W-FETCH-02 [{target_object_name}] の件数取得処理がタイムアウトしたため、リトライします:[{CRM_GET_RECORD_COUNT_TIMEOUT}] エラー内容:[{e}]')
        raise e

    except Exception as e:
        # Only log "will retry" while another attempt actually remains.
        if count_counter.describe() < CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT:
            count_counter.increment(1)
            logger.warning(
                f'W-FETCH-03 [{target_object_name}] の件数取得に失敗したため、リトライします エラー内容:[{e}]')
        raise e
@retry(
    wait=wait_exponential(multiplier=CRM_FETCH_RECORD_RETRY_INTERVAL,
                          min=CRM_FETCH_RECORD_RETRY_MIN_INTERVAL, max=CRM_FETCH_RECORD_RETRY_MAX_INTERVAL),
    stop=stop_after_attempt(CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT))
def fetch_sf_data_retry(soql: str, target_object_name: str, data_counter: CounterObject):
    """Fetch the records for one object from CRM, with exponential-backoff retry.

    Args:
        soql: SELECT SOQL statement to execute.
        target_object_name: Object name, used only in log messages.
        data_counter: Shared attempt counter; the "retrying" warning is only
            emitted while attempts remain, so it never precedes a terminal failure.

    Raises:
        ConnectTimeout, ReadTimeout, Exception: re-raised so tenacity's @retry
            can retry; after the final attempt the exception propagates.
    """
    try:
        salesforce_api_client = SalesforceApiClient()
        return salesforce_api_client.fetch_sf_data(soql)

    except ConnectTimeout as e:
        # Guard by counter so "retrying" is not logged on the last attempt.
        if data_counter.describe() < CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT:
            data_counter.increment(1)
            # Fixed typo: 「タイムアウトしため」->「タイムアウトしたため」 (matches W-FETCH-05/06 wording).
            logger.warning(f'W-FETCH-04 CRMの接続処理がタイムアウトしたため、リトライします:[{CRM_AUTH_TIMEOUT}] エラー内容:[{e}]')
        raise e

    except ReadTimeout as e:
        # Guard by counter so "retrying" is not logged on the last attempt.
        if data_counter.describe() < CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT:
            data_counter.increment(1)
            logger.warning(
                f'W-FETCH-05 [{target_object_name}] のレコード取得処理がタイムアウトしたため、リトライします:[{CRM_FETCH_RECORD_TIMEOUT}] エラー内容:[{e}]')
        raise e

    except Exception as e:
        # Guard by counter so "retrying" is not logged on the last attempt.
        if data_counter.describe() < CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT:
            data_counter.increment(1)
            logger.warning(
                f'W-FETCH-06 [{target_object_name}] のレコード取得に失敗したため、リトライします エラー内容:[{e}]')
        raise e
0
ecs/crm-datafetch/src/parser/__init__.py
Normal file
0
ecs/crm-datafetch/src/parser/__init__.py
Normal file
17
ecs/crm-datafetch/src/parser/json_parse.py
Normal file
17
ecs/crm-datafetch/src/parser/json_parse.py
Normal file
@ -0,0 +1,17 @@
|
||||
import json
|
||||
import re
|
||||
|
||||
from src.system_var.constants import EXCLUDE_SYMBOL
|
||||
|
||||
|
||||
class JsonParser():
    """Parses JSON text that may contain comment lines.

    Before calling json.loads(), every span starting with one of the symbols
    in EXCLUDE_SYMBOL (declared in src.system_var.constants) is stripped out.
    """

    def __init__(self, json_str: str) -> None:
        # Raw JSON text (possibly containing comments) to be parsed.
        self.__json_str = json_str

    def parse(self) -> dict:
        """Strip comment-like spans for each symbol, then parse the remainder.

        Returns:
            dict: the parsed document.

        Raises:
            json.JSONDecodeError: if the stripped text is not valid JSON.
        """
        for symbol in EXCLUDE_SYMBOL:
            # Regex that removes a comment span: a whitespace character, then
            # the symbol, then the rest of the line. NOTE(review): the
            # negative lookahead (?!\") appears redundant (the next char must
            # be the symbol), and with symbol '/', any ' /' sequence outside a
            # comment would also be stripped — confirm config content never
            # contains whitespace+symbol except in comments.
            replace_comment_regex = rf'\s(?!\"){symbol}[\s\S]*?.*'
            self.__json_str = re.sub(replace_comment_regex, '', self.__json_str)

        return json.loads(self.__json_str)
||||
82
ecs/crm-datafetch/src/prepare_data_fetch_process.py
Normal file
82
ecs/crm-datafetch/src/prepare_data_fetch_process.py
Normal file
@ -0,0 +1,82 @@
|
||||
from src.aws.s3 import ConfigBucket
|
||||
from src.config.objects import FetchTargetObjects
|
||||
from src.error.exceptions import FileNotFoundException, InvalidConfigException
|
||||
from src.parser.json_parse import JsonParser
|
||||
from src.system_var.constants import PRE_JP_NAME
|
||||
from src.system_var.environments import (CRM_CONFIG_BUCKET,
|
||||
OBJECT_INFO_FILENAME,
|
||||
OBJECT_INFO_FOLDER)
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def prepare_data_fetch_process():
    """Data-fetch preparation: load, parse and validate the CRM object-info file.

    Downloads the CRM target-object definition file from the S3 config bucket,
    parses it (comment lines allowed), validates its structure, and initialises
    the per-run bookkeeping objects.

    Raises:
        FileNotFoundException: the object-info file could not be read from S3.
        InvalidConfigException: the file could not be parsed, or failed the
            structure check.

    Returns:
        tuple: (fetch_target_objects, execute_datetime, process_result)
            fetch_target_objects: validated CRM object definitions.
            execute_datetime: timestamp captured at process start.
            process_result: empty dict to accumulate per-object results.
    """
    # (1) Start log.
    logger.info('I-PRE-01 データ取得準備処理を開始します')

    # (2) Capture the process start time; it drives fetch windows and S3 paths.
    execute_datetime = ExecuteDateTime()
    logger.info(f'I-PRE-02 データ取得処理開始日時:{execute_datetime}')

    try:
        # (3) Download the CRM object-info file from the config bucket.
        # Fixed: the logged path was missing the '/' between bucket and folder
        # (cf. the I-DATE-02 message in set_datetime_period_process).
        object_info_file_s3_path = f's3://{CRM_CONFIG_BUCKET}/{OBJECT_INFO_FOLDER}/{OBJECT_INFO_FILENAME}'
        logger.debug(
            f'D-PRE-03 CRM_取得オブジェクト情報ファイルの取得開始します ファイルパス:[{object_info_file_s3_path}]')

        config_bucket = ConfigBucket()
        object_info_file_str = config_bucket.get_object_info_file()

        logger.debug('D-PRE-04 CRM_取得オブジェクト情報ファイルの取得成功しました')

    except Exception as e:
        raise FileNotFoundException(
            'E-PRE-01', PRE_JP_NAME, f'CRM_取得オブジェクト情報ファイルが存在しません ファイル名:[{OBJECT_INFO_FILENAME}] エラー内容:[{e}]')

    try:
        # (4) Parse the file; JsonParser strips comment lines first.
        logger.debug('D-PRE-05 CRM_取得オブジェクト情報ファイルをパースします')

        json_parser = JsonParser(object_info_file_str)
        object_info_file_dict = json_parser.parse()

        logger.debug('D-PRE-06 CRM_取得オブジェクト情報ファイルのパースに成功しました')

    except Exception as e:
        raise InvalidConfigException(
            'E-PRE-02', PRE_JP_NAME, f'CRM_取得オブジェクト情報ファイルのパースに失敗しました エラー内容:[{e}]')

    # (5) Validate the structure of the parsed object-info ('objects' key).
    try:
        logger.debug('D-PRE-07 CRM_取得オブジェクト情報ファイルの形式チェックを開始します')

        fetch_target_objects = FetchTargetObjects(object_info_file_dict)

        logger.debug('D-PRE-08 CRM_取得オブジェクト情報ファイルの形式チェック 正常終了')

    except Exception as e:
        raise InvalidConfigException(
            'E-PRE-03', PRE_JP_NAME, f'CRM_取得オブジェクト情報ファイルの形式チェックに失敗しました ファイル名:[{OBJECT_INFO_FILENAME}] エラー内容:[{e}]')

    # (6) Per-object result map, filled in by later processing steps.
    process_result = {}

    # (7) End log.
    logger.info('I-PRE-09 データ取得準備処理を終了します')

    # (8) Hand off to the next stage.
    return fetch_target_objects, execute_datetime, process_result
||||
0
ecs/crm-datafetch/src/salesforce/__init__.py
Normal file
0
ecs/crm-datafetch/src/salesforce/__init__.py
Normal file
28
ecs/crm-datafetch/src/salesforce/salesforce_api.py
Normal file
28
ecs/crm-datafetch/src/salesforce/salesforce_api.py
Normal file
@ -0,0 +1,28 @@
|
||||
from simple_salesforce import Salesforce
|
||||
from src.system_var.environments import (CRM_AUTH_DOMAIN, CRM_AUTH_TIMEOUT,
|
||||
CRM_FETCH_RECORD_TIMEOUT,
|
||||
CRM_GET_RECORD_COUNT_TIMEOUT,
|
||||
CRM_USER_NAME, CRM_USER_PASSWORD,
|
||||
CRM_USER_SECURITY_TOKEN)
|
||||
|
||||
|
||||
class SalesforceApiClient():
    """Thin wrapper around simple_salesforce.Salesforce with project timeouts."""

    def __init__(self) -> None:
        # Authenticates immediately; credentials come from the ECS task environment.
        self.__sf = Salesforce(
            username=CRM_USER_NAME,
            password=CRM_USER_PASSWORD,
            security_token=CRM_USER_SECURITY_TOKEN,
            domain=CRM_AUTH_DOMAIN,
        )

    def query(self, soql, include_deleted=True, conn_timeout=100, read_timeout=300):
        """Run a single-page SOQL query (deleted records included by default)."""
        timeout = (float(conn_timeout), float(read_timeout))
        return self.__sf.query(soql, include_deleted, timeout=timeout)

    def query_all(self, soql, include_deleted=True, conn_timeout=100, read_timeout=300):
        """Run a SOQL query and auto-page through every result batch."""
        timeout = (float(conn_timeout), float(read_timeout))
        return self.__sf.query_all(soql, include_deleted, timeout=timeout)

    def fetch_sf_count(self, soql: str):
        """Execute a COUNT() query and return the scalar count (records[0].expr0)."""
        response = self.query(soql, conn_timeout=CRM_AUTH_TIMEOUT, read_timeout=CRM_GET_RECORD_COUNT_TIMEOUT)
        return response.get('records')[0].get('expr0')

    def fetch_sf_data(self, soql: str):
        """Execute a data query and return the list under the 'records' key."""
        response = self.query_all(soql, conn_timeout=CRM_AUTH_TIMEOUT, read_timeout=CRM_FETCH_RECORD_TIMEOUT)
        return response.get('records')
||||
34
ecs/crm-datafetch/src/salesforce/soql_builder.py
Normal file
34
ecs/crm-datafetch/src/salesforce/soql_builder.py
Normal file
@ -0,0 +1,34 @@
|
||||
from src.config.objects import TargetObject, LastFetchDatetime
|
||||
|
||||
|
||||
class SOQLBuilder:
    """Builds the COUNT / data-fetch SOQL statements for one target object."""

    def __init__(self, target_object: TargetObject, last_fetch_datetime: LastFetchDatetime) -> None:
        # Shared SELECT template; the fetch window is (from, to] on the
        # object's configured datetime column.
        self.__SELECT_SOQL = """SELECT {column_or_expression} FROM {object_name}
            WHERE {datetime_column} > {last_fetch_datetime_from}
            AND {datetime_column} <= {last_fetch_datetime_to}
        """
        self.__target_object = target_object
        self.__last_fetch_datetime = last_fetch_datetime

    def __build_soql(self, column_or_expression: str) -> str:
        """Fill the shared SELECT template (common to both public builders).

        NOTE(review): values are interpolated with str.format — they come from
        the validated config/last-fetch files, not end-user input; confirm that
        assumption holds before reusing with other sources.
        """
        return self.__SELECT_SOQL.format(
            column_or_expression=column_or_expression,
            object_name=self.__target_object.object_name,
            last_fetch_datetime_from=self.__last_fetch_datetime.last_fetch_datetime_from,
            last_fetch_datetime_to=self.__last_fetch_datetime.last_fetch_datetime_to,
            datetime_column=self.__target_object.datetime_column
        )

    def create_count_soql(self):
        """Return SOQL that counts records in the fetch window (COUNT(Id))."""
        return self.__build_soql('COUNT(Id)')

    def create_fetch_soql(self):
        """Return SOQL that selects the configured columns in the fetch window."""
        # Deduplicated: previously this method repeated the whole .format(...)
        # call of create_count_soql.
        return self.__build_soql(','.join(self.__target_object.columns))
||||
69
ecs/crm-datafetch/src/set_datetime_period_process.py
Normal file
69
ecs/crm-datafetch/src/set_datetime_period_process.py
Normal file
@ -0,0 +1,69 @@
|
||||
from src.aws.s3 import ConfigBucket
|
||||
from src.config.objects import LastFetchDatetime, TargetObject
|
||||
from src.error.exceptions import FileNotFoundException, InvalidConfigException
|
||||
from src.parser.json_parse import JsonParser
|
||||
from src.system_var.constants import DATE_JP_NAME
|
||||
from src.system_var.environments import (CRM_CONFIG_BUCKET,
|
||||
LAST_FETCH_DATE_FOLDER)
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def set_datetime_period_process(target_object: TargetObject, execute_datetime: ExecuteDateTime):
    """Determine the fetch window (from/to) for one target object.

    Reads the object's last-fetch-datetime file from the S3 config bucket,
    validates it, and builds a LastFetchDatetime instance describing the
    period to fetch.

    Args:
        target_object (TargetObject): definition of the object being fetched.
        execute_datetime (ExecuteDateTime): process start time.

    Raises:
        FileNotFoundException: the last-fetch-datetime file is missing on S3.
        InvalidConfigException: the file fails parsing / format checks.

    Returns:
        last_fetch_datetime: the resolved fetch period.
    """
    # (1) Start log.
    logger.info(
        f'I-DATE-01 [{target_object.object_name}] のデータ取得期間設定処理を開始します')

    try:
        # (2) Download this object's last-fetch-datetime file from S3.
        logger.info(
            f'I-DATE-02 前回取得日時ファイルの取得開始します ファイルパス:[s3://{CRM_CONFIG_BUCKET}/{LAST_FETCH_DATE_FOLDER}/{target_object.last_fetch_datetime_file_name}]')

        bucket = ConfigBucket()
        file_text = bucket.get_last_fetch_datetime_file(
            target_object.last_fetch_datetime_file_name)

        logger.info('I-DATE-03 前回取得日時ファイルの取得成功しました')

    except Exception as e:
        raise FileNotFoundException(
            'E-DATE-01', DATE_JP_NAME, f'前回取得日時ファイルが存在しません ファイル名:[{target_object.last_fetch_datetime_file_name}] エラー内容:[{e}]')

    try:
        # (3) Validate the file format and (4) derive the fetch period.
        logger.debug('D-DATE-04 前回取得日時ファイルの形式チェックを開始します')

        parsed_dict = JsonParser(file_text).parse()
        last_fetch_datetime = LastFetchDatetime(parsed_dict, execute_datetime)

        logger.debug('D-DATE-05 前回取得日時ファイルの形式チェック 正常終了')
        logger.info(
            f'I-DATE-06 取得範囲 From: [{last_fetch_datetime.last_fetch_datetime_from}] To: [{last_fetch_datetime.last_fetch_datetime_to}]')

    except Exception as e:
        raise InvalidConfigException(
            'E-DATE-02', DATE_JP_NAME, f'前回取得日時ファイルの形式チェック処理が失敗しました エラー内容:[{e}]')

    # (5) End log.
    logger.info(
        f'I-DATE-07 [{target_object.object_name}] のデータ取得期間設定処理を終了します')

    # (6) Hand off to the next stage.
    return last_fetch_datetime
||||
96
ecs/crm-datafetch/src/system_var/constants.py
Normal file
96
ecs/crm-datafetch/src/system_var/constants.py
Normal file
@ -0,0 +1,96 @@
|
||||
# environments (task settings file) — env-var NAMES read by environments.py
LOG_LEVEL = 'LOG_LEVEL'  # Log output level: DEBUG, INFO, WARNING or ERROR
CRM_AUTH_TIMEOUT = 'CRM_AUTH_TIMEOUT'  # CRM auth timeout (seconds)
CRM_AUTH_MAX_RETRY_ATTEMPT = 'CRM_AUTH_MAX_RETRY_ATTEMPT'  # CRM auth: max retry attempts
CRM_AUTH_RETRY_INTERVAL = 'CRM_AUTH_RETRY_INTERVAL'  # CRM auth: initial retry wait (seconds)
CRM_AUTH_RETRY_MIN_INTERVAL = 'CRM_AUTH_RETRY_MIN_INTERVAL'  # CRM auth: minimum retry wait (seconds)
CRM_AUTH_RETRY_MAX_INTERVAL = 'CRM_AUTH_RETRY_MAX_INTERVAL'  # CRM auth: maximum retry wait (seconds)
CRM_GET_RECORD_COUNT_TIMEOUT = 'CRM_GET_RECORD_COUNT_TIMEOUT'  # Record-count fetch timeout (seconds)
CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT = 'CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT'  # Record-count fetch: max retry attempts
CRM_GET_RECORD_COUNT_RETRY_INTERVAL = 'CRM_GET_RECORD_COUNT_RETRY_INTERVAL'  # Record-count fetch: initial retry wait (seconds)
CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL = 'CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL'  # Record-count fetch: minimum retry wait (seconds)
CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL = 'CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL'  # Record-count fetch: maximum retry wait (seconds)
CRM_FETCH_RECORD_TIMEOUT = 'CRM_FETCH_RECORD_TIMEOUT'  # Record fetch timeout (seconds)
CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT = 'CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT'  # Record fetch: max retry attempts
CRM_FETCH_RECORD_RETRY_INTERVAL = 'CRM_FETCH_RECORD_RETRY_INTERVAL'  # Record fetch: initial retry wait (seconds)
CRM_FETCH_RECORD_RETRY_MIN_INTERVAL = 'CRM_FETCH_RECORD_RETRY_MIN_INTERVAL'  # Record fetch: minimum retry wait (seconds)
CRM_FETCH_RECORD_RETRY_MAX_INTERVAL = 'CRM_FETCH_RECORD_RETRY_MAX_INTERVAL'  # Record fetch: maximum retry wait (seconds)
CONVERT_TZ = 'CONVERT_TZ'  # Timezone used when converting CRM datetimes

# environments (ECS Task Environment) — env-var NAMES, required at startup
CRM_AUTH_DOMAIN = 'CRM_AUTH_DOMAIN'  # CRM API auth endpoint domain
CRM_USER_NAME = 'CRM_USER_NAME'  # CRM API user name
CRM_USER_PASSWORD = 'CRM_USER_PASSWORD'  # CRM API user password
CRM_USER_SECURITY_TOKEN = 'CRM_USER_SECURITY_TOKEN'  # CRM API user security token
CRM_CONFIG_BUCKET = 'CRM_CONFIG_BUCKET'  # Bucket holding CRM fetch config files
CRM_BACKUP_BUCKET = 'CRM_BACKUP_BUCKET'  # Bucket holding CRM backup data
IMPORT_DATA_BUCKET = 'IMPORT_DATA_BUCKET'  # Bucket holding CRM import data
OBJECT_INFO_FOLDER = 'OBJECT_INFO_FOLDER'  # Folder for CRM target-object definitions
OBJECT_INFO_FILENAME = 'OBJECT_INFO_FILENAME'  # Filename of the target-object definitions
PROCESS_RESULT_FOLDER = 'PROCESS_RESULT_FOLDER'  # Folder for fetch-result output
PROCESS_RESULT_FILENAME = 'PROCESS_RESULT_FILENAME'  # Filename of the fetch-result output
LAST_FETCH_DATE_FOLDER = 'LAST_FETCH_DATE_FOLDER'  # Folder for last-fetch-datetime files
CRM_IMPORT_DATA_FOLDER = 'CRM_IMPORT_DATA_FOLDER'  # Folder for converted import data
LAST_FETCH_DATE_BACKUP_FOLDER = 'LAST_FETCH_DATE_BACKUP_FOLDER'  # Backup folder for last-fetch-datetime files
RESPONSE_JSON_BACKUP_FOLDER = 'RESPONSE_JSON_BACKUP_FOLDER'  # Backup folder for raw CRM responses
CRM_IMPORT_DATA_BACKUP_FOLDER = 'CRM_IMPORT_DATA_BACKUP_FOLDER'  # Backup folder for converted import data

# Datetime formats
# '.000Z' denotes UTC; milliseconds are always fixed to .000 (not needed here)
YYYYMMDDTHHMMSSTZ = '%Y-%m-%dT%H:%M:%S.000Z'
CRM_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.000%z'
YYYYMMDDHHMMSS = '%Y-%m-%d %H:%M:%S'
MILLISEC_FORMAT = '000Z'

# AWS
AWS_RESOURCE_S3 = 's3'
S3_RESPONSE_BODY = 'Body'
S3_CHAR_CODE = 'utf-8'

# Regex checks
EXCLUDE_SYMBOL = ['#', '/']  # Comment-start symbols stripped by JsonParser
DATE_PATTERN_YYYYMMDDTHHMMSSTZ = r'[12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]\.000Z'
DATE_PATTERN_YYYYMMDDHHMMSSFFF_UTC = r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.000\+0000'

# Logger
LOG_FORMAT = '[%(levelname)s]\t%(asctime)s\t%(message)s\n'
LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
LOG_LEVEL_INFO = 'INFO'

# Process names (Japanese labels used in error messages)
PROCESS_JP_NAME = 'コントロール処理'
PRE_JP_NAME = 'データ取得準備処理'
CHK_JP_NAME = 'オブジェクト情報形式チェック処理'
DATE_JP_NAME = 'データ取得期間設定処理'
FETCH_JP_NAME = 'CRMデータ取得処理'
RESBK_JP_NAME = 'CRM電文データバックアップ処理'
CONV_JP_NAME = 'CSV変換処理'
CSVBK_JP_NAME = 'CSVバックアップ処理'
UPLD_JP_NAME = 'CSVアップロード処理'
UPD_JP_NAME = '前回取得日時ファイル更新'
END_JP_NAME = '取得処理実施結果アップロード処理'

# CSV boolean encoding
CSV_TRUE_VALUE = '1'
CSV_FALSE_VALUE = '0'

# Object-info dictionary keys and expected value types
OBJECTS_KEY = 'objects'
OBJECTS_TYPE = list
OBJECT_NAME_KEY = 'object_name'
OBJECT_NAME_TYPE = str
COLUMNS_KEY = 'columns'
COLUMNS_TYPE = list
IS_SKIP_KEY = 'is_skip'
IS_SKIP_TYPE = bool
IS_UPDATE_LAST_FETCH_DATETIME_KEY = 'is_update_last_fetch_datetime'
IS_UPDATE_LAST_FETCH_DATETIME_TYPE = bool
LAST_FETCH_DATETIME_FILE_NAME_KEY = 'last_fetch_datetime_file_name'
LAST_FETCH_DATETIME_FILE_NAME_TYPE = str
UPLOAD_FILE_NAME_KEY = 'upload_file_name'
UPLOAD_FILE_NAME_TYPE = str
DATETIME_COLUMN_KEY = 'datetime_column'
DATETIME_COLUMN_TYPE = str
DATETIME_COLUMN_DEFAULT_VALUE = 'SystemModstamp'
LAST_FETCH_DATETIME_TO_KEY = 'last_fetch_datetime_to'
LAST_FETCH_DATETIME_FROM_KEY = 'last_fetch_datetime_from'
||||
73
ecs/crm-datafetch/src/system_var/environments.py
Normal file
73
ecs/crm-datafetch/src/system_var/environments.py
Normal file
@ -0,0 +1,73 @@
|
||||
import os

import src.system_var.constants as constants

# environments (task settings file) — optional, with defaults
# Log output level: DEBUG, INFO, WARNING or ERROR
LOG_LEVEL = os.environ.get(constants.LOG_LEVEL, constants.LOG_LEVEL_INFO)
# CRM auth timeout (seconds)
CRM_AUTH_TIMEOUT = int(os.environ.get(constants.CRM_AUTH_TIMEOUT, 100))
# CRM auth: max retry attempts
CRM_AUTH_MAX_RETRY_ATTEMPT = int(os.environ.get(constants.CRM_AUTH_MAX_RETRY_ATTEMPT, 3))
# CRM auth: initial retry wait (seconds)
CRM_AUTH_RETRY_INTERVAL = int(os.environ.get(constants.CRM_AUTH_RETRY_INTERVAL, 5))
# CRM auth: minimum retry wait (seconds)
CRM_AUTH_RETRY_MIN_INTERVAL = int(os.environ.get(constants.CRM_AUTH_RETRY_MIN_INTERVAL, 5))
# CRM auth: maximum retry wait (seconds)
CRM_AUTH_RETRY_MAX_INTERVAL = int(os.environ.get(constants.CRM_AUTH_RETRY_MAX_INTERVAL, 50))
# Record-count fetch timeout (seconds)
CRM_GET_RECORD_COUNT_TIMEOUT = int(os.environ.get(constants.CRM_GET_RECORD_COUNT_TIMEOUT, 300))
# Record-count fetch: max retry attempts
CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT = int(os.environ.get(constants.CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT, 3))
# Record-count fetch: initial retry wait (seconds)
CRM_GET_RECORD_COUNT_RETRY_INTERVAL = int(os.environ.get(constants.CRM_GET_RECORD_COUNT_RETRY_INTERVAL, 5))
# Record-count fetch: minimum retry wait (seconds)
CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL = int(os.environ.get(constants.CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL, 5))
# Record-count fetch: maximum retry wait (seconds)
CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL = int(os.environ.get(constants.CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL, 50))
# Record fetch timeout (seconds)
CRM_FETCH_RECORD_TIMEOUT = int(os.environ.get(constants.CRM_FETCH_RECORD_TIMEOUT, 300))
# Record fetch: max retry attempts
CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT = int(os.environ.get(constants.CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT, 3))
# Record fetch: initial retry wait (seconds)
CRM_FETCH_RECORD_RETRY_INTERVAL = int(os.environ.get(constants.CRM_FETCH_RECORD_RETRY_INTERVAL, 5))
# Record fetch: minimum retry wait (seconds)
CRM_FETCH_RECORD_RETRY_MIN_INTERVAL = int(os.environ.get(constants.CRM_FETCH_RECORD_RETRY_MIN_INTERVAL, 5))
# Record fetch: maximum retry wait (seconds)
CRM_FETCH_RECORD_RETRY_MAX_INTERVAL = int(os.environ.get(constants.CRM_FETCH_RECORD_RETRY_MAX_INTERVAL, 50))
# Timezone used when converting CRM datetimes
CONVERT_TZ = os.environ.get(constants.CONVERT_TZ, 'Asia/Tokyo')

# environments (ECS Task Environment) — required: os.environ[...] raises
# KeyError at import time if missing, failing fast on misconfiguration.
# CRM API auth endpoint domain
CRM_AUTH_DOMAIN = os.environ[constants.CRM_AUTH_DOMAIN]
# CRM API user name
CRM_USER_NAME = os.environ[constants.CRM_USER_NAME]
# CRM API user password
CRM_USER_PASSWORD = os.environ[constants.CRM_USER_PASSWORD]
# CRM API user security token
CRM_USER_SECURITY_TOKEN = os.environ[constants.CRM_USER_SECURITY_TOKEN]
# Bucket holding CRM fetch config files
CRM_CONFIG_BUCKET = os.environ[constants.CRM_CONFIG_BUCKET]
# Bucket holding CRM backup data
CRM_BACKUP_BUCKET = os.environ[constants.CRM_BACKUP_BUCKET]
# Bucket holding CRM import data
IMPORT_DATA_BUCKET = os.environ[constants.IMPORT_DATA_BUCKET]
# Folder for CRM target-object definitions
OBJECT_INFO_FOLDER = os.environ.get(constants.OBJECT_INFO_FOLDER, 'crm/object_info')
# Filename of the target-object definitions (required)
OBJECT_INFO_FILENAME = os.environ[constants.OBJECT_INFO_FILENAME]
# Folder for fetch-result output
PROCESS_RESULT_FOLDER = os.environ.get(constants.PROCESS_RESULT_FOLDER, 'data_import')
# Filename of the fetch-result output
PROCESS_RESULT_FILENAME = os.environ.get(constants.PROCESS_RESULT_FILENAME, 'process_result.json')
# Folder for last-fetch-datetime files
LAST_FETCH_DATE_FOLDER = os.environ.get(constants.LAST_FETCH_DATE_FOLDER, 'crm/last_fetch_datetime')
# Folder for converted import data
CRM_IMPORT_DATA_FOLDER = os.environ.get(constants.CRM_IMPORT_DATA_FOLDER, 'crm/target')
# Backup folder for last-fetch-datetime files
LAST_FETCH_DATE_BACKUP_FOLDER = os.environ.get(constants.LAST_FETCH_DATE_BACKUP_FOLDER, 'last_fetch_datetime')
# Backup folder for raw CRM responses
RESPONSE_JSON_BACKUP_FOLDER = os.environ.get(constants.RESPONSE_JSON_BACKUP_FOLDER, 'response_json')
# Backup folder for converted import data
CRM_IMPORT_DATA_BACKUP_FOLDER = os.environ.get(constants.CRM_IMPORT_DATA_BACKUP_FOLDER, 'data_import')
||||
57
ecs/crm-datafetch/src/upload_last_fetch_datetime_process.py
Normal file
57
ecs/crm-datafetch/src/upload_last_fetch_datetime_process.py
Normal file
@ -0,0 +1,57 @@
|
||||
import json
|
||||
|
||||
from src.aws.s3 import ConfigBucket
|
||||
from src.config.objects import LastFetchDatetime, TargetObject
|
||||
from src.error.exceptions import FileUploadException
|
||||
from src.system_var.constants import UPD_JP_NAME
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def upload_last_fetch_datetime_process(target_object: TargetObject, last_fetch_datetime: LastFetchDatetime):
    """Update the object's last-fetch-datetime file on S3.

    Writes a new file whose 'last_fetch_datetime_from' is this run's upper
    bound ('last_fetch_datetime_to'), making it the baseline for the next run,
    and whose 'last_fetch_datetime_to' is reset to an empty string. Skipped
    entirely when the object is configured with
    is_update_last_fetch_datetime = false.

    Args:
        target_object (TargetObject): definition of the object being fetched.
        last_fetch_datetime (LastFetchDatetime): the fetch window of this run.

    Raises:
        FileUploadException: the S3 upload failed.
    """
    # (1) Start log.
    logger.info(
        f'I-UPD-01 [{target_object.object_name}] の前回取得日時ファイルの更新処理を開始します')

    try:
        if target_object.is_update_last_fetch_datetime is False:
            # (2) Skip when configured not to advance the baseline.
            logger.info(
                f'I-UPD-02 [{target_object.object_name}] の前回取得日時ファイルの更新処理をスキップします')
            return

        # (3) Next run's 'from' is this run's 'to'; 'to' is cleared for the
        #     next run to fill in.
        last_fetch_datetime_dict = {
            'last_fetch_datetime_from': last_fetch_datetime.last_fetch_datetime_to,
            'last_fetch_datetime_to': ''
        }

        config_bucket = ConfigBucket()
        config_bucket.put_last_fetch_datetime_file(
            target_object.last_fetch_datetime_file_name, json.dumps(last_fetch_datetime_dict))

        # Fixed: message code D-UPD-03 is a debug-level code but was emitted
        # via logger.info — every other D-coded message (D-PRE/D-DATE/D-END)
        # uses logger.debug.
        logger.debug(
            f'D-UPD-03 [{target_object.object_name}] の前回取得日時ファイル更新処理 正常終了')

    except Exception as e:
        raise FileUploadException(
            'E-UPD-01',
            UPD_JP_NAME,
            f'[{target_object.object_name}] 前回処理日時ファイルのアップロードに失敗しました ファイル名:[{target_object.last_fetch_datetime_file_name}] エラー内容:[{e}]')

    # (4) End log.
    logger.info(
        f'I-UPD-04 [{target_object.object_name}] の前回取得日時ファイルの更新処理を終了します')

    # (5) Hand off to the next stage.
    return
||||
39
ecs/crm-datafetch/src/upload_result_data_process.py
Normal file
39
ecs/crm-datafetch/src/upload_result_data_process.py
Normal file
@ -0,0 +1,39 @@
|
||||
from src.aws.s3 import BackupBucket
|
||||
from src.error.exceptions import FileUploadException
|
||||
from src.system_var.constants import END_JP_NAME
|
||||
from src.system_var.environments import PROCESS_RESULT_FILENAME
|
||||
from src.util.execute_datetime import ExecuteDateTime
|
||||
from src.util.logger import logger_instance as logger
|
||||
|
||||
|
||||
def upload_result_data_process(process_result: dict, execute_datetime: ExecuteDateTime):
    """Upload the per-object fetch results JSON to the CRM backup bucket.

    Args:
        process_result (dict): accumulated per-object fetch results.
        execute_datetime (ExecuteDateTime): process start time, used to build
            the S3 key prefix.

    Raises:
        FileUploadException: the S3 upload failed.
    """
    # (1) Start log.
    logger.info('I-END-01 取得処理実施結果アップロード処理を開始します')

    try:
        # (2) Store the result JSON under <run-timestamp>/<result filename>.
        result_key = f'{execute_datetime.to_path()}/{PROCESS_RESULT_FILENAME}'
        BackupBucket().put_result_json(result_key, process_result)

        logger.debug('D-END-02 取得処理実施結果アップロード 正常終了')

    except Exception as e:
        raise FileUploadException(
            'E-END-01', END_JP_NAME, f'取得処理実施結果のアップロードに失敗しました ファイル名:[{PROCESS_RESULT_FILENAME}] エラー内容:[{e}]')

    # (3) End log.
    logger.info('I-END-03 取得処理実施結果アップロード処理を終了します')

    return
||||
0
ecs/crm-datafetch/src/util/__init__.py
Normal file
0
ecs/crm-datafetch/src/util/__init__.py
Normal file
14
ecs/crm-datafetch/src/util/counter_object.py
Normal file
14
ecs/crm-datafetch/src/util/counter_object.py
Normal file
@ -0,0 +1,14 @@
|
||||
class CounterObject:
    """A simple mutable integer counter (used e.g. for retry-attempt tracking)."""

    def __init__(self, base_num=1) -> None:
        # Starting value; defaults to 1, i.e. "first attempt".
        self.__counter = base_num

    def describe(self) -> int:
        """Return the current count without modifying it."""
        return self.__counter

    def increment(self, num=1) -> int:
        """Add num to the count and return the new value."""
        self.__counter = self.__counter + num
        return self.__counter

    def decrement(self, num=1) -> int:
        """Subtract num from the count and return the new value."""
        self.__counter = self.__counter - num
        return self.__counter
||||
43
ecs/crm-datafetch/src/util/dict_checker.py
Normal file
43
ecs/crm-datafetch/src/util/dict_checker.py
Normal file
@ -0,0 +1,43 @@
|
||||
import re
|
||||
|
||||
|
||||
class DictChecker:
    """Validation helpers over a single dict: key presence, value type, regex."""

    def __init__(self, object_dict: dict) -> None:
        # The dict under inspection; never mutated by the checks.
        self.__object_dict = object_dict

    def is_empty(self, check_key):
        """Return True when the value under check_key is NOT empty.

        NOTE(review): despite the name, True means "has a value" (neither ''
        nor None); name kept for backward compatibility with callers.
        Raises KeyError when the key is absent (callers guard via
        check_key_exist).
        """
        # Single lookup instead of the previous two identical lookups.
        value = self.__object_dict[check_key]
        return value != '' and value is not None

    def check_key_exist(self, check_key: str) -> bool:
        """Return True when check_key exists and its value is non-empty."""
        # Short-circuit protects is_empty() from a KeyError.
        return check_key in self.__object_dict and self.is_empty(check_key)

    def check_data_type(self, check_key: str, check_type: type) -> bool:
        """Return True when the value under check_key is an instance of check_type."""
        return isinstance(self.__object_dict[check_key], check_type)

    def check_match_pattern(self, regex_str: str, check_key: str) -> bool:
        """Return True when the whole value under check_key matches regex_str."""
        # fullmatch: the pattern must cover the entire string.
        # Replaced the 'True if ... else False' anti-idiom.
        return re.fullmatch(regex_str, self.__object_dict[check_key]) is not None

    def assert_key_exist(self, check_key: str) -> None:
        """Raise when check_key is missing or its value is empty."""
        if not self.check_key_exist(check_key):
            raise Exception(f'「{check_key}」キーは必須です')

        return

    def assert_data_type(self, check_key: str, check_type: type) -> None:
        """Raise when the value under check_key is not of check_type."""
        if not self.check_data_type(check_key, check_type):
            raise Exception(f'「{check_key}」キーの値は「{check_type}」でなければなりません')

        return

    def assert_match_pattern(self, check_key: str, regex_str: str):
        """Raise when the value under check_key does not fully match regex_str."""
        if not self.check_match_pattern(regex_str, check_key):
            raise Exception(f'「{check_key}」キーの値の正規表現「{regex_str}」チェックに失敗しました')

        return
||||
17
ecs/crm-datafetch/src/util/execute_datetime.py
Normal file
17
ecs/crm-datafetch/src/util/execute_datetime.py
Normal file
@ -0,0 +1,17 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from src.system_var.constants import MILLISEC_FORMAT, YYYYMMDDTHHMMSSTZ
|
||||
|
||||
|
||||
class ExecuteDateTime:
    """Captures the process start time (UTC) once and renders it in several formats."""

    def __init__(self):
        # Frozen at construction; all formatting methods derive from this
        # YYYYMMDDTHHMMSSTZ ('%Y-%m-%dT%H:%M:%S.000Z') string.
        self.__execute_datetime = datetime.now(timezone.utc).strftime(YYYYMMDDTHHMMSSTZ)

    def __str__(self) -> str:
        return self.__execute_datetime

    def __without_millisec(self) -> str:
        """Remove the fixed MILLISEC_FORMAT ('000Z') suffix.

        Fixed: the previous str.rstrip(MILLISEC_FORMAT) stripped a *character
        set* ('0'/'Z'), not the literal suffix — it behaved correctly only
        because the '.' before the suffix stopped the strip. The trailing '.'
        is left in place (as before) and removed by translate() in the callers.
        """
        value = self.__execute_datetime
        if value.endswith(MILLISEC_FORMAT):
            value = value[:-len(MILLISEC_FORMAT)]
        return value

    def to_path(self) -> str:
        """Render as 'YYYY/MM/DD/HHMMSS' for use in S3 key prefixes."""
        return self.__without_millisec().translate(str.maketrans({'-': '/', 'T': '/', ':': None, '.': None}))

    def format_date(self) -> str:
        """Render as 'YYYYMMDDHHMMSS' (digits only)."""
        return self.__without_millisec().translate(str.maketrans({'-': None, 'T': None, ':': None, '.': None}))
||||
37
ecs/crm-datafetch/src/util/logger.py
Normal file
37
ecs/crm-datafetch/src/util/logger.py
Normal file
@ -0,0 +1,37 @@
|
||||
import logging
|
||||
|
||||
from src.system_var.environments import LOG_LEVEL
|
||||
|
||||
# Raise boto3-related loggers to WARNING up front so their DEBUG chatter is
# suppressed even when this application itself logs at DEBUG.
for name in ["boto3", "botocore", "s3transfer", "urllib3"]:
    logging.getLogger(name).setLevel(logging.WARNING)


class Logger():
    """Configures the root logger (level, handler, format) and exposes it."""

    # The process-wide root logger, configured in __init__.
    __logger: logging.Logger

    def __init__(self):
        self.__logger = logging.getLogger()

        # LOG_LEVEL comes from the environment. getLevelName returns an int
        # for a known level name and a string otherwise — fall back to INFO
        # for unknown values.
        level = logging.getLevelName(LOG_LEVEL)
        if not isinstance(level, int):
            level = logging.INFO
        self.__logger.setLevel(level)

        # Add a stream handler only once: hasHandlers() guards against
        # duplicate output when Logger() is constructed more than once.
        if not self.__logger.hasHandlers():
            handler = logging.StreamHandler()
            self.__logger.addHandler(handler)

        # NOTE(review): these literals duplicate constants.LOG_FORMAT /
        # LOG_DATE_FORMAT — consider referencing the constants instead.
        formatter = logging.Formatter(
            '[%(levelname)s]\t%(asctime)s\t%(message)s\n',
            '%Y-%m-%d %H:%M:%S'
        )

        # Apply the formatter to every handler, including any that already
        # existed on the root logger (e.g. added by the runtime).
        for handler in self.__logger.handlers:
            handler.setFormatter(formatter)

    def get_logger(self) -> logging.Logger:
        """Return the configured root logger."""
        return self.__logger


# Shared module-level logger instance imported throughout the application.
logger_instance = Logger().get_logger()
||||
4
s3/config/crm/last_fetch_datetime/Account.json
Normal file
4
s3/config/crm/last_fetch_datetime/Account.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/AccountShare.json
Normal file
4
s3/config/crm/last_fetch_datetime/AccountShare.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Call2_vod__c.json
Normal file
4
s3/config/crm/last_fetch_datetime/Call2_vod__c.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Contact.json
Normal file
4
s3/config/crm/last_fetch_datetime/Contact.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Group.json
Normal file
4
s3/config/crm/last_fetch_datetime/Group.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/MSJ_Patient__c.json
Normal file
4
s3/config/crm/last_fetch_datetime/MSJ_Patient__c.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Product_vod__c.json
Normal file
4
s3/config/crm/last_fetch_datetime/Product_vod__c.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Profile.json
Normal file
4
s3/config/crm/last_fetch_datetime/Profile.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/RecordType.json
Normal file
4
s3/config/crm/last_fetch_datetime/RecordType.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Sent_Email_vod__c.json
Normal file
4
s3/config/crm/last_fetch_datetime/Sent_Email_vod__c.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Survey_vod__c.json
Normal file
4
s3/config/crm/last_fetch_datetime/Survey_vod__c.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Territory2.json
Normal file
4
s3/config/crm/last_fetch_datetime/Territory2.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/Territory2_ALL.json
Normal file
4
s3/config/crm/last_fetch_datetime/Territory2_ALL.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/User.json
Normal file
4
s3/config/crm/last_fetch_datetime/User.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
4
s3/config/crm/last_fetch_datetime/UserRole.json
Normal file
4
s3/config/crm/last_fetch_datetime/UserRole.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
{
|
||||
"last_fetch_datetime_from": "1900-01-01T00:00:00.000Z",
|
||||
"last_fetch_datetime_to": ""
|
||||
}
|
||||
47
s3/config/crm/object_info/crm_object_list_all.json
Normal file
47
s3/config/crm/object_info/crm_object_list_all.json
Normal file
@ -0,0 +1,47 @@
|
||||
{
|
||||
"objects": [
|
||||
{
|
||||
"object_name": "Territory2",
|
||||
"columns": [
|
||||
"Id",
|
||||
"Name",
|
||||
"Territory2TypeId",
|
||||
"Territory2ModelId",
|
||||
"ParentTerritory2Id",
|
||||
"Description",
|
||||
"ForecastUserId",
|
||||
"AccountAccessLevel",
|
||||
"OpportunityAccessLevel",
|
||||
"CaseAccessLevel",
|
||||
"ContactAccessLevel",
|
||||
"LastModifiedDate",
|
||||
"LastModifiedById",
|
||||
"SystemModstamp",
|
||||
"DeveloperName",
|
||||
"MSJ_Territory_Type__c",
|
||||
"MSJ_Level__c"
|
||||
],
|
||||
"is_skip": false,
|
||||
"is_update_last_fetch_datetime": false,
|
||||
"last_fetch_datetime_file_name": "Territory2_ALL.json",
|
||||
"upload_file_name": "CRM_Territory2_ALL_{execute_datetime}"
|
||||
},
|
||||
{
|
||||
"object_name": "UserTerritory2Association",
|
||||
"columns": [
|
||||
"Id",
|
||||
"UserId",
|
||||
"Territory2Id",
|
||||
"IsActive",
|
||||
"RoleInTerritory2",
|
||||
"LastModifiedDate",
|
||||
"LastModifiedById",
|
||||
"SystemModstamp"
|
||||
],
|
||||
"is_skip": false,
|
||||
"is_update_last_fetch_datetime": false,
|
||||
"last_fetch_datetime_file_name": "UserTerritory2Association_ALL.json",
|
||||
"upload_file_name": "CRM_UserTerritory2Association_ALL_{execute_datetime}"
|
||||
}
|
||||
]
|
||||
}
|
||||
3009
s3/config/crm/object_info/crm_object_list_diff.json
Normal file
3009
s3/config/crm/object_info/crm_object_list_diff.json
Normal file
File diff suppressed because it is too large
Load Diff
17
s3/config/crm/task_settings/task_settings.env
Normal file
17
s3/config/crm/task_settings/task_settings.env
Normal file
@ -0,0 +1,17 @@
|
||||
LOG_LEVEL=INFO
|
||||
CRM_AUTH_TIMEOUT=100
|
||||
CRM_AUTH_MAX_RETRY_ATTEMPT=3
|
||||
CRM_AUTH_RETRY_INTERVAL=5
|
||||
CRM_AUTH_RETRY_MIN_INTERVAL=5
|
||||
CRM_AUTH_RETRY_MAX_INTERVAL=50
|
||||
CRM_GET_RECORD_COUNT_TIMEOUT=300
|
||||
CRM_GET_RECORD_COUNT_MAX_RETRY_ATTEMPT=3
|
||||
CRM_GET_RECORD_COUNT_RETRY_INTERVAL=5
|
||||
CRM_GET_RECORD_COUNT_RETRY_MIN_INTERVAL=5
|
||||
CRM_GET_RECORD_COUNT_RETRY_MAX_INTERVAL=50
|
||||
CRM_FETCH_RECORD_TIMEOUT=300
|
||||
CRM_FETCH_RECORD_MAX_RETRY_ATTEMPT=3
|
||||
CRM_FETCH_RECORD_RETRY_INTERVAL=5
|
||||
CRM_FETCH_RECORD_RETRY_MIN_INTERVAL=5
|
||||
CRM_FETCH_RECORD_RETRY_MAX_INTERVAL=50
|
||||
CONVERT_TZ='Asia/Tokyo'
|
||||
Loading…
x
Reference in New Issue
Block a user