diff --git a/ecs/jskult-batch-daily/.env.example b/ecs/jskult-batch-daily/.env.example index 95aef7fe..1e15cdb0 100644 --- a/ecs/jskult-batch-daily/.env.example +++ b/ecs/jskult-batch-daily/.env.example @@ -8,6 +8,17 @@ ULTMARC_DATA_BUCKET=**************** ULTMARC_DATA_FOLDER=recv JSKULT_BACKUP_BUCKET=**************** ULTMARC_BACKUP_FOLDER=ultmarc +VJSK_BACKUP_FOLDER=vjsk JSKULT_CONFIG_BUCKET=********************** JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME=jskult_holiday_list.txt +VJSK_DATA_SEND_FOLDER=send +VJSK_DATA_BUCKET=************* +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME=jskult_wholesaler_stock_input_day_list.txt +VJSK_DATA_RECEIVE_FOLDER=********************** +# 連携データ抽出期間 +SALES_LAUNDERING_EXTRACT_DATE_PERIOD=0 +# 洗替対象テーブル名 +SALES_LAUNDERING_TARGET_TABLE_NAME=src05.sales_lau +# 卸実績洗替で作成するデータの期間(年単位) +SALES_LAUNDERING_TARGET_YEAR_OFFSET=5 diff --git a/ecs/jskult-batch-daily/Pipfile b/ecs/jskult-batch-daily/Pipfile index a5d5dddd..1e6adf91 100644 --- a/ecs/jskult-batch-daily/Pipfile +++ b/ecs/jskult-batch-daily/Pipfile @@ -18,6 +18,7 @@ autopep8 = "*" flake8 = "*" pytest = "*" pytest-cov = "*" +boto3 = "*" [requires] python_version = "3.9" diff --git a/ecs/jskult-batch-daily/Pipfile.lock b/ecs/jskult-batch-daily/Pipfile.lock index 519c60a0..10b5f555 100644 --- a/ecs/jskult-batch-daily/Pipfile.lock +++ b/ecs/jskult-batch-daily/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9bce8f43bcad5d6ae8e5a558b8ade00a83f6e1671993e91b0a883fffa6b95df9" + "sha256": "df8b09869c6ad0daff24cf808bac56f528d8ae5835fe70a50d58c2bed724e717" }, "pipfile-spec": 6, "requires": { @@ -18,19 +18,19 @@ "default": { "boto3": { "hashes": [ - "sha256:816a198a6cc4f283af6b21439d85be6dbe4b73c2232dd906c6bafb4fece28d19", - "sha256:9de90a2c0b853f84436b032b28947fc8a765dc462573a8d543b13f16c6579b40" + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" ], "index": "pypi", - "version": "==1.26.107" + "version": "==1.26.134" }, "botocore": { "hashes": [ - "sha256:ee1e43e6cd0864cc6811ba3f05123647612ee3f07a286a4c94f5885aa86d6922", - "sha256:f63942b4b7248c0b3d6ecbc2852cf0787c23ace2a91a012f7ee0b3ae3eb08f4f" + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" ], "markers": "python_version >= '3.7'", - "version": "==1.29.107" + "version": "==1.29.134" }, "greenlet": { "hashes": [ @@ -124,11 +124,11 @@ }, "s3transfer": { "hashes": [ - "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd", - "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947" + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" ], "markers": "python_version >= '3.7'", - "version": "==0.6.0" + "version": "==0.6.1" }, "six": { "hashes": [ @@ -140,50 +140,50 @@ }, "sqlalchemy": { "hashes": [ - "sha256:07950fc82f844a2de67ddb4e535f29b65652b4d95e8b847823ce66a6d540a41d", - "sha256:0a865b5ec4ba24f57c33b633b728e43fde77b968911a6046443f581b25d29dd9", - "sha256:0b49f1f71d7a44329a43d3edd38cc5ee4c058dfef4487498393d16172007954b", - "sha256:13f984a190d249769a050634b248aef8991acc035e849d02b634ea006c028fa8", - "sha256:1b69666e25cc03c602d9d3d460e1281810109e6546739187044fc256c67941ef", - "sha256:1d06e119cf79a3d80ab069f064a07152eb9ba541d084bdaee728d8a6f03fd03d", - 
"sha256:246712af9fc761d6c13f4f065470982e175d902e77aa4218c9cb9fc9ff565a0c", - "sha256:34eb96c1de91d8f31e988302243357bef3f7785e1b728c7d4b98bd0c117dafeb", - "sha256:4c3020afb144572c7bfcba9d7cce57ad42bff6e6115dffcfe2d4ae6d444a214f", - "sha256:4f759eccb66e6d495fb622eb7f4ac146ae674d829942ec18b7f5a35ddf029597", - "sha256:68ed381bc340b4a3d373dbfec1a8b971f6350139590c4ca3cb722fdb50035777", - "sha256:6b72dccc5864ea95c93e0a9c4e397708917fb450f96737b4a8395d009f90b868", - "sha256:6e84ab63d25d8564d7a8c05dc080659931a459ee27f6ed1cf4c91f292d184038", - "sha256:734805708632e3965c2c40081f9a59263c29ffa27cba9b02d4d92dfd57ba869f", - "sha256:78612edf4ba50d407d0eb3a64e9ec76e6efc2b5d9a5c63415d53e540266a230a", - "sha256:7e472e9627882f2d75b87ff91c5a2bc45b31a226efc7cc0a054a94fffef85862", - "sha256:865392a50a721445156809c1a6d6ab6437be70c1c2599f591a8849ed95d3c693", - "sha256:8d118e233f416d713aac715e2c1101e17f91e696ff315fc9efbc75b70d11e740", - "sha256:8d3ece5960b3e821e43a4927cc851b6e84a431976d3ffe02aadb96519044807e", - "sha256:93c78d42c14aa9a9e0866eacd5b48df40a50d0e2790ee377af7910d224afddcf", - "sha256:95719215e3ec7337b9f57c3c2eda0e6a7619be194a5166c07c1e599f6afc20fa", - "sha256:9838bd247ee42eb74193d865e48dd62eb50e45e3fdceb0fdef3351133ee53dcf", - "sha256:aa5c270ece17c0c0e0a38f2530c16b20ea05d8b794e46c79171a86b93b758891", - "sha256:ac6a0311fb21a99855953f84c43fcff4bdca27a2ffcc4f4d806b26b54b5cddc9", - "sha256:ad5363a1c65fde7b7466769d4261126d07d872fc2e816487ae6cec93da604b6b", - "sha256:b3e5864eba71a3718236a120547e52c8da2ccb57cc96cecd0480106a0c799c92", - "sha256:bbda1da8d541904ba262825a833c9f619e93cb3fd1156be0a5e43cd54d588dcd", - "sha256:c6e27189ff9aebfb2c02fd252c629ea58657e7a5ff1a321b7fc9c2bf6dc0b5f3", - "sha256:c8239ce63a90007bce479adf5460d48c1adae4b933d8e39a4eafecfc084e503c", - "sha256:d209594e68bec103ad5243ecac1b40bf5770c9ebf482df7abf175748a34f4853", - "sha256:d5327f54a9c39e7871fc532639616f3777304364a0bb9b89d6033ad34ef6c5f8", - "sha256:db4bd1c4792da753f914ff0b688086b9a8fd78bb9bc5ae8b6d2e65f176b81eb9", - "sha256:e4780be0f19e5894c17f75fc8de2fe1ae233ab37827125239ceb593c6f6bd1e2", - "sha256:e4a019f723b6c1e6b3781be00fb9e0844bc6156f9951c836ff60787cc3938d76", - "sha256:e62c4e762d6fd2901692a093f208a6a6575b930e9458ad58c2a7f080dd6132da", - "sha256:e730603cae5747bc6d6dece98b45a57d647ed553c8d5ecef602697b1c1501cf2", - "sha256:ebc4eeb1737a5a9bdb0c24f4c982319fa6edd23cdee27180978c29cbb026f2bd", - "sha256:ee2946042cc7851842d7a086a92b9b7b494cbe8c3e7e4627e27bc912d3a7655e", - "sha256:f005245e1cb9b8ca53df73ee85e029ac43155e062405015e49ec6187a2e3fb44", - "sha256:f49c5d3c070a72ecb96df703966c9678dda0d4cb2e2736f88d15f5e1203b4159", - "sha256:f61ab84956dc628c8dfe9d105b6aec38afb96adae3e5e7da6085b583ff6ea789" + "sha256:0aa2cbde85a6eab9263ab480f19e8882d022d30ebcdc14d69e6a8d7c07b0a871", + "sha256:0d6979c9707f8b82366ba34b38b5a6fe32f75766b2e901f9820e271e95384070", + "sha256:0eb14a386a5b610305bec6639b35540b47f408b0a59f75999199aed5b3d40079", + "sha256:2424a84f131901fbb20a99844d47b38b517174c6e964c8efb15ea6bb9ced8c2b", + "sha256:2ad9688debf1f0ae9c6e0706a4e2d33b1a01281317cee9bd1d7eef8020c5baac", + "sha256:2f0a355264af0952570f18457102984e1f79510f856e5e0ae652e63316d1ca23", + "sha256:31f72bb300eed7bfdb373c7c046121d84fa0ae6f383089db9505ff553ac27cef", + "sha256:375b7ba88f261dbd79d044f20cbcd919d88befb63f26af9d084614f10cdf97a6", + "sha256:37de4010f53f452e94e5ed6684480432cfe6a7a8914307ef819cd028b05b98d5", + "sha256:49c138856035cb97f0053e5e57ba90ec936b28a0b8b0020d44965c7b0c0bf03a", + "sha256:4f9832815257969b3ca9bf0501351e4c02c8d60cbd3ec9f9070d5b0f8852900e", + 
"sha256:566a0ac347cf4632f551e7b28bbd0d215af82e6ffaa2556f565a3b6b51dc3f81", + "sha256:6777673d346071451bf7cccf8d0499024f1bd6a835fc90b4fe7af50373d92ce6", + "sha256:72746ec17a7d9c5acf2c57a6e6190ceba3dad7127cd85bb17f24e90acc0e8e3f", + "sha256:755f653d693f9b8f4286d987aec0d4279821bf8d179a9de8e8a5c685e77e57d6", + "sha256:7612a7366a0855a04430363fb4ab392dc6818aaece0b2e325ff30ee77af9b21f", + "sha256:7ad24c85f2a1caf0cd1ae8c2fdb668777a51a02246d9039420f94bd7dbfd37ed", + "sha256:881cc388dded44ae6e17a1666364b98bd76bcdc71b869014ae725f06ba298e0e", + "sha256:8d97b37b4e60073c38bcf94e289e3be09ef9be870de88d163f16e08f2b9ded1a", + "sha256:9119795d2405eb23bf7e6707e228fe38124df029494c1b3576459aa3202ea432", + "sha256:9136d596111c742d061c0f99bab95c5370016c4101a32e72c2b634ad5e0757e6", + "sha256:9ad883ac4f5225999747f0849643c4d0ec809d9ffe0ddc81a81dd3e68d0af463", + "sha256:a25b4c4fdd633501233924f873e6f6cd8970732859ecfe4ecfb60635881f70be", + "sha256:a30e4db983faa5145e00ef6eaf894a2d503b3221dbf40a595f3011930d3d0bac", + "sha256:a5e9e78332a5d841422b88b8c490dfd7f761e64b3430249b66c05d02f72ceab0", + "sha256:b4e08e3831671008888bad5d160d757ef35ce34dbb73b78c3998d16aa1334c97", + "sha256:bf1aae95e80acea02a0a622e1c12d3fefc52ffd0fe7bda70a30d070373fbb6c3", + "sha256:c61b89803a87a3b2a394089a7dadb79a6c64c89f2e8930cc187fec43b319f8d2", + "sha256:cdf80359b641185ae7e580afb9f88cf560298f309a38182972091165bfe1225d", + "sha256:d93ebbff3dcf05274843ad8cf650b48ee634626e752c5d73614e5ec9df45f0ce", + "sha256:db24d2738add6db19d66ca820479d2f8f96d3f5a13c223f27fa28dd2f268a4bd", + "sha256:e0d20f27edfd6f35b388da2bdcd7769e4ffa374fef8994980ced26eb287e033a", + "sha256:e2f3b5236079bc3e318a92bab2cc3f669cc32127075ab03ff61cacbae1c392b8", + "sha256:e481e54db8cec1457ee7c05f6d2329e3298a304a70d3b5e2e82e77170850b385", + "sha256:e5e5dc300a0ca8755ada1569f5caccfcdca28607dfb98b86a54996b288a8ebd3", + "sha256:ec2f525273528425ed2f51861b7b88955160cb95dddb17af0914077040aff4a5", + "sha256:f234ba3bb339ad17803009c8251f5ee65dcf283a380817fe486823b08b26383d", + "sha256:f463598f9e51ccc04f0fe08500f9a0c3251a7086765350be418598b753b5561d", + "sha256:f717944aee40e9f48776cf85b523bb376aa2d9255a268d6d643c57ab387e7264", + "sha256:fd0febae872a4042da44e972c070f0fd49a85a0a7727ab6b85425f74348be14e", + "sha256:fec56c7d1b6a22c8f01557de3975d962ee40270b81b60d1cfdadf2a105d10e84" ], "index": "pypi", - "version": "==2.0.9" + "version": "==2.0.13" }, "tenacity": { "hashes": [ @@ -211,14 +211,6 @@ } }, "develop": { - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, "autopep8": { "hashes": [ "sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1", @@ -227,65 +219,89 @@ "index": "pypi", "version": "==2.0.2" }, + "boto3": { + "hashes": [ + "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6", + "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40" + ], + "index": "pypi", + "version": "==1.26.134" + }, + "botocore": { + "hashes": [ + "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba", + "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f" + ], + "markers": "python_version >= '3.7'", + "version": "==1.29.134" + }, + "colorama": { + "hashes": [ + "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": 
"sys_platform == 'win32'", + "version": "==0.4.6" + }, "coverage": { "extras": [ "toml" ], "hashes": [ - "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", - "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", - "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", - "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", - "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", - "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", - "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", - "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", - "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", - "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", - "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", - "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", - "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", - "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", - "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", - "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", - "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", - "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", - "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", - "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", - "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", - "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", - "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", - "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", - "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", - "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", - "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", - "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", - "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", - "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", - "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", - "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", - "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", - "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", - "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", - "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", - "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", - "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", - "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", - "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", - "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", - "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", - "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", - "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", - 
"sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", - "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", - "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", - "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", - "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", - "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", - "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" + "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3", + "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a", + "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813", + "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0", + "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a", + "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd", + "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139", + "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b", + "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252", + "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790", + "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045", + "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce", + "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200", + "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718", + "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b", + "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f", + "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5", + "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade", + "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5", + "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a", + "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8", + "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33", + "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e", + "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c", + "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3", + "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969", + "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068", + "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2", + "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771", + "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed", + "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212", + "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614", + "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88", + "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3", + "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c", + "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84", + "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11", + "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1", + "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1", + 
"sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e", + "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1", + "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd", + "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47", + "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a", + "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c", + "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31", + "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5", + "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6", + "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303", + "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5", + "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47" ], "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.2.5" }, "exceptiongroup": { "hashes": [ @@ -311,6 +327,14 @@ "markers": "python_version >= '3.7'", "version": "==2.0.0" }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, "mccabe": { "hashes": [ "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", @@ -321,11 +345,11 @@ }, "packaging": { "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" ], "markers": "python_version >= '3.7'", - "version": "==23.0" + "version": "==23.1" }, "pluggy": { "hashes": [ @@ -353,11 +377,11 @@ }, "pytest": { "hashes": [ - "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e", - "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4" + "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", + "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" ], "index": "pypi", - "version": "==7.2.2" + "version": "==7.3.1" }, "pytest-cov": { "hashes": [ @@ -367,6 +391,30 @@ "index": "pypi", "version": "==4.0.0" }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "s3transfer": { + "hashes": [ + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, "tomli": { "hashes": [ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", @@ -374,6 +422,14 @@ ], "markers": "python_version < '3.11'", "version": "==2.0.1" + }, + "urllib3": { + "hashes": [ + 
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", + "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.15" } } } diff --git a/ecs/jskult-batch-daily/src/aws/s3.py b/ecs/jskult-batch-daily/src/aws/s3.py index 68ed0a7c..a4ebc30a 100644 --- a/ecs/jskult-batch-daily/src/aws/s3.py +++ b/ecs/jskult-batch-daily/src/aws/s3.py @@ -1,4 +1,6 @@ +import os import os.path as path +import tarfile import tempfile import boto3 @@ -16,7 +18,8 @@ class S3Client: return [] contents = response['Contents'] # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする - objects = [{'filename': content['Key'], 'size': content['Size']} for content in contents if not content['Key'].endswith('/')] + objects = [{'filename': content['Key'], 'size': content['Size']} + for content in contents if not content['Key'].endswith('/')] return objects def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: @@ -89,6 +92,16 @@ class ConfigBucket(S3Bucket): f.seek(0) return temporary_file_path + def download_wholesaler_stock_input_day_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME) + wholesaler_stock_input_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, wholesaler_stock_input_day_list_key, f) + f.seek(0) + return temporary_file_path + class JskUltBackupBucket(S3Bucket): _bucket_name = environment.JSKULT_BACKUP_BUCKET @@ -96,3 +109,66 @@ class JskUltBackupBucket(S3Bucket): class UltmarcBackupBucket(JskUltBackupBucket): _folder = environment.ULTMARC_BACKUP_FOLDER + + +class VjskBackupBucket(JskUltBackupBucket): + _folder = environment.VJSK_BACKUP_FOLDER + + +class VjskReceiveBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _recv_folder = environment.VJSK_DATA_RECEIVE_FOLDER + + _s3_file_list = None + + def get_s3_file_list(self): + self._s3_file_list = self._s3_client.list_objects(self._bucket_name, self._recv_folder) + return self._s3_file_list + + def download_data_file(self, data_filename: str): + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, f'{data_filename.replace(f"{self._recv_folder}/", "")}') + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, data_filename, f) + f.seek(0) + return temporary_file_path + + def unzip_data_file(self, filename: str): + ret = [] + with tarfile.open(filename) as tar: + temp_dir = os.path.dirname(filename) + tar.extractall(path=temp_dir) + extracted_files = tar.getnames() + for extracted_file in extracted_files: + file = os.path.join(temp_dir, extracted_file) + ret.append(file) + return ret + + def backup_dat_file(self, target_files: list, datetime_key: str): + jskult_backup_bucket = VjskBackupBucket() + for target_file in target_files: + backup_from_file_path = target_file.get("filename") + backup_to_filename = backup_from_file_path.replace(f"{self._recv_folder}/", "") + backup_key = f'{jskult_backup_bucket._folder}/{datetime_key}/{backup_to_filename}' + self._s3_client.copy(self._bucket_name, backup_from_file_path, + jskult_backup_bucket._bucket_name, backup_key) + self._s3_client.delete_file(self._bucket_name, 
backup_from_file_path) + + +class VjskSendBucket(S3Bucket): + _bucket_name = environment.VJSK_DATA_BUCKET + _send_folder = environment.VJSK_DATA_SEND_FOLDER + + def upload_inst_pharm_csv_file(self, vjsk_create_csv: str, csv_file_path: str): + # S3バケットにファイルを移動 + csv_file_name = f'{self._send_folder}/{vjsk_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, self._bucket_name, csv_file_name) + return + + def backup_inst_pharm_csv_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + vjsk_backup_bucket = VjskBackupBucket() + dat_key = f'{self._send_folder}/{dat_file_key}' + backup_key = f'{vjsk_backup_bucket._folder}/{self._send_folder}/{datetime_key}/{dat_file_key.replace(f"{self._send_folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_key, vjsk_backup_bucket._bucket_name, backup_key) diff --git a/ecs/jskult-batch-daily/src/batch/common/batch_context.py b/ecs/jskult-batch-daily/src/batch/common/batch_context.py index 3b3ac157..b3fc4967 100644 --- a/ecs/jskult-batch-daily/src/batch/common/batch_context.py +++ b/ecs/jskult-batch-daily/src/batch/common/batch_context.py @@ -3,6 +3,7 @@ class BatchContext: __syor_date: str # 処理日(yyyy/mm/dd形式) __is_not_business_day: bool # 日次バッチ起動日フラグ __is_ultmarc_imported: bool # アルトマーク取込実施済フラグ + __is_vjsk_stock_import_day: bool # 卸在庫データ取込対象フラグ def __init__(self) -> None: self.__is_not_business_day = False @@ -37,3 +38,11 @@ class BatchContext: @is_ultmarc_imported.setter def is_ultmarc_imported(self, flag: bool): self.__is_ultmarc_imported = flag + + @property + def is_vjsk_stock_import_day(self): + return self.__is_vjsk_stock_import_day + + @is_vjsk_stock_import_day.setter + def is_vjsk_stock_import_day(self, flag: bool): + self.__is_vjsk_stock_import_day = flag diff --git a/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py new file mode 100644 index 00000000..816a0545 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/dcf_inst_merge/integrate_dcf_inst_merge.py @@ -0,0 +1,647 @@ +from datetime import datetime, timedelta +from src.batch.batch_functions import logging_sql +from src.batch.common.batch_context import BatchContext +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.time.elapsed_time import ElapsedTime + +batch_context = BatchContext.get_instance() +logger = get_logger('DCF施設統合マスタ日次更新') + + +def exec(): + db = Database.get_instance() + try: + db.connect() + db.begin() + logger.debug('DCF施設統合マスタ日次更新処理開始') + # DCF施設統合マスタ移行先コードのセット(無効フラグが『0(有効)』) + enabled_dst_inst_merge_records = _set_enabled_dct_inst_merge(db) + # DCF施設統合マスタ移行先コードのセット(無効フラグが『1(無効)』) + _set_disabled_dct_inst_merge(db) + # DCF施設統合マスタに無効フラグが『0(有効)』データが存在する場合 + if len(enabled_dst_inst_merge_records) > 0: + _add_emp_chg_inst(db, enabled_dst_inst_merge_records) + _add_ult_ident_presc(db, enabled_dst_inst_merge_records) + db.commit() + logger.debug('DCF施設統合マスタ日次更新処理終了') + except Exception as e: + db.rollback() + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _set_enabled_dct_inst_merge(db: Database) -> list[dict]: + # データ取得(無効フラグが『0(有効)』) + enabled_dst_inst_merge_records = _select_dct_inst_merge(db, 0) + # 移行先DCF施設コードの更新(無効フラグが『0(有効)』) + if _update_dcf_inst_merge(db, 0) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in enabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dup_opp_cd'], row['dcf_inst_cd'], 
'') + + return enabled_dst_inst_merge_records + + +def _set_disabled_dct_inst_merge(db: Database): + # データ取得(無効フラグが『1(無効)』) + disabled_dst_inst_merge_records = _select_dct_inst_merge(db, 1) + # 移行先DCF施設コードの更新(無効フラグが『1(無効)』) + if _update_dcf_inst_merge(db, 1) > 0: + # DCF施設統合マスタの過去分の洗い替え + for row in disabled_dst_inst_merge_records: + _update_dcf_inst_cd_new(db, row['dcf_inst_cd'], row['dup_opp_cd'], '戻し') + + +def _select_ult_ident_presc_dcf_inst_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 納入先処方元マスタから、DCF施設コードに対応したレコードの取得 + try: + sql = """ + SELECT + ta_cd, + ult_ident_cd, + ratio + FROM + src05.ult_ident_presc + WHERE + presc_cd = :dcf_inst_cd + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + ult_ident_presc_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('納入先処方元マスタからDCF施設コードに対応したレコードの取得に成功') + except Exception as e: + logger.debug('納入先処方元マスタからDCF施設コードに対応したレコードの取得に失敗') + raise e + + return ult_ident_presc_ta_cd_records + + +def _add_ult_ident_presc(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 納入先処方元マスタの追加 + logger.info('納入先処方元マスタの登録 開始') + for data_inst_cnt, enabled_merge_record in enumerate(enabled_dst_inst_merge_records, start=1): + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + ult_ident_presc_source_records = _select_ult_ident_presc_dcf_inst_cd(db, enabled_merge_record['dcf_inst_cd']) + for ult_ident_presc_source_record in ult_ident_presc_source_records: + ult_ident_presc_records = _select_ult_ident_presc(db, + enabled_merge_record['dcf_inst_cd'], + enabled_merge_record['dup_opp_cd'], + ult_ident_presc_source_record) + for data_cnt, ult_ident_presc_row in enumerate(ult_ident_presc_records, start=1): + logger.info(f'{data_inst_cnt}件目の移行施設の{data_cnt}レコード目処理 開始') + # 処方元コード=重複時相手先コードが発生した場合 + if ult_ident_presc_row['opp_count'] > 0: + continue + + start_date = _str_to_date_time(ult_ident_presc_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + set_start_date = _date_time_to_str(set_start_date) + is_exists_duplicate_key = False + if _count_duplicate_ult_ident_presc(db, set_start_date, ult_ident_presc_row) > 0: + _delete_ult_ident_presc(db, set_start_date, ult_ident_presc_row, + '納入先処方元マスタの重複予定データの削除') + is_exists_duplicate_key = True + else: + logger.info('納入先処方元マスタの重複予定データなし') + _insert_ult_ident_presc(db, set_start_date, enabled_merge_record['dup_opp_cd'], ult_ident_presc_row) + + # 重複予定データが存在しない、且つ、適用終了日 ≧ 適用開始日の場合 + if not is_exists_duplicate_key and _str_to_date_time(ult_ident_presc_row['end_date']) >= start_date: + last_end_date = tekiyo_month_first_day - timedelta(days=1) + # 適用終了日を、DCF施設統合マスタの適用月度の前月末日で更新 + _update_ult_ident_presc_end_date(db, _date_time_to_str(last_end_date), ult_ident_presc_row) + + logger.info('納入先処方元マスタの登録 終了') + + +def _select_emp_chg_inst_ta_cd(db: Database, dcf_inst_cd: str) -> list[dict]: + # 従業員担当施設マスタから、DCF施設コードに対応した領域コードの取得 + try: + sql = """ + SELECT + ta_cd + FROM + src05.emp_chg_inst + WHERE + inst_cd = :dcf_inst_cd + AND enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd} + emp_chg_inst_ta_cd_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('従業員担当施設マスタから領域コードの取得に成功') + except Exception as e: + logger.debug('従業員担当施設マスタから領域コードの取得に失敗') + raise e + + return emp_chg_inst_ta_cd_records + + +def 
_add_emp_chg_inst(db: Database, enabled_dst_inst_merge_records: list[dict]): + # 従業員担当施設マスタの登録 + logger.info('従業員担当施設マスタの登録 開始') + for enabled_merge_record in enabled_dst_inst_merge_records: + tekiyo_month_first_day = _get_first_day_of_month(enabled_merge_record['tekiyo_month']) + emp_chg_inst_ta_cd_records = _select_emp_chg_inst_ta_cd(db, enabled_merge_record['dcf_inst_cd']) + for emp_chg_inst_ta_cd_record in emp_chg_inst_ta_cd_records: + emp_chg_inst_records = _select_emp_chg_inst(db, enabled_merge_record['dcf_inst_cd'], enabled_merge_record['dup_opp_cd'], + emp_chg_inst_ta_cd_record['ta_cd']) + for emp_chg_inst_row in emp_chg_inst_records: + # 重複時相手先コードが存在したかのチェック + if emp_chg_inst_row['opp_count'] > 0: + continue + + start_date = _str_to_date_time(emp_chg_inst_row['start_date']) + set_start_date = start_date \ + if start_date > tekiyo_month_first_day else tekiyo_month_first_day + + _insert_emp_chg_inst(db, enabled_merge_record['dup_opp_cd'], _date_time_to_str(set_start_date), + emp_chg_inst_row) + + # 適用開始日 < DCF施設統合マスタの適用月度の1日の場合 + if start_date < tekiyo_month_first_day: + # DCF施設統合マスタの適用月度の前月末日で、適用終了日を更新する + last_end_date = tekiyo_month_first_day - timedelta(days=1) + _update_emp_chg_inst_end_date(db, enabled_merge_record['dcf_inst_cd'], _date_time_to_str(last_end_date), + emp_chg_inst_row) + continue + # 適用開始日 ≧ DCF施設統合マスタの適用月度の1日の場合、N(論理削除レコード)に設定する + _update_emp_chg_inst_disabled(db, enabled_merge_record['dcf_inst_cd'], emp_chg_inst_row['ta_cd'], + emp_chg_inst_row['start_date']) + + logger.info('従業員担当施設マスタの登録 終了') + + +def _delete_ult_ident_presc(db: Database, start_date: str, ult_ident_presc_row: dict, + log_message: str): + # ult_ident_prescのDelete + try: + elapsed_time = ElapsedTime() + sql = """ + DELETE FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :start_date + """ + params = { + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': start_date + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug(f'{log_message} 失敗') + raise e + + +def _update_emp_chg_inst_disabled(db: Database, dcf_inst_cd: str, ta_cd: str, start_date: str): + # emp_chg_instをUPDATE + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET + enabled_flg = 'N', + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND start_date = :start_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'ta_cd': ta_cd, 'start_date': start_date} + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタのYorNフラグ更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('従業員担当施設マスタのYorNフラグ更新に失敗') + raise e + + +def _update_emp_chg_inst_end_date(db: Database, dcf_inst_cd: str, last_end_date: str, + emp_chg_inst_row: dict): + # emp_chg_instをUPDATE + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.emp_chg_inst + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + inst_cd = :dcf_inst_cd + AND ta_cd = :ta_cd + AND emp_cd = :emp_cd + AND bu_cd = :bu_cd + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'dcf_inst_cd': dcf_inst_cd, + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 
'bu_cd': emp_chg_inst_row['bu_cd'], + 'start_date': emp_chg_inst_row['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの適用終了日更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('従業員担当施設マスタの適用終了日更新 失敗') + raise e + + +def _insert_emp_chg_inst(db: Database, dup_opp_cd: str, set_start_date: str, + emp_chg_inst_row: dict): + # emp_chg_instにINSERT + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.emp_chg_inst( + inst_cd, + ta_cd, + emp_cd, + bu_cd, + start_date, + end_date, + main_chg_flg, + enabled_flg, + creater, + create_date, + updater, + update_date + ) + VALUES( + :dup_opp_cd, + :ta_cd, + :emp_cd, + :bu_cd, + :start_date, + :end_date, + :main_chg_flg, + 'Y', + CURRENT_USER(), + SYSDATE(), + CURRENT_USER(), + SYSDATE() + ) + """ + params = { + 'dup_opp_cd': dup_opp_cd, + 'ta_cd': emp_chg_inst_row['ta_cd'], + 'emp_cd': emp_chg_inst_row['emp_cd'], + 'bu_cd': emp_chg_inst_row['bu_cd'], + 'start_date': set_start_date, + 'end_date': emp_chg_inst_row['end_date'], + 'main_chg_flg': None + if emp_chg_inst_row['main_chg_flg'] is None else emp_chg_inst_row['main_chg_flg'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'従業員担当施設マスタの追加に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('従業員担当施設マスタの追加に失敗') + raise e + + +def _select_dct_inst_merge(db: Database, muko_flg: int) -> list[dict]: + # dcf_inst_mergeからSELECT + # 無効フラグがOFFのときは、移行先DCF施設コードが設定されてないデータを抽出する。 + # ONのときは、移行先DCF施設コードが設定されているデータを抽出する。 + try: + sql = """ + SELECT + dim.dcf_inst_cd, + dim.dup_opp_cd, + dim.tekiyo_month + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg = :muko_flg + AND dim.enabled_flg = 'Y' + AND dim.dcf_inst_cd_new IS {not_null}NULL + """.format( + not_null='' if muko_flg == 0 else 'NOT ' + ) + params = { + 'muko_flg': muko_flg + } + dst_inst_merge_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('DCF施設統合マスタの取得に成功') + except Exception as e: + logger.debug('DCF施設統合マスタの取得に失敗') + raise e + + return dst_inst_merge_records + + +def _update_dcf_inst_merge(db: Database, muko_flg: int) -> int: + # dcf_inst_mergeをUPDATE + # 無効フラグがOFFのときは、 + # 移行先DCF施設コードが設定されていないデータを抽出し、移行先DCF施設コードに重複時相手先コードを上書きする + # 無効フラグがONのときは、 + # 移行先DCF施設コードが設定されているデータを抽出し、移行先DCF施設コードにNULLを上書きする。 + try: + elapsed_time = ElapsedTime() + log_message = '更新しました' if muko_flg == 0 else '無効データに戻しました' + sql = """ + UPDATE + src05.dcf_inst_merge AS updim + INNER JOIN( + SELECT + dim.dcf_inst_cd AS base_dcf_inst_cd, + dim.dup_opp_cd AS base_dup_opp_cd, + dim.tekiyo_month AS base_tekiyo_month, + dim.muko_flg AS base_muko_flg, + dim.enabled_flg AS base_enabled_flg + FROM + src05.dcf_inst_merge AS dim + INNER JOIN + src05.hdke_tbl AS ht + ON dim.tekiyo_month = DATE_FORMAT(ht.syor_date, '%Y%m') + WHERE + dim.muko_flg = :muko_flg + AND dim.enabled_flg ='Y' + AND dim.dcf_inst_cd_new IS {not_null}NULL + ) AS bf_dim + SET + updim.dcf_inst_cd_new = {column}, + updim.updater = CURRENT_USER(), + updim.update_date = SYSDATE() + WHERE + updim.dcf_inst_cd = base_dcf_inst_cd + AND updim.dup_opp_cd = base_dup_opp_cd + AND updim.tekiyo_month = base_tekiyo_month + AND updim.muko_flg = base_muko_flg + AND updim.enabled_flg = base_enabled_flg + """.format( + not_null='' if muko_flg == 0 else 'NOT ', + column='base_dup_opp_cd' if muko_flg == 0 else 'NULL' + ) + params = { + 
'muko_flg': muko_flg + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'DCF施設統合マスタの有効データを{log_message} 成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug(f'DCF施設統合マスタの有効データを{log_message} 失敗') + raise e + + return res.rowcount + + +def _update_dcf_inst_cd_new(db: Database, dcf_inst_cd_new_after: str, dcf_inst_cd_new_before: str, log_message: str): + # dcf_inst_mergeをUPDATE + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.dcf_inst_merge + SET + dcf_inst_cd_new = :dcf_inst_cd_new_after, + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + dcf_inst_cd_new = :dcf_inst_cd_new_before + AND enabled_flg = 'Y' + AND muko_flg = 0 + """ + params = { + 'dcf_inst_cd_new_after': dcf_inst_cd_new_after, + 'dcf_inst_cd_new_before': dcf_inst_cd_new_before + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'移行先DCF施設コードの{log_message}更新に成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug(f'移行先DCF施設コードの{log_message}更新に失敗') + raise e + + +def _update_ult_ident_presc_end_date(db: Database, last_end_date: str, ult_ident_presc_row: dict): + # ult_ident_presc_endをUPDATE + try: + elapsed_time = ElapsedTime() + sql = """ + UPDATE + src05.ult_ident_presc + SET end_date = :end_date, + updater = CURRENT_USER(), + update_date = SYSDATE() + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :start_date + """ + params = { + 'end_date': last_end_date, + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': ult_ident_presc_row['start_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'終了日 > 開始月のため適用終了日を更新 成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('終了日 > 開始月のため適用終了日を更新 失敗') + raise e + + +def _insert_ult_ident_presc(db: Database, set_Start_Date: str, dup_opp_cd: str, + ult_ident_presc_row: dict): + # ult_ident_prescにINSERT + try: + elapsed_time = ElapsedTime() + sql = """ + INSERT INTO + src05.ult_ident_presc( + ta_cd, + ult_ident_cd, + ratio, + start_date, + presc_cd, + end_date, + creater, + create_date, + update_date, + updater + ) + VALUES( + :ta_cd, + :ult_ident_cd, + :ratio, + :start_date, + :presc_cd, + :end_date, + CURRENT_USER(), + SYSDATE(), + SYSDATE(), + CURRENT_USER() + ) + """ + params = { + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': set_Start_Date, + 'presc_cd': dup_opp_cd, + 'end_date': ult_ident_presc_row['end_date'] + } + res = db.execute(sql, params) + logging_sql(logger, sql) + logger.info(f'納入先処方元マスタに追加 成功, {res.rowcount} 行更新 ({elapsed_time.of})') + except Exception as e: + logger.debug('納入先処方元マスタに追加 失敗') + raise e + + +def _select_emp_chg_inst(db: Database, dcf_inst_cd: str, dup_opp_cd: str, ta_cd: str) -> list[dict]: + # emp_chg_instからSELECT + try: + sql = """ + SELECT + eci.inst_cd, + eci.ta_cd, + eci.emp_cd, + eci.bu_cd, + eci.start_date, + eci.end_date, + eci.main_chg_flg, + eci.enabled_flg, + ( + SELECT + COUNT(eciopp.inst_cd) + FROM + src05.emp_chg_inst AS eciopp + WHERE + eciopp.inst_cd = :dup_opp_cd + AND eciopp.ta_cd = :ta_cd + ) AS opp_count + FROM + src05.emp_chg_inst AS eci + WHERE + eci.inst_cd = :dcf_inst_cd + AND eci.ta_cd = :ta_cd + AND eci.enabled_flg = 'Y' + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < 
eci.end_date + """ + params = {'dcf_inst_cd': dcf_inst_cd, 'dup_opp_cd': dup_opp_cd, 'ta_cd': ta_cd} + emp_chg_inst_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('従業員担当施設マスタの取得 成功') + except Exception as e: + logger.debug('従業員担当施設マスタの取得 失敗') + raise e + return emp_chg_inst_records + + +def _select_ult_ident_presc(db: Database, dcf_inst_cd: str, dup_opp_cd: str, + ult_ident_presc_row: dict) -> list[dict]: + # ult_ident_prescからSELECT + try: + sql = """ + SELECT + uip.ta_cd, + uip.ult_ident_cd, + uip.ratio, + uip.start_date, + uip.end_date, + ( + SELECT + COUNT(uipopp.ta_cd) + FROM + src05.ult_ident_presc AS uipopp + WHERE + uipopp.presc_cd = :dup_opp_cd + AND uipopp.ta_cd = :ta_cd + AND uipopp.ult_ident_cd = :ult_ident_cd + AND uipopp.ratio = :ratio + ) AS opp_count + FROM + src05.ult_ident_presc AS uip + WHERE + uip.presc_cd = :dcf_inst_cd + AND uip.ta_cd = :ta_cd + AND (SELECT ht.syor_date FROM src05.hdke_tbl AS ht) < uip.end_date + """ + params = { + 'dcf_inst_cd': dcf_inst_cd, + 'dup_opp_cd': dup_opp_cd, + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'] + } + ult_ident_presc_records = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('納入先処方元マスタの取得 成功') + except Exception as e: + logger.debug('納入先処方元マスタの取得 失敗') + raise e + return ult_ident_presc_records + + +def _count_duplicate_ult_ident_presc(db: Database, set_start_date: str, + ult_ident_presc_row: dict) -> int: + # ult_ident_prescの重複時相手先コードの件数取得 + try: + sql = """ + SELECT + COUNT(ta_cd) AS cnt + FROM + src05.ult_ident_presc + WHERE + ta_cd = :ta_cd + AND ult_ident_cd = :ult_ident_cd + AND ratio = :ratio + AND start_date = :start_date + """ + params = { + 'ta_cd': ult_ident_presc_row['ta_cd'], + 'ult_ident_cd': ult_ident_presc_row['ult_ident_cd'], + 'ratio': ult_ident_presc_row['ratio'], + 'start_date': set_start_date + } + result = db.execute_select(sql, params) + logging_sql(logger, sql) + logger.info('納入先処方元マスタの重複予定データの存在チェック 成功') + except Exception as e: + logger.debug('納入先処方元マスタの重複予定データの存在チェック 失敗') + raise e + return result[0]['cnt'] + + +def _get_first_day_of_month(year_month: str) -> datetime: + # year_monthの初日の日付を日付型に変換し返却する + return datetime.strptime(year_month + '01', '%Y%m%d') + + +def _str_to_date_time(str_date_time: str) -> datetime: + # str_date_timeを日付型に変換して返却する + return datetime.strptime(str_date_time, '%Y%m%d') + + +def _date_time_to_str(date_time: datetime) -> str: + # date_timeをYmd型に変換して返却する + return date_time.strftime('%Y%m%d') diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py index f6d682b4..3c086ca5 100644 --- a/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_laundering.py @@ -1,5 +1,8 @@ from src.batch.common.batch_context import BatchContext -from src.batch.laundering import create_inst_merge_for_laundering, emp_chg_inst_laundering, ult_ident_presc_laundering +from src.batch.laundering import ( + create_inst_merge_for_laundering, emp_chg_inst_laundering, + ult_ident_presc_laundering, sales_results_laundering) +from src.batch.dcf_inst_merge import integrate_dcf_inst_merge from src.logging.get_logger import get_logger batch_context = BatchContext.get_instance() @@ -16,10 +19,14 @@ def exec(): return # 洗替用マスタ作成 create_inst_merge_for_laundering.exec() + # DCF施設統合マスタ日次更新 + integrate_dcf_inst_merge.exec() # 
施設担当者洗替 emp_chg_inst_laundering.exec() # 納入先処方元マスタ洗替 ult_ident_presc_laundering.exec() + # 卸販売洗替 + sales_results_laundering.exec() # # 並列処理のテスト用コード # import time diff --git a/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py new file mode 100644 index 00000000..7f6d4259 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/laundering/sales_results_laundering.py @@ -0,0 +1,167 @@ +from src.batch.batch_functions import logging_sql +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger +from src.system_var import environment + +logger = get_logger('卸販売洗替') + + +def exec(): + db = Database.get_instance(autocommit=True) + try: + db.connect() + logger.debug('処理開始') + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + _call_sales_lau_delete(db) + # 卸販売実績テーブル(洗替後)作成 + _call_sales_lau_upsert(db) + # 1:卸組織洗替 + _call_whs_org_laundering(db) + # 3:HCO施設コードの洗替 + _update_sales_lau_from_vop_hco_merge_v(db) + # 4:メルク施設コードの洗替 + _update_mst_inst_laundering(db) + logger.debug('処理終了') + except Exception as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + +def _call_sales_lau_delete(db: Database): + # 卸販売実績テーブル(洗替後)過去5年以前のデータ削除 + logger.info('sales_lau_delete(プロシージャ―) 開始') + db.execute(f""" + CALL src05.sales_lau_delete( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + {environment.SALES_LAUNDERING_TARGET_YEAR_OFFSET} + ) + """) + logger.info('sales_lau_delete(プロシージャ―) 終了') + return + + +def _call_sales_lau_upsert(db: Database): + # 卸販売実績テーブル(洗替後)作成 + logger.info('sales_lau_upsert(プロシージャ―) 開始') + db.execute(f""" + CALL src05.sales_lau_upsert( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}', + (src05.get_syor_date() - interval {environment.SALES_LAUNDERING_EXTRACT_DATE_PERIOD} day), + src05.get_syor_date() + ) + """) + logger.info('sales_lau_upsert(プロシージャ―) 終了') + return + + +def _call_whs_org_laundering(db: Database): + # 卸組織洗替 + logger.info('whs_org_laundering(プロシージャ―) 開始') + db.execute(f""" + CALL src05.whs_org_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) + logger.info('whs_org_laundering(プロシージャ―) 終了') + return + + +def _update_sales_lau_from_vop_hco_merge_v(db: Database): + # HCO施設コードの洗替 + if _count_v_inst_merge_t(db) == 0: + logger.info('V施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') + return + + _call_v_inst_merge_laundering(db) + return + + +def _count_v_inst_merge_t(db: Database) -> int: + # V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 + try: + sql = """ + SELECT + COUNT(v_inst_cd) AS cnt + FROM + internal05.v_inst_merge_t + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') + except Exception as e: + logger.debug('V施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_v_inst_merge_laundering(db: Database): + # HCO施設コードの洗替(プロシージャ―の呼び出し) + logger.info('v_inst_merge_laundering(プロシージャ―) 開始') + db.execute(f""" + CALL src05.v_inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) + logger.info('v_inst_merge_laundering(プロシージャ―) 終了') + return + + +def _update_mst_inst_laundering(db: Database): + # メルク施設コードの洗替 + _call_hco_to_mdb_laundering(db) + _update_sales_lau_from_dcf_inst_merge(db) + + +def _call_hco_to_mdb_laundering(db: Database): + # A:医療機関のデータはMDB変換表からHCO⇒DCFへ変換 + logger.info('hco_to_mdb_laundering(プロシージャ―) 開始') + db.execute(f""" + CALL src05.hco_to_mdb_laundering( + 
'{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) + logger.info('hco_to_mdb_laundering(プロシージャ―) 終了') + return + + +def _update_sales_lau_from_dcf_inst_merge(db: Database): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする + if _count_inst_merge_t(db) == 0: + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)にデータは存在しません') + return + _call_inst_merge_laundering(db) + return + + +def _count_inst_merge_t(db: Database) -> int: + # アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 + try: + sql = """ + SELECT + COUNT(dcf_dsf_inst_cd) AS cnt + FROM + internal05.inst_merge_t + """ + result = db.execute_select(sql) + logging_sql(logger, sql) + logger.info('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 成功') + except Exception as e: + logger.debug('アルトマーク施設統合マスタ(洗替処理一時テーブル)のデータ件数の取得 失敗') + raise e + + return result[0]['cnt'] + + +def _call_inst_merge_laundering(db: Database): + # B:DCF施設統合マスタがある場合は、コードを変換し、住所等をSETする(プロシージャ―の呼び出し) + logger.info('inst_merge_laundering(プロシージャ―) 開始') + db.execute(f""" + CALL src05.inst_merge_laundering( + '{environment.SALES_LAUNDERING_TARGET_TABLE_NAME}' + ) + """) + logger.info('inst_merge_laundering(プロシージャ―) 終了') + return diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py new file mode 100644 index 00000000..838dae34 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/output_vjsk_inst_pharm_data.py @@ -0,0 +1,271 @@ +"""output_vjsk_inst_pharm_data""" + +from src.aws.s3 import VjskSendBucket +from src.batch.common.batch_context import BatchContext + +from src.db.database import Database +from src.logging.get_logger import get_logger +import tempfile +import os.path as path +import csv + +logger = get_logger('V実消化用施設データ作成処理') + +sql_err_msg = "SQL実行エラーです。" + + +def exec(): + vjsk_csv_file_name = 'ComInst.csv' + + # バッチ共通設定を取得 + batch_context = BatchContext.get_instance() + + if not batch_context.is_ultmarc_imported: + logger.info('アルトマーク取込が行われていないため、V実消化用施設データ作成処理をスキップします。') + return + + db = Database.get_instance() + try: + logger.info('処理開始') + + try: + # DB接続 + db.connect() + except Exception as e: + logger.info('DB接続エラーです。') + raise e + + # CSVファイルの作成用のSQL実行(施設) + record_inst = select_inst_record(db) + # CSVファイルの作成用のSQL実行(薬局) + record_pharm = select_pharm_record(db) + # CSVファイル作成 + csv_file_path = make_csv_data(record_inst, record_pharm, vjsk_csv_file_name) + + vjsk_bucket = VjskSendBucket() + try: + # s3へデータ移動 + vjsk_bucket.upload_inst_pharm_csv_file(vjsk_csv_file_name, csv_file_path) + except Exception as e: + logger.info('S3バケットにCSVデータを作成できませんでした。') + raise e + + try: + # 処理後ファイルをバックアップ + vjsk_bucket.backup_inst_pharm_csv_file(vjsk_csv_file_name, batch_context.syor_date) + except Exception as e: + logger.info('バックアップバケットへCSVデータをコピーできませんでした。') + raise e + + csv_count = len(record_inst) + len(record_pharm) + logger.info(f'CSV出力件数: {csv_count}') + logger.info('正常終了') + except Exception as e: + raise e + finally: + db.disconnect() + return + + +def select_inst_record(db): + # CSVファイル作成用のSQL実行(施設) + try: + # 施設テーブル検索SQL + sql = """\ + SELECT dcf_dsf_inst_cd, + inst_div_cd, + addr_unknown_reason_cd, + form_inst_name_kana, + inst_name_kana, + form_inst_name_kanji, + inst_name_kanji, + rltd_univ_prnt_cd, + bed_num, + close_flg, + estab_sche_flg, + close_start_ym, + estab_sche_ym, + ward_abolish_flg, + inst_repre_cd, + inst_repre_kana, + inst_repre, + phone_number_non_flg, + unconf_flg, + inst_phone_number, + inst_addr_kana, + inst_addr, + postal_number, + village_cd, + prefc_cd, + city_cd, + 
addr_display_number, + addr_cnt_kana, + addr_cnt, + manage_cd, + delete_sche_reason_cd, + hp_assrt_cd, + dup_opp_cd, + insp_item_micrb, + insp_item_serum, + insp_item_blood, + insp_item_patho, + insp_item_paras, + insp_item_biochem, + insp_item_ri, + re_exam_cd, + prmit_bed_num_other, + prmit_bed_num_mental, + prmit_bed_num_tuber, + prmit_bed_num_infection, + prmit_bed_num_sum, + prmit_bed_num_gen, + prmit_bed_num_rcup, + prmit_bed_maint_ymd, + inst_pharm_div, + abolish_ymd, + delete_flg, + filler_1, + filler_2, + filler_3, + filler_4, + filler_5, + regist_date, + create_user, + update_date, + update_user, + sys_regist_date, + regist_prgm_id, + sys_update_date, + update_prgm_id + FROM src05.com_inst ORDER BY dcf_dsf_inst_cd + """ + return db.execute_select(sql) + except Exception as e: + logger.debug(sql_err_msg) + raise e + + +def select_pharm_record(db): + # CSVファイル作成用のSQL実行(薬局) + try: + # 薬局テーブル検索SQL + sql = """\ + SELECT dcf_dsf_inst_cd, + inst_div_cd, + addr_unknown_reason_cd, + form_inst_name_kana, + inst_name_kana, + form_inst_name_kanji, + inst_name_kanji, + '' AS rltd_univ_prnt_cd, + '' AS bed_num, + close_flg, + estab_sche_flg, + close_start_ym, + estab_sche_ym, + '' AS ward_abolish_flg, + '' AS inst_repre_cd, + inst_repre_kana, + inst_repre, + phone_number_non_flg, + unconf_flg, + inst_phone_number, + inst_addr_kana, + inst_addr, + postal_number, + village_cd, + prefc_cd, + city_cd, + addr_display_number, + addr_cnt_kana, + addr_cnt, + manage_cd, + delete_sche_reason_cd, + '' AS hp_assrt_cd, + dup_opp_cd, + '' AS insp_item_micrb, + '' AS insp_item_serum, + '' AS insp_item_blood, + '' AS insp_item_patho, + '' AS insp_item_paras, + '' AS insp_item_biochem, + '' AS insp_item_ri, + '' AS re_exam_cd, + '' AS prmit_bed_num_other, + '' AS prmit_bed_num_mental, + '' AS prmit_bed_num_tuber, + '' AS prmit_bed_num_infection, + '' AS prmit_bed_num_sum, + '' AS prmit_bed_num_gen, + '' AS prmit_bed_num_rcup, + '' AS prmit_bed_maint_ymd, + inst_pharm_div, + abolish_ymd, + delete_flg, + filler_1, + filler_2, + filler_3, + filler_4, + filler_5, + regist_date, + create_user, + update_date, + update_user, + sys_regist_date, + regist_prgm_id, + sys_update_date, + update_prgm_id + FROM src05.com_pharm ORDER BY dcf_dsf_inst_cd + """ + return db.execute_select(sql) + except Exception as e: + logger.debug(sql_err_msg) + raise e + + +def make_csv_data(record_inst: list, record_pharm: list, vjsk_csv_file_name: str): + # 一時ファイルとして保存する(CSVファイル) + try: + + temporary_dir = tempfile.mkdtemp() + csv_file_path = path.join(temporary_dir, vjsk_csv_file_name) + + head_str = ['DCF_DSF_INST_CD', 'INST_DIV_CD', 'ADDR_UNKNOWN_REASON_CD', 'FORM_INST_NAME_KANA', 'INST_NAME_KANA', + 'FORM_INST_NAME_KANJI', 'INST_NAME_KANJI', 'RLTD_UNIV_PRNT_CD', 'BED_NUM', 'CLOSE_FLG', 'ESTAB_SCHE_FLG', + 'CLOSE_START_YM', 'ESTAB_SCHE_YM', 'WARD_ABOLISH_FLG', 'INST_REPRE_CD', 'INST_REPRE_KANA', 'INST_REPRE', + 'PHONE_NUMBER_NON_FLG', 'UNCONF_FLG', 'INST_PHONE_NUMBER', 'INST_ADDR_KANA', 'INST_ADDR', 'POSTAL_NUMBER', + 'VILLAGE_CD', 'PREFC_CD', 'CITY_CD', 'ADDR_DISPLAY_NUMBER', 'ADDR_CNT_KANA', 'ADDR_CNT', 'MANAGE_CD', + 'DELETE_SCHE_REASON_CD', 'HP_ASSRT_CD', 'DUP_OPP_CD', 'INSP_ITEM_MICRB', 'INSP_ITEM_SERUM', 'INSP_ITEM_BLOOD', + 'INSP_ITEM_PATHO', 'INSP_ITEM_PARAS', 'INSP_ITEM_BIOCHEM', 'INSP_ITEM_RI', 'RE_EXAM_CD', 'PRMIT_BED_NUM_OTHER', + 'PRMIT_BED_NUM_MENTAL', 'PRMIT_BED_NUM_TUBER', 'PRMIT_BED_NUM_INFECTION', 'PRMIT_BED_NUM_SUM', 'PRMIT_BED_NUM_GEN', + 'PRMIT_BED_NUM_RCUP', 'PRMIT_BED_MAINT_YMD', 'INST_PHARM_DIV', 'ABOLISH_YMD', 
'DELETE_FLG', 'FILLER_1', 'FILLER_2', + 'FILLER_3', 'FILLER_4', 'FILLER_5', 'REGIST_DATE', 'CREATE_USER', 'UPDATE_DATE', 'UPDATE_USER', 'SYS_REGIST_DATE', + 'REGIST_PRGM_ID', 'SYS_UPDATE_DATE', 'UPDATE_PRGM_ID'] + + with open(csv_file_path, mode='w', encoding='UTF-8') as csv_file: + # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています) + csv_file.write(f"{','.join(head_str)}\n") + + # Shift-JIS、CRLF、価囲いありで書き込む + writer = csv.writer(csv_file, delimiter=',', lineterminator='\n', + quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, + strict=True + ) + + # データ部分書き込み(施設) + for record_inst_data in record_inst: + record_inst_value = list(record_inst_data.values()) + csv_data = ['' if n is None else n for n in record_inst_value] + writer.writerow(csv_data) + + # データ部分書き込み(薬局) + for record_pharm_data in record_pharm: + record_pharm_value = list(record_pharm_data.values()) + csv_data = ['' if n is None else n for n in record_pharm_value] + writer.writerow(csv_data) + + except Exception as e: + logger.info('CSVデータの作成に失敗しました。') + raise e + + return csv_file_path diff --git a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py index b511a9c8..0e2ffe6a 100644 --- a/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py +++ b/ecs/jskult-batch-daily/src/batch/ultmarc/ultmarc_process.py @@ -5,6 +5,7 @@ from datetime import datetime from src.aws.s3 import UltmarcBucket from src.batch.common.batch_context import BatchContext from src.batch.ultmarc.datfile import DatFile + from src.batch.ultmarc.utmp_tables.ultmarc_table_mapper_factory import \ UltmarcTableMapperFactory from src.db.database import Database @@ -60,13 +61,6 @@ def exec_import(): raise BatchOperationException(e) -def exec_export(): - """V実消化用施設・薬局薬店データ作成処理""" - if not batch_context.is_ultmarc_imported: - logger.info('アルトマーク取込が行われていないため、V実消化用施設・薬局薬店データ作成処理をスキップします。') - return - - def _import_to_ultmarc_table(dat_file: DatFile): db = Database.get_instance() try: diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py new file mode 100644 index 00000000..cef4e1ec --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_data_load_manager.py @@ -0,0 +1,103 @@ +# from src.batch.vjsk.vjsk_recv_file_manager import VjskDatFile +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper +from src.db.database import Database +from src.error.exceptions import BatchOperationException +from src.logging.get_logger import get_logger + +logger = get_logger('V実消化データ取込(DB登録)') +mapper = VjskReceiveFileMapper() + + +class VjskDataLoadManager: + def __init__(self): + pass + + def _import_to_db(src_file_name: str, condkey: str): + logger.debug(f"_import_to_db start (src_file_name : {src_file_name}, condkey : {condkey})") + + db = Database.get_instance() + data_name = mapper.get_data_name(condkey) + table_name_org = mapper.get_org_table(condkey) + table_name_src = mapper.get_src_table(condkey) + upsert_sql = mapper.get_upsert_sql(condkey) + + try: + db.connect() + db.execute("SET SESSION sql_mode = 'TRADITIONAL';") + + # orgをtruncate + db.execute(f"TRUNCATE TABLE {table_name_org};") + + # orgにload ※warningが発生すれば異常終了させる + sql = f"""\ + LOAD DATA LOCAL INFILE :src_file_name + INTO TABLE {table_name_org} + FIELDS TERMINATED BY '\\t' + ENCLOSED BY '\"' + IGNORE 1 LINES; + """ + db.begin() + result = db.execute(sql, {"src_file_name": src_file_name}) + 
+            logger.info(f'{data_name}tsvファイルを{table_name_org}にLOAD : 件数({result.rowcount})')
+            db.commit()
+
+            # org→srcにinsert select
+            db.begin()
+            logger.debug(upsert_sql)
+            db.execute(upsert_sql)
+            # MEMO: insert+selectの結果件数は、LOAD結果と必ず等しいので、executeの結果件数はログ出力しない
+            # MEMO: insert+select 実質10件なのに、result.rowcountは20件になってしまう ※sqlalchemyの仕様
+            # MEMO: https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.BaseCursorResult.rowcount
+            logger.info(f'{table_name_org}を{table_name_src}にUPSERT')
+
+            db.commit()
+        except Exception as e:
+            db.rollback()
+            raise BatchOperationException(e)
+        finally:
+            db.disconnect()
+
+        logger.debug("_import_to_db done")
+        return
+
+    @staticmethod
+    def _get_tsv_last_row_tab_count(src_file_name: str) -> int:
+        # memo: tsvファイルが数百MBに及ぶことを想定して、末尾から1行分を参照する
+        # memo: 前提1 行区切りは LF('\n')
+        buf_count = 0
+
+        # バイナリモードでファイルオープン
+        with open(src_file_name, 'rb') as file:
+            # ファイルの末尾から2バイト手前に移動
+            file.seek(-2, 2)
+            # 改行文字を見つけるまで逆方向に読み進める
+            while file.read(1) != b'\n':
+                # 1バイト戻って再度読み込み
+                file.seek(-2, 1)
+            # 末尾行を抽出
+            last_line = file.readline().decode().rstrip('\n')
+            # 末尾行に含まれるタブ文字の数を抽出
+            buf_count = last_line.count('\t')
+
+        return buf_count
+
+    @classmethod
+    def load(cls, target: dict):
+        logger.debug(f'load start target:{target}')
+
+        # S3からローカルストレージにdownloadした登録対象のtsvファイルパスを取得
+        local_file_name = target["src_file_path"]
+
+        # tsvファイル末尾行のTABの数が想定数と一致しない場合は例外をスロー
+        # TODO: ↓↓↓developへのマージを優先させたいので、未テストのロジックはコメントアウトする
+        # tsv_tabs = cls._get_tsv_last_row_tab_count(local_file_name)
+        # expect_tabs = mapper.get_file_column_separators(target["condkey"])
+        # if tsv_tabs != expect_tabs:
+        #     msg = f"受領tsvファイルの末尾行のTABの数が想定数と一致しませんでした local_file_name: {local_file_name}"
+        #     raise BatchOperationException(msg)
+        # TODO: ↑↑↑developへのマージを優先させたいので、未テストのロジックはコメントアウトする
+
+        # データベース登録
+        cls._import_to_db(local_file_name, target["condkey"])
+
+        logger.debug('load done')
+        return
diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py
new file mode 100644
index 00000000..694f93ac
--- /dev/null
+++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_importer.py
@@ -0,0 +1,258 @@
+from src.aws.s3 import ConfigBucket, VjskReceiveBucket
+from src.batch.common.batch_context import BatchContext
+from src.batch.common.calendar_file import CalendarFile
+from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager
+from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper
+from src.error.exceptions import BatchOperationException
+from src.logging.get_logger import get_logger
+
+logger = get_logger('V実消化データ取込')
+batch_context = BatchContext.get_instance()
+vjsk_recv_bucket = VjskReceiveBucket()
+vjsk_mapper = VjskReceiveFileMapper()
+
+
+def exec():
+    """V実消化データ取込処理"""
+    logger.debug('exec start')
+
+    # 非営業日なら何もせず終了
+    if batch_context.is_not_business_day:
+        logger.debug('非営業日なので処理をスキップ')
+        return
+
+    # 卸在庫データ取込対象日であれば、卸在庫データ処理対象フラグを立てる
+    logger.debug('卸在庫データ取込対象日であるかを判定')
+    batch_context.is_vjsk_stock_import_day = _determine_today_is_stockslipdata_target()
+    logger.debug(f'判定結果 : {batch_context.is_vjsk_stock_import_day}')
+    if batch_context.is_vjsk_stock_import_day:
+        logger.info('卸在庫データ取込対象日です')
+
+    # V実消化データファイル受領チェック
+    logger.debug('V実消化データファイル受領チェック:開始')
+    received_s3_files = []
+    try:
+        # S3バケットにある受領済のV実消化データファイルの存在チェックをする
+        received_s3_files = _check_received_files()
+
+    except BatchOperationException as e:
+        logger.debug('受領予定のV実消化データファイルに未受領のものがあります')
+        raise e
+    logger.debug('V実消化データファイル受領チェック:終了')
+
+    # データベース取込
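+    # MEMO: received_s3_files は "filename" キーにS3キーを持つ辞書のリストを想定
+    # (例: [{"filename": "<受領フォルダ>/slip_data_00000000000000.gz"}, ...]。
+    #  実際の要素構成は VjskReceiveBucket.get_s3_file_list() の実装に依存する)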
+    logger.debug('V実消化データ取込:開始')
+    try:
+        # S3バケットにある受領済のV実消化データファイルをデータベースに登録する
+        _import_file_to_db(received_s3_files)
+    except Exception as e:
+        logger.debug(f'データベース登録失敗 {e}')
+        raise e
+    logger.debug('V実消化データ取込:終了')
+
+    # V実消化データ受領ファイルバックアップ退避
+    logger.debug('V実消化データ受領ファイルバックアップ退避:開始')
+    try:
+        # 取込が完了したS3バケットにある受領ファイルをバックアップ用S3バケットに移動する
+        vjsk_recv_bucket.backup_dat_file(received_s3_files, batch_context.syor_date)
+
+    except BatchOperationException as e:
+        logger.debug('V実消化データ受領ファイルのバックアップ退避が失敗しました')
+        raise e
+    logger.debug('V実消化データ受領ファイルバックアップ退避:終了')
+
+    logger.debug('exec done')
+
+
+def _check_if_file_exists(src_list: list, condkey: str) -> bool:
+    logger.debug(f"_check_if_file_exists start (src_list : {src_list} , condkey : {condkey})")
+    # ファイル接頭辞と拡張子が一致するかで判定する
+    ret = False
+    pref = vjsk_mapper.get_file_prefix(condkey)
+    suff = vjsk_mapper.get_file_suffix(condkey)
+
+    for elem in src_list:
+        buf = elem.get("filename")
+        filename = buf[buf.rfind("/") + 1:]
+        if filename.startswith(pref) and filename.endswith(suff):
+            ret = True
+            break
+
+    logger.debug(f"_check_if_file_exists done (return : {ret})")
+    return ret
+
+
+def _check_received_files() -> list:
+    """V実消化連携データファイル受領確認処理"""
+    logger.debug('_check_received_files start')
+
+    # S3バケット「実消化&アルトマーク V実消化データ受領バケット」にある受領ファイル一覧を取得
+    received_s3_files = vjsk_recv_bucket.get_s3_file_list()
+    logger.debug(f'ファイル一覧{received_s3_files}')
+
+    # ファイル存在確認 卸在庫データファイル(卸在庫データ処理対象日のみ実施)
+    if batch_context.is_vjsk_stock_import_day:
+        if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_STOCK_SLIP_DATA):
+            raise BatchOperationException(f'卸在庫データファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 卸販売データ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_SLIP_DATA):
+        raise BatchOperationException(f'卸販売データファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 卸組織変換マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_ORG_CNV_MST):
+        raise BatchOperationException(f'卸組織変換マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 施設統合マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_VOP_HCO_MERGE):
+        raise BatchOperationException(f'施設統合マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 卸マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_MST):
+        raise BatchOperationException(f'卸マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 卸ホールディングスマスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_HLD_MST):
+        raise BatchOperationException(f'卸ホールディングスマスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 施設マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_FCL_MST):
+        raise BatchOperationException(f'施設マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 メーカー卸組織展開表
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MKR_ORG_HORIZON):
+        raise BatchOperationException(f'メーカー卸組織展開表ファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 取引区分マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_TRAN_KBN_MST):
+        raise BatchOperationException(f'取引区分マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 製品マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRD_MST):
+        raise BatchOperationException(f'製品マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 製品価格マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_PHM_PRICE_MST):
+        raise BatchOperationException(f'製品価格マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 卸得意先情報マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST):
+        raise BatchOperationException(f'卸得意先情報マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 MDBコード変換マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_MDB_CONV_MST):
+        raise BatchOperationException(f'MDBコード変換マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 生物由来データ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_BIO_SLIP_DATA):
+        raise BatchOperationException(f'生物由来データファイルがありません ファイル一覧:{received_s3_files}')
+
+    # ファイル存在確認 製造ロット番号マスタ
+    if not _check_if_file_exists(received_s3_files, vjsk_mapper.CONDKEY_LOT_NUM_MST):
+        raise BatchOperationException(f'製造ロット番号マスタファイルがありません ファイル一覧:{received_s3_files}')
+
+    # 想定外ファイルの受領確認(想定ファイル数は、卸在庫データ取込対象日の場合は15、そうでない場合は14)
+    expected_count = 15 if batch_context.is_vjsk_stock_import_day else 14
+    if len(received_s3_files) > expected_count:
+        raise BatchOperationException(f'想定数を超える受領ファイルがあります ファイル一覧:{received_s3_files}')
+
+    logger.debug('_check_received_files done')
+
+    return received_s3_files
+
+
+def _import_file_to_db(received_s3_files: list):
+    """V実消化連携データ取込処理"""
+    logger.debug('_import_file_to_db start')
+
+    # S3バケット「実消化&アルトマーク V実消化データ受領バケット」の受領ファイルをローカルストレージにdownloadして辞書化する
+    target_dict = {}
+    for s3_file_path in received_s3_files:
+        file_name = s3_file_path.get('filename')
+
+        # S3バケットにある受領ファイルをローカルストレージにdownloadする
+        logger.debug(f"download s3 file start : {file_name}")
+        local_file_path = vjsk_recv_bucket.download_data_file(file_name)
+        logger.debug(f"download s3 file done : {file_name}")
+
+        # ローカルストレージにdownloadした受領ファイル(tar.gz)を解凍する
+        unzip_file_path = vjsk_recv_bucket.unzip_data_file(local_file_path)
+        logger.debug(f"unzip done : {unzip_file_path}")
+
+        # データファイル名に該当する辞書アクセス用のキーを取得する
+        key = vjsk_mapper.get_condkey_by_s3_file_path(file_name)
+
+        # 想定されたデータファイルであれば辞書登録する
+        if key is not None:
+            # ※受領ファイル(tar.gz)の書庫構成はtsvファイルが1つだけの前提
+            target_dict[key] = {"condkey": key, "src_file_path": unzip_file_path[0]}
+    logger.debug(f'取込対象データファイル辞書{target_dict}')
+
+    # DB登録 卸在庫データファイル(卸在庫データ処理対象日のみ実施)
+    if batch_context.is_vjsk_stock_import_day:
+        VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_STOCK_SLIP_DATA])
+
+    # DB登録 卸販売データ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_SLIP_DATA])
+
+    # DB登録 卸組織変換マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_ORG_CNV_MST])
+
+    # DB登録 施設統合マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_VOP_HCO_MERGE])
+
+    # DB登録 卸マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_MST])
+
+    # DB登録 卸ホールディングスマスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_HLD_MST])
+
+    # DB登録 施設マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_FCL_MST])
+
+    # DB登録 メーカー卸組織展開表
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MKR_ORG_HORIZON])
+
+    # DB登録 取引区分マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_TRAN_KBN_MST])
+
+    # DB登録 製品マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRD_MST])
+
+    # DB登録 製品価格マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_PHM_PRICE_MST])
+
+    # DB登録 卸得意先情報マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_WHS_CUSTOMER_MST])
+
+    # DB登録 MDBコード変換マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_MDB_CONV_MST])
+
+    # DB登録 生物由来データ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_BIO_SLIP_DATA])
+
+    # DB登録 製造ロット番号マスタ
+    VjskDataLoadManager.load(target_dict[vjsk_mapper.CONDKEY_LOT_NUM_MST])
+
+
logger.debug('_import_file_to_db done') + + +def _determine_today_is_stockslipdata_target(): + """設定ファイル「V実消化卸在庫データ連携日ファイル」の内容を取得して、処理日付が該当していればTrueを返却する""" + logger.debug("_determine_today_is_stockslipdata_target start") + try: + # 処理日付を取得する + today = batch_context.syor_date + + # S3バケット上の設定ファイル「V実消化卸在庫データ連携日ファイル」をローカルストレージにdownloadする + wholesaler_stock_list_file_path = ConfigBucket().download_wholesaler_stock_input_day_list() + + # 設定ファイル「V実消化卸在庫データ連携日ファイル」の定義内容を取得する + target_days = CalendarFile(wholesaler_stock_list_file_path) + + # 処理日付が、設定ファイル「V実消化卸在庫データ連携日ファイル」の定義に該当するかを判定する + ret = target_days.compare_date(today) + except Exception as e: + logger.error(f'{e}') + raise e + logger.debug("_determine_today_is_stockslipdata_target done") + return ret diff --git a/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py new file mode 100644 index 00000000..19f70067 --- /dev/null +++ b/ecs/jskult-batch-daily/src/batch/vjsk/vjsk_recv_file_mapper.py @@ -0,0 +1,1521 @@ +import textwrap + + +class VjskReceiveFileMapper: + CONDKEY_SLIP_DATA = "SLIP_DATA" # 販売実績データ + CONDKEY_HLD_MST = "HLD_MST" # V卸ホールディングスマスタ + CONDKEY_WHS_MST = "WHS_MST" # V卸マスタ + CONDKEY_MKR_ORG_HORIZON = "MKR_ORG_HORIZON" # Vメーカー卸組織展開表 + CONDKEY_ORG_CNV_MST = "ORG_CNV_MST" # V卸組織変換マスタ + CONDKEY_TRAN_KBN_MST = "TRAN_KBN_MST" # V取引区分マスタ + CONDKEY_FCL_MST = "FCL_MST" # V施設マスタ + CONDKEY_PHM_PRD_MST = "PHM_PRD_MST" # V製品マスタ + CONDKEY_PHM_PRICE_MST = "PHM_PRICE_MST" # V製品価格マスタ + CONDKEY_VOP_HCO_MERGE = "VOP_HCO_MERGE" # V施設統合マスタ + CONDKEY_WHS_CUSTOMER_MST = "WHS_CUSTOMER_MST" # V卸得意先情報マスタ + CONDKEY_MDB_CONV_MST = "MDB_CONV_MST" # MDBコード変換表 + CONDKEY_STOCK_SLIP_DATA = "STOCK_SLIP_DATA" # 卸在庫データ + CONDKEY_BIO_SLIP_DATA = "BIO_SLIP_DATA" # 生物由来データ + CONDKEY_LOT_NUM_MST = "LOT_NUM_MST" # ロットマスタデータ + + _KEY_DATA_NAME = "data_name" + _KEY_FILE_PREFIX = "file_prefix" + _KEY_FILE_SUFFIX = "file_suffix" + _KEY_FILE_COLUMN_SEPARATORS = "file_column_separators" + _KEY_ORG_TABLE = "org_table" + _KEY_SRC_TABLE = "src_table" + _KEY_UPSERT_SQL = "upsert_sql" + _VJSK_INTERFACE_MAPPING = { + # 販売実績データ + CONDKEY_SLIP_DATA: { + _KEY_DATA_NAME: "販売実績データ", + _KEY_FILE_PREFIX: "slip_data_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "82", + _KEY_ORG_TABLE: "org05.sales", + _KEY_SRC_TABLE: "src05.sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.sales ( + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_qty + ,rec_nonyu_price + ,rec_nonyu_amt + ,rec_comm_name + ,rec_nonyu_fcl_name + ,free_item + ,rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd + ,rec_bef_slip_num + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + ,v_inst_cd + ,v_inst_kn + ,v_inst_name + ,v_inst_addr + ,comm_cd + ,comm_name + ,nonyu_qty + ,nonyu_price + ,nonyu_amt + ,shikiri_price + ,shikiri_amt + ,nhi_price + ,nhi_amt + ,whspos_err_kbn + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,bef_hsdn_ymd + ,bef_slip_num + ,slip_org_kbn + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + 
,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + ,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt + ) + SELECT + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_qty + ,t.rec_nonyu_price + ,t.rec_nonyu_amt + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.free_item + ,t.rec_nonyu_fcl_addr + ,t.rec_nonyu_fcl_post + ,t.rec_nonyu_fcl_tel + ,t.rec_bef_hsdn_ymd + ,t.rec_bef_slip_num + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_kn + ,t.v_inst_name + ,t.v_inst_addr + ,t.comm_cd + ,t.comm_name + ,t.nonyu_qty + ,t.nonyu_price + ,t.nonyu_amt + ,t.shikiri_price + ,t.shikiri_amt + ,t.nhi_price + ,t.nhi_amt + ,t.whspos_err_kbn + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.bef_hsdn_ymd + ,t.bef_slip_num + ,t.slip_org_kbn + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.ins_usr + ,SYSDATE() + FROM org05.sales AS t + ON DUPLICATE KEY UPDATE + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + ,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_qty=t.rec_qty + ,rec_nonyu_price=t.rec_nonyu_price + ,rec_nonyu_amt=t.rec_nonyu_amt + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,free_item=t.free_item + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_nonyu_fcl_post=t.rec_nonyu_fcl_post + ,rec_nonyu_fcl_tel=t.rec_nonyu_fcl_tel + ,rec_bef_hsdn_ymd=t.rec_bef_hsdn_ymd + ,rec_bef_slip_num=t.rec_bef_slip_num + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + ,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_kn=t.v_inst_kn + ,v_inst_name=t.v_inst_name + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,comm_name=t.comm_name + ,nonyu_qty=t.nonyu_qty + ,nonyu_price=t.nonyu_price + ,nonyu_amt=t.nonyu_amt + ,shikiri_price=t.shikiri_price + ,shikiri_amt=t.shikiri_amt + ,nhi_price=t.nhi_price + ,nhi_amt=t.nhi_amt + ,whspos_err_kbn=t.whspos_err_kbn + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,bef_hsdn_ymd=t.bef_hsdn_ymd + ,bef_slip_num=t.bef_slip_num + ,slip_org_kbn=t.slip_org_kbn + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + 
                    ,err_flg10=t.err_flg10
+                    ,err_flg11=t.err_flg11
+                    ,err_flg12=t.err_flg12
+                    ,err_flg13=t.err_flg13
+                    ,err_flg14=t.err_flg14
+                    ,err_flg15=t.err_flg15
+                    ,err_flg16=t.err_flg16
+                    ,err_flg17=t.err_flg17
+                    ,err_flg18=t.err_flg18
+                    ,err_flg19=t.err_flg19
+                    ,err_flg20=t.err_flg20
+                    ,kjyo_ym=t.kjyo_ym
+                    ,tksnbk_kbn=t.tksnbk_kbn
+                    ,fcl_exec_kbn=t.fcl_exec_kbn
+                    ,rec_sts_kbn=t.rec_sts_kbn
+                    ,ins_dt=nullif(t.ins_dt, 0)
+                    ,ins_usr=t.ins_usr
+                    ,dwh_upd_dt=SYSDATE()
+                ;
+            """)
+        },
+
+        # V卸ホールディングスマスタ
+        CONDKEY_HLD_MST: {
+            _KEY_DATA_NAME: "V卸ホールディングスマスタ",
+            _KEY_FILE_PREFIX: "hld_mst_",
+            _KEY_FILE_SUFFIX: ".gz",
+            _KEY_FILE_COLUMN_SEPARATORS: "10",
+            _KEY_ORG_TABLE: "org05.hld_mst_v",
+            _KEY_SRC_TABLE: "src05.hld_mst_v",
+            _KEY_UPSERT_SQL: textwrap.dedent("""\
+                INSERT INTO src05.hld_mst_v (
+                    v_hld_cd
+                    ,sub_num
+                    ,name
+                    ,kn_name
+                    ,abb_name
+                    ,start_date
+                    ,end_date
+                    ,dsp_odr
+                    ,rec_sts_kbn
+                    ,ins_dt
+                    ,upd_dt
+                    ,dwh_upd_dt
+                )
+                SELECT
+                    t.v_hld_cd
+                    ,t.sub_num
+                    ,t.name
+                    ,t.kn_name
+                    ,t.abb_name
+                    ,t.start_date
+                    ,t.end_date
+                    ,t.dsp_odr
+                    ,t.rec_sts_kbn
+                    ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する
+                    ,t.upd_dt
+                    ,SYSDATE()
+                FROM org05.hld_mst_v AS t
+                ON DUPLICATE KEY UPDATE
+                    v_hld_cd=t.v_hld_cd
+                    ,sub_num=t.sub_num
+                    ,name=t.name
+                    ,kn_name=t.kn_name
+                    ,abb_name=t.abb_name
+                    ,start_date=t.start_date
+                    ,end_date=t.end_date
+                    ,dsp_odr=t.dsp_odr
+                    ,rec_sts_kbn=t.rec_sts_kbn
+                    ,ins_dt=nullif(t.ins_dt, 0)
+                    ,upd_dt=t.upd_dt
+                    ,dwh_upd_dt=SYSDATE()
+                ;
+            """)
+        },
+
+        # V卸マスタ
+        CONDKEY_WHS_MST: {
+            _KEY_DATA_NAME: "V卸マスタ",
+            _KEY_FILE_PREFIX: "whs_mst_",
+            _KEY_FILE_SUFFIX: ".gz",
+            _KEY_FILE_COLUMN_SEPARATORS: "15",
+            _KEY_ORG_TABLE: "org05.whs_mst_v",
+            _KEY_SRC_TABLE: "src05.whs_mst_v",
+            _KEY_UPSERT_SQL: textwrap.dedent("""\
+                INSERT INTO src05.whs_mst_v (
+                    v_whs_cd
+                    ,sub_num
+                    ,name
+                    ,kn_name
+                    ,abb_name
+                    ,postal_cd
+                    ,addr
+                    ,kn_addr
+                    ,tel_num
+                    ,v_hld_cd
+                    ,start_date
+                    ,end_date
+                    ,dsp_odr
+                    ,rec_sts_kbn
+                    ,ins_dt
+                    ,upd_dt
+                    ,dwh_upd_dt
+                )
+                SELECT
+                    t.v_whs_cd
+                    ,t.sub_num
+                    ,t.name
+                    ,t.kn_name
+                    ,t.abb_name
+                    ,t.postal_cd
+                    ,t.addr
+                    ,t.kn_addr
+                    ,t.tel_num
+                    ,t.v_hld_cd
+                    ,t.start_date
+                    ,t.end_date
+                    ,t.dsp_odr
+                    ,t.rec_sts_kbn
+                    ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する
+                    ,t.upd_dt
+                    ,SYSDATE()
+                FROM org05.whs_mst_v AS t
+                ON DUPLICATE KEY UPDATE
+                    v_whs_cd=t.v_whs_cd
+                    ,sub_num=t.sub_num
+                    ,name=t.name
+                    ,kn_name=t.kn_name
+                    ,abb_name=t.abb_name
+                    ,postal_cd=t.postal_cd
+                    ,addr=t.addr
+                    ,kn_addr=t.kn_addr
+                    ,tel_num=t.tel_num
+                    ,v_hld_cd=t.v_hld_cd
+                    ,start_date=t.start_date
+                    ,end_date=t.end_date
+                    ,dsp_odr=t.dsp_odr
+                    ,rec_sts_kbn=t.rec_sts_kbn
+                    ,ins_dt=nullif(t.ins_dt, 0)
+                    ,upd_dt=t.upd_dt
+                    ,dwh_upd_dt=SYSDATE()
+                ;
+            """)
+        },
+
+        # Vメーカー卸組織展開表
+        CONDKEY_MKR_ORG_HORIZON: {
+            _KEY_DATA_NAME: "Vメーカー卸組織展開表",
+            _KEY_FILE_PREFIX: "mkr_org_horizon_",
+            _KEY_FILE_SUFFIX: ".gz",
+            _KEY_FILE_COLUMN_SEPARATORS: "45",
+            _KEY_ORG_TABLE: "org05.mkr_org_horizon_v",
+            _KEY_SRC_TABLE: "src05.mkr_org_horizon_v",
+            _KEY_UPSERT_SQL: textwrap.dedent("""\
+                INSERT INTO src05.mkr_org_horizon_v (
+                    vid_kind_1
+                    ,v_cd_1
+                    ,name_1
+                    ,dsp_odr_1
+                    ,vid_kind_2
+                    ,v_cd_2
+                    ,name_2
+                    ,dsp_odr_2
+                    ,vid_kind_3
+                    ,v_cd_3
+                    ,name_3
+                    ,dsp_odr_3
+                    ,vid_kind_4
+                    ,v_cd_4
+                    ,name_4
+                    ,dsp_odr_4
+                    ,vid_kind_5
+                    ,v_cd_5
+                    ,name_5
+                    ,dsp_odr_5
+                    ,vid_kind_6
+                    ,v_cd_6
+                    ,name_6
+                    ,dsp_odr_6
+                    ,vid_kind_7
+                    ,v_cd_7
+                    ,name_7
+                    ,dsp_odr_7
+                    ,vid_kind_8
+                    ,v_cd_8
+                    ,name_8
+                    ,dsp_odr_8
+                    ,vid_kind_9
+                    ,v_cd_9
+                    ,name_9
+                    ,dsp_odr_9
+                    ,vid_kind_10
+                    ,v_cd_10
+
,name_10 + ,dsp_odr_10 + ,v_whs_cd + ,start_date + ,end_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.vid_kind_1 + ,t.v_cd_1 + ,t.name_1 + ,t.dsp_odr_1 + ,t.vid_kind_2 + ,t.v_cd_2 + ,t.name_2 + ,t.dsp_odr_2 + ,t.vid_kind_3 + ,t.v_cd_3 + ,t.name_3 + ,t.dsp_odr_3 + ,t.vid_kind_4 + ,t.v_cd_4 + ,t.name_4 + ,t.dsp_odr_4 + ,t.vid_kind_5 + ,t.v_cd_5 + ,t.name_5 + ,t.dsp_odr_5 + ,t.vid_kind_6 + ,t.v_cd_6 + ,t.name_6 + ,t.dsp_odr_6 + ,t.vid_kind_7 + ,t.v_cd_7 + ,t.name_7 + ,t.dsp_odr_7 + ,t.vid_kind_8 + ,t.v_cd_8 + ,t.name_8 + ,t.dsp_odr_8 + ,t.vid_kind_9 + ,t.v_cd_9 + ,t.name_9 + ,t.dsp_odr_9 + ,t.vid_kind_10 + ,t.v_cd_10 + ,t.name_10 + ,t.dsp_odr_10 + ,t.v_whs_cd + ,t.start_date + ,t.end_date + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.mkr_org_horizon_v AS t + ON DUPLICATE KEY UPDATE + vid_kind_1=t.vid_kind_1 + ,v_cd_1=t.v_cd_1 + ,name_1=t.name_1 + ,dsp_odr_1=t.dsp_odr_1 + ,vid_kind_2=t.vid_kind_2 + ,v_cd_2=t.v_cd_2 + ,name_2=t.name_2 + ,dsp_odr_2=t.dsp_odr_2 + ,vid_kind_3=t.vid_kind_3 + ,v_cd_3=t.v_cd_3 + ,name_3=t.name_3 + ,dsp_odr_3=t.dsp_odr_3 + ,vid_kind_4=t.vid_kind_4 + ,v_cd_4=t.v_cd_4 + ,name_4=t.name_4 + ,dsp_odr_4=t.dsp_odr_4 + ,vid_kind_5=t.vid_kind_5 + ,v_cd_5=t.v_cd_5 + ,name_5=t.name_5 + ,dsp_odr_5=t.dsp_odr_5 + ,vid_kind_6=t.vid_kind_6 + ,v_cd_6=t.v_cd_6 + ,name_6=t.name_6 + ,dsp_odr_6=t.dsp_odr_6 + ,vid_kind_7=t.vid_kind_7 + ,v_cd_7=t.v_cd_7 + ,name_7=t.name_7 + ,dsp_odr_7=t.dsp_odr_7 + ,vid_kind_8=t.vid_kind_8 + ,v_cd_8=t.v_cd_8 + ,name_8=t.name_8 + ,dsp_odr_8=t.dsp_odr_8 + ,vid_kind_9=t.vid_kind_9 + ,v_cd_9=t.v_cd_9 + ,name_9=t.name_9 + ,dsp_odr_9=t.dsp_odr_9 + ,vid_kind_10=t.vid_kind_10 + ,v_cd_10=t.v_cd_10 + ,name_10=t.name_10 + ,dsp_odr_10=t.dsp_odr_10 + ,v_whs_cd=t.v_whs_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V卸組織変換マスタ + CONDKEY_ORG_CNV_MST: { + _KEY_DATA_NAME: "V卸組織変換マスタ", + _KEY_FILE_PREFIX: "org_cnv_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "10", + _KEY_ORG_TABLE: "org05.org_cnv_mst_v", + _KEY_SRC_TABLE: "src05.org_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.org_cnv_mst_v ( + whs_cd + ,whs_sub_cd + ,org_cd + ,sub_num + ,v_org_cd + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.whs_cd + ,t.whs_sub_cd + ,t.org_cd + ,t.sub_num + ,t.v_org_cd + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.org_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,org_cd=t.org_cd + ,sub_num=t.sub_num + ,v_org_cd=t.v_org_cd + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V取引区分マスタ + CONDKEY_TRAN_KBN_MST: { + _KEY_DATA_NAME: "V取引区分マスタ", + _KEY_FILE_PREFIX: "tran_kbn_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "8", + _KEY_ORG_TABLE: "org05.tran_kbn_mst_v", + _KEY_SRC_TABLE: "src05.tran_kbn_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.tran_kbn_mst_v ( + v_tran_cd + ,sub_num + ,name + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.v_tran_cd + ,t.sub_num + ,t.name + 
,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.tran_kbn_mst_v AS t + ON DUPLICATE KEY UPDATE + v_tran_cd=t.v_tran_cd + ,sub_num=t.sub_num + ,name=t.name + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V施設マスタ + CONDKEY_FCL_MST: { + _KEY_DATA_NAME: "V施設マスタ", + _KEY_FILE_PREFIX: "fcl_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "23", + _KEY_ORG_TABLE: "org05.fcl_mst_v", + _KEY_SRC_TABLE: "src05.fcl_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.fcl_mst_v ( + v_inst_cd + ,sub_num + ,start_date + ,end_date + ,closed_dt + ,fcl_name + ,fcl_kn_name + ,fcl_abb_name + ,fcl_abb_kn_name + ,mkr_cd + ,jsk_proc_kbn + ,fmt_addr + ,fmt_kn_addr + ,postal_cd + ,prft_cd + ,prft_name + ,city_name + ,addr_line_1 + ,tel_num + ,admin_kbn + ,fcl_type + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.v_inst_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.closed_dt + ,t.fcl_name + ,t.fcl_kn_name + ,t.fcl_abb_name + ,t.fcl_abb_kn_name + ,t.mkr_cd + ,t.jsk_proc_kbn + ,t.fmt_addr + ,t.fmt_kn_addr + ,t.postal_cd + ,t.prft_cd + ,t.prft_name + ,t.city_name + ,t.addr_line_1 + ,t.tel_num + ,t.admin_kbn + ,t.fcl_type + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.fcl_mst_v AS t + ON DUPLICATE KEY UPDATE + v_inst_cd=t.v_inst_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + ,end_date=t.end_date + ,closed_dt=t.closed_dt + ,fcl_name=t.fcl_name + ,fcl_kn_name=t.fcl_kn_name + ,fcl_abb_name=t.fcl_abb_name + ,fcl_abb_kn_name=t.fcl_abb_kn_name + ,mkr_cd=t.mkr_cd + ,jsk_proc_kbn=t.jsk_proc_kbn + ,fmt_addr=t.fmt_addr + ,fmt_kn_addr=t.fmt_kn_addr + ,postal_cd=t.postal_cd + ,prft_cd=t.prft_cd + ,prft_name=t.prft_name + ,city_name=t.city_name + ,addr_line_1=t.addr_line_1 + ,tel_num=t.tel_num + ,admin_kbn=t.admin_kbn + ,fcl_type=t.fcl_type + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V製品マスタ + CONDKEY_PHM_PRD_MST: { + _KEY_DATA_NAME: "V製品マスタ", + _KEY_FILE_PREFIX: "phm_prd_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "27", + _KEY_ORG_TABLE: "org05.phm_prd_mst_v", + _KEY_SRC_TABLE: "src05.phm_prd_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_prd_mst_v ( + prd_cd + ,sub_num + ,prd_name + ,prd_e_name + ,mkr_cd + ,mkr_inf_1 + ,mkr_inf_2 + ,phm_itm_cd + ,itm_name + ,itm_abb_name + ,form_cd + ,form_name + ,vol_cd + ,vol_name + ,cont_cd + ,cont_name + ,pkg_cd + ,pkg_name + ,cnv_num + ,jsk_start_dt + ,prd_sale_kbn + ,jsk_proc_kbn + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.prd_cd + ,t.sub_num + ,t.prd_name + ,t.prd_e_name + ,t.mkr_cd + ,t.mkr_inf_1 + ,t.mkr_inf_2 + ,t.phm_itm_cd + ,t.itm_name + ,t.itm_abb_name + ,t.form_cd + ,t.form_name + ,t.vol_cd + ,t.vol_name + ,t.cont_cd + ,t.cont_name + ,t.pkg_cd + ,t.pkg_name + ,t.cnv_num + ,nullif(t.jsk_start_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.prd_sale_kbn + ,t.jsk_proc_kbn + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.phm_prd_mst_v AS t + ON DUPLICATE KEY UPDATE + prd_cd=t.prd_cd + 
,sub_num=t.sub_num + ,prd_name=t.prd_name + ,prd_e_name=t.prd_e_name + ,mkr_cd=t.mkr_cd + ,mkr_inf_1=t.mkr_inf_1 + ,mkr_inf_2=t.mkr_inf_2 + ,phm_itm_cd=t.phm_itm_cd + ,itm_name=t.itm_name + ,itm_abb_name=t.itm_abb_name + ,form_cd=t.form_cd + ,form_name=t.form_name + ,vol_cd=t.vol_cd + ,vol_name=t.vol_name + ,cont_cd=t.cont_cd + ,cont_name=t.cont_name + ,pkg_cd=t.pkg_cd + ,pkg_name=t.pkg_name + ,cnv_num=t.cnv_num + ,jsk_start_dt=nullif(t.jsk_start_dt, 0) + ,prd_sale_kbn=t.prd_sale_kbn + ,jsk_proc_kbn=t.jsk_proc_kbn + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V製品価格マスタ + CONDKEY_PHM_PRICE_MST: { + _KEY_DATA_NAME: "V製品価格マスタ", + _KEY_FILE_PREFIX: "phm_price_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "9", + _KEY_ORG_TABLE: "org05.phm_price_mst_v", + _KEY_SRC_TABLE: "src05.phm_price_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.phm_price_mst_v ( + phm_prd_cd + ,phm_price_kind + ,sub_num + ,price + ,start_date + ,end_date + ,dsp_odr + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.phm_prd_cd + ,t.phm_price_kind + ,t.sub_num + ,t.price + ,t.start_date + ,t.end_date + ,t.dsp_odr + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.phm_price_mst_v AS t + ON DUPLICATE KEY UPDATE + phm_prd_cd=t.phm_prd_cd + ,phm_price_kind=t.phm_price_kind + ,sub_num=t.sub_num + ,price=t.price + ,start_date=t.start_date + ,end_date=t.end_date + ,dsp_odr=t.dsp_odr + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V施設統合マスタ + CONDKEY_VOP_HCO_MERGE: { + _KEY_DATA_NAME: "V施設統合マスタ", + _KEY_FILE_PREFIX: "vop_hco_merge_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "3", + _KEY_ORG_TABLE: "org05.vop_hco_merge_v", + _KEY_SRC_TABLE: "src05.vop_hco_merge_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.vop_hco_merge_v ( + v_inst_cd + ,v_inst_cd_merg + ,apply_dt + ,merge_reason + ,dwh_upd_dt + ) + SELECT + t.v_inst_cd + ,t.v_inst_cd_merg + ,t.apply_dt + ,t.merge_reason + ,SYSDATE() + FROM org05.vop_hco_merge_v AS t + ON DUPLICATE KEY UPDATE + v_inst_cd=t.v_inst_cd + ,v_inst_cd_merg=t.v_inst_cd_merg + ,apply_dt=t.apply_dt + ,merge_reason=t.merge_reason + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # V卸得意先情報マスタ + CONDKEY_WHS_CUSTOMER_MST: { + _KEY_DATA_NAME: "V卸得意先情報マスタ", + _KEY_FILE_PREFIX: "whs_customer_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "16", + _KEY_ORG_TABLE: "org05.whs_customer_mst_v", + _KEY_SRC_TABLE: "src05.whs_customer_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whs_customer_mst_v ( + whs_cd + ,whs_sub_cd + ,customer_cd + ,sub_num + ,start_date + ,end_date + ,whs_org_cd + ,src_org_cd + ,name + ,kn_name + ,addr + ,kn_addr + ,postal_cd + ,tel_num + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.whs_cd + ,t.whs_sub_cd + ,t.customer_cd + ,t.sub_num + ,t.start_date + ,t.end_date + ,t.whs_org_cd + ,t.src_org_cd + ,t.name + ,t.kn_name + ,t.addr + ,t.kn_addr + ,t.postal_cd + ,t.tel_num + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.whs_customer_mst_v AS t + ON DUPLICATE KEY UPDATE + whs_cd=t.whs_cd + ,whs_sub_cd=t.whs_sub_cd + ,customer_cd=t.customer_cd + ,sub_num=t.sub_num + ,start_date=t.start_date + 
,end_date=t.end_date + ,whs_org_cd=t.whs_org_cd + ,src_org_cd=t.src_org_cd + ,name=t.name + ,kn_name=t.kn_name + ,addr=t.addr + ,kn_addr=t.kn_addr + ,postal_cd=t.postal_cd + ,tel_num=t.tel_num + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # MDBコード変換表 + CONDKEY_MDB_CONV_MST: { + _KEY_DATA_NAME: "MDBコード変換表", + _KEY_FILE_PREFIX: "mdb_conv_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "7", + _KEY_ORG_TABLE: "org05.mdb_cnv_mst_v", + _KEY_SRC_TABLE: "src05.mdb_cnv_mst_v", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.mdb_cnv_mst_v ( + hco_vid_v + ,sub_num + ,mdb_cd + ,reliability + ,start_date + ,rec_sts_kbn + ,ins_dt + ,upd_dt + ,dwh_upd_dt + ) + SELECT + t.hco_vid_v + ,t.sub_num + ,t.mdb_cd + ,t.reliability + ,t.start_date + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.upd_dt + ,SYSDATE() + FROM org05.mdb_cnv_mst_v AS t + ON DUPLICATE KEY UPDATE + hco_vid_v=t.hco_vid_v + ,sub_num=t.sub_num + ,mdb_cd=t.mdb_cd + ,reliability=t.reliability + ,start_date=t.start_date + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,upd_dt=t.upd_dt + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # 卸在庫データ + CONDKEY_STOCK_SLIP_DATA: { + _KEY_DATA_NAME: "卸在庫データ", + _KEY_FILE_PREFIX: "stock_slip_data_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "28", + _KEY_ORG_TABLE: "org05.whole_stock", + _KEY_SRC_TABLE: "src05.whole_stock", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.whole_stock ( + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_sto_place + ,rec_stock_ymd + ,rec_comm_cd + ,rec_qty + ,rec_stock_no_sign + ,rec_jan_cd + ,free_item + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,exec_dt + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt + ) + SELECT + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_sto_place + ,t.rec_stock_ymd + ,t.rec_comm_cd + ,t.rec_qty + ,t.rec_stock_no_sign + ,t.rec_jan_cd + ,t.free_item + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.exec_dt + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.ins_usr + ,SYSDATE() + FROM org05.whole_stock AS t + ON DUPLICATE KEY UPDATE + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_sto_place=t.rec_sto_place + ,rec_stock_ymd=t.rec_stock_ymd + ,rec_comm_cd=t.rec_comm_cd + ,rec_qty=t.rec_qty + ,rec_stock_no_sign=t.rec_stock_no_sign + ,rec_jan_cd=t.rec_jan_cd + ,free_item=t.free_item + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,exec_dt=t.exec_dt + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # 生物由来データ + CONDKEY_BIO_SLIP_DATA: { + _KEY_DATA_NAME: "生物由来データ", + _KEY_FILE_PREFIX: "bio_slip_data_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "77", + 
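+            # MEMO: file_column_separators は受領tsvの1行あたりのTAB数(=列数-1)を表す想定(例: "77"ならタブ77個・78列)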
_KEY_ORG_TABLE: "org05.bio_sales", + _KEY_SRC_TABLE: "src05.bio_sales", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.bio_sales ( + rec_data + ,rec_whs_cd + ,rec_whs_sub_cd + ,rec_whs_org_cd + ,rec_cust_cd + ,rec_comm_cd + ,rec_tran_kbn + ,rev_hsdnymd_wrk + ,rev_hsdnymd_srk + ,rec_urag_num + ,rec_comm_name + ,rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr + ,rec_lot_num1 + ,rec_qty1 + ,rec_lot_num2 + ,rec_qty2 + ,rec_lot_num3 + ,rec_qty3 + ,rec_ymd + ,sale_data_cat + ,slip_file_name + ,slip_mgt_num + ,row_num + ,hsdn_ymd + ,exec_dt + ,v_tran_cd + ,tran_kbn_name + ,whs_org_cd + ,v_whsorg_cd + ,whs_org_name + ,whs_org_kn + ,v_whs_cd + ,whs_name + ,nonyu_fcl_cd + ,v_inst_cd + ,v_inst_name + ,v_inst_kn + ,v_inst_addr + ,comm_cd + ,product_name + ,htdnymd_err_kbn + ,prd_exis_kbn + ,fcl_exis_kbn + ,qty1 + ,qty2 + ,qty3 + ,slip_org_kbn + ,bef_slip_mgt_num + ,whs_rep_comm_name + ,whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr + ,err_flg1 + ,err_flg2 + ,err_flg3 + ,err_flg4 + ,err_flg5 + ,err_flg6 + ,err_flg7 + ,err_flg8 + ,err_flg9 + ,err_flg10 + ,err_flg11 + ,err_flg12 + ,err_flg13 + ,err_flg14 + ,err_flg15 + ,err_flg16 + ,err_flg17 + ,err_flg18 + ,err_flg19 + ,err_flg20 + ,kjyo_ym + ,tksnbk_kbn + ,fcl_exec_kbn + ,rec_sts_kbn + ,ins_dt + ,ins_usr + ,dwh_upd_dt + ) + SELECT + t.rec_data + ,t.rec_whs_cd + ,t.rec_whs_sub_cd + ,t.rec_whs_org_cd + ,t.rec_cust_cd + ,t.rec_comm_cd + ,t.rec_tran_kbn + ,t.rev_hsdnymd_wrk + ,t.rev_hsdnymd_srk + ,t.rec_urag_num + ,t.rec_comm_name + ,t.rec_nonyu_fcl_name + ,t.rec_nonyu_fcl_addr + ,t.rec_lot_num1 + ,t.rec_qty1 + ,t.rec_lot_num2 + ,t.rec_qty2 + ,t.rec_lot_num3 + ,t.rec_qty3 + ,t.rec_ymd + ,t.sale_data_cat + ,t.slip_file_name + ,t.slip_mgt_num + ,t.row_num + ,t.hsdn_ymd + ,t.exec_dt + ,t.v_tran_cd + ,t.tran_kbn_name + ,t.whs_org_cd + ,t.v_whsorg_cd + ,t.whs_org_name + ,t.whs_org_kn + ,t.v_whs_cd + ,t.whs_name + ,t.nonyu_fcl_cd + ,t.v_inst_cd + ,t.v_inst_name + ,t.v_inst_kn + ,t.v_inst_addr + ,t.comm_cd + ,t.product_name + ,t.htdnymd_err_kbn + ,t.prd_exis_kbn + ,t.fcl_exis_kbn + ,t.qty1 + ,t.qty2 + ,t.qty3 + ,t.slip_org_kbn + ,t.bef_slip_mgt_num + ,t.whs_rep_comm_name + ,t.whs_rep_nonyu_fcl_name + ,t.whs_rep_nonyu_fcl_addr + ,t.err_flg1 + ,t.err_flg2 + ,t.err_flg3 + ,t.err_flg4 + ,t.err_flg5 + ,t.err_flg6 + ,t.err_flg7 + ,t.err_flg8 + ,t.err_flg9 + ,t.err_flg10 + ,t.err_flg11 + ,t.err_flg12 + ,t.err_flg13 + ,t.err_flg14 + ,t.err_flg15 + ,t.err_flg16 + ,t.err_flg17 + ,t.err_flg18 + ,t.err_flg19 + ,t.err_flg20 + ,t.kjyo_ym + ,t.tksnbk_kbn + ,t.fcl_exec_kbn + ,t.rec_sts_kbn + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.ins_usr + ,SYSDATE() + FROM org05.bio_sales AS t + ON DUPLICATE KEY UPDATE + rec_data=t.rec_data + ,rec_whs_cd=t.rec_whs_cd + ,rec_whs_sub_cd=t.rec_whs_sub_cd + ,rec_whs_org_cd=t.rec_whs_org_cd + ,rec_cust_cd=t.rec_cust_cd + ,rec_comm_cd=t.rec_comm_cd + ,rec_tran_kbn=t.rec_tran_kbn + ,rev_hsdnymd_wrk=t.rev_hsdnymd_wrk + ,rev_hsdnymd_srk=t.rev_hsdnymd_srk + ,rec_urag_num=t.rec_urag_num + ,rec_comm_name=t.rec_comm_name + ,rec_nonyu_fcl_name=t.rec_nonyu_fcl_name + ,rec_nonyu_fcl_addr=t.rec_nonyu_fcl_addr + ,rec_lot_num1=t.rec_lot_num1 + ,rec_qty1=t.rec_qty1 + ,rec_lot_num2=t.rec_lot_num2 + ,rec_qty2=t.rec_qty2 + ,rec_lot_num3=t.rec_lot_num3 + ,rec_qty3=t.rec_qty3 + ,rec_ymd=t.rec_ymd + ,sale_data_cat=t.sale_data_cat + ,slip_file_name=t.slip_file_name + ,slip_mgt_num=t.slip_mgt_num + ,row_num=t.row_num + ,hsdn_ymd=t.hsdn_ymd + ,exec_dt=t.exec_dt + ,v_tran_cd=t.v_tran_cd + ,tran_kbn_name=t.tran_kbn_name + 
,whs_org_cd=t.whs_org_cd + ,v_whsorg_cd=t.v_whsorg_cd + ,whs_org_name=t.whs_org_name + ,whs_org_kn=t.whs_org_kn + ,v_whs_cd=t.v_whs_cd + ,whs_name=t.whs_name + ,nonyu_fcl_cd=t.nonyu_fcl_cd + ,v_inst_cd=t.v_inst_cd + ,v_inst_name=t.v_inst_name + ,v_inst_kn=t.v_inst_kn + ,v_inst_addr=t.v_inst_addr + ,comm_cd=t.comm_cd + ,product_name=t.product_name + ,htdnymd_err_kbn=t.htdnymd_err_kbn + ,prd_exis_kbn=t.prd_exis_kbn + ,fcl_exis_kbn=t.fcl_exis_kbn + ,qty1=t.qty1 + ,qty2=t.qty2 + ,qty3=t.qty3 + ,slip_org_kbn=t.slip_org_kbn + ,bef_slip_mgt_num=t.bef_slip_mgt_num + ,whs_rep_comm_name=t.whs_rep_comm_name + ,whs_rep_nonyu_fcl_name=t.whs_rep_nonyu_fcl_name + ,whs_rep_nonyu_fcl_addr=t.whs_rep_nonyu_fcl_addr + ,err_flg1=t.err_flg1 + ,err_flg2=t.err_flg2 + ,err_flg3=t.err_flg3 + ,err_flg4=t.err_flg4 + ,err_flg5=t.err_flg5 + ,err_flg6=t.err_flg6 + ,err_flg7=t.err_flg7 + ,err_flg8=t.err_flg8 + ,err_flg9=t.err_flg9 + ,err_flg10=t.err_flg10 + ,err_flg11=t.err_flg11 + ,err_flg12=t.err_flg12 + ,err_flg13=t.err_flg13 + ,err_flg14=t.err_flg14 + ,err_flg15=t.err_flg15 + ,err_flg16=t.err_flg16 + ,err_flg17=t.err_flg17 + ,err_flg18=t.err_flg18 + ,err_flg19=t.err_flg19 + ,err_flg20=t.err_flg20 + ,kjyo_ym=t.kjyo_ym + ,tksnbk_kbn=t.tksnbk_kbn + ,fcl_exec_kbn=t.fcl_exec_kbn + ,rec_sts_kbn=t.rec_sts_kbn + ,ins_dt=nullif(t.ins_dt, 0) + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + + # ロットマスタデータ + CONDKEY_LOT_NUM_MST: { + _KEY_DATA_NAME: "ロットマスタデータ", + _KEY_FILE_PREFIX: "lot_num_mst_", + _KEY_FILE_SUFFIX: ".gz", + _KEY_FILE_COLUMN_SEPARATORS: "5", + _KEY_ORG_TABLE: "org05.lot_num_mst", + _KEY_SRC_TABLE: "src05.lot_num_mst", + _KEY_UPSERT_SQL: textwrap.dedent("""\ + INSERT INTO src05.lot_num_mst ( + ser_num + ,lot_num + ,expr_dt + ,frst_mov_dt + ,ins_dt + ,ins_usr + ,dwh_upd_dt + ) + SELECT + t.ser_num + ,t.lot_num + ,t.expr_dt + ,t.frst_mov_dt + ,nullif(t.ins_dt, 0) -- 受領データがブランクだった場合にゼロ日付で取得されるので明示的にNULL値に変換する + ,t.ins_usr + ,SYSDATE() + FROM org05.lot_num_mst AS t + ON DUPLICATE KEY UPDATE + ser_num=t.ser_num + ,lot_num=t.lot_num + ,expr_dt=t.expr_dt + ,frst_mov_dt=t.frst_mov_dt + ,ins_dt=nullif(t.ins_dt, 0) + ,ins_usr=t.ins_usr + ,dwh_upd_dt=SYSDATE() + ; + """) + }, + } + + def _get_interface_property(self, condkey: str, property_name: str) -> str: + ret = None + if condkey in self._VJSK_INTERFACE_MAPPING: + ret = self._VJSK_INTERFACE_MAPPING.get(condkey).get(property_name) + return ret + + def get_data_name(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_DATA_NAME) + + def get_file_prefix(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_FILE_PREFIX) + + def get_file_suffix(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_FILE_SUFFIX) + + def get_file_column_separators(self, condkey: str) -> int: + return int(self._get_interface_property(condkey, self._KEY_FILE_COLUMN_SEPARATORS)) + + def get_org_table(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_ORG_TABLE) + + def get_src_table(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_SRC_TABLE) + + def get_upsert_sql(self, condkey: str) -> str: + return self._get_interface_property(condkey, self._KEY_UPSERT_SQL) + + def get_condkey_by_s3_file_path(self, s3_file_path: str) -> str: + ret = None + filename = s3_file_path[s3_file_path.rfind("/") + 1:] + for condkey in self._VJSK_INTERFACE_MAPPING: + element = self._VJSK_INTERFACE_MAPPING.get(condkey) + if 
filename.startswith(element.get(self._KEY_FILE_PREFIX)) \ + and filename.endswith(element.get(self._KEY_FILE_SUFFIX)): + ret = condkey + break + return ret diff --git a/ecs/jskult-batch-daily/src/db/database.py b/ecs/jskult-batch-daily/src/db/database.py index f67a21b9..03c9c068 100644 --- a/ecs/jskult-batch-daily/src/db/database.py +++ b/ecs/jskult-batch-daily/src/db/database.py @@ -13,15 +13,17 @@ logger = get_logger(__name__) class Database: """データベース操作クラス""" __connection: Connection = None - __engine: Engine = None + __transactional_engine: Engine = None + __autocommit_engine: Engine = None __host: str = None __port: str = None __username: str = None __password: str = None __schema: str = None + __autocommit: bool = None __connection_string: str = None - def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None: + def __init__(self, username: str, password: str, host: str, port: int, schema: str, autocommit: bool = False) -> None: """このクラスの新たなインスタンスを初期化します Args: @@ -30,12 +32,14 @@ class Database: host (str): DBホスト名 port (int): DBポート schema (str): DBスキーマ名 + autocommit(bool): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. """ self.__username = username self.__password = password self.__host = host self.__port = int(port) self.__schema = schema + self.__autocommit = autocommit self.__connection_string = URL.create( drivername='mysql+pymysql', @@ -44,19 +48,23 @@ class Database: host=self.__host, port=self.__port, database=self.__schema, - query={"charset": "utf8mb4"} + query={"charset": "utf8mb4", "local_infile": "1"}, ) - self.__engine = create_engine( + self.__transactional_engine = create_engine( self.__connection_string, pool_timeout=5, poolclass=QueuePool ) + self.__autocommit_engine = self.__transactional_engine.execution_options(isolation_level='AUTOCOMMIT') + @classmethod - def get_instance(cls): + def get_instance(cls, autocommit=False): """インスタンスを取得します + Args: + autocommit (bool, optional): 自動コミットモードで接続するかどうか(Trueの場合、トランザクションの有無に限らず即座にコミットされる). Defaults to False. 
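+
+        Example:
+            db = Database.get_instance(autocommit=True)  # 即時コミットで実行したい処理(LOAD DATA等)での利用イメージ(想定例)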
Returns: Database: DB操作クラスインスタンス """ @@ -65,7 +73,8 @@ class Database: password=environment.DB_PASSWORD, host=environment.DB_HOST, port=environment.DB_PORT, - schema=environment.DB_SCHEMA + schema=environment.DB_SCHEMA, + autocommit=autocommit ) @retry( @@ -77,12 +86,15 @@ class Database: stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT)) def connect(self): """ - DBに接続します。接続に失敗した場合、リトライします。 + DBに接続します。接続に失敗した場合、リトライします。\n + インスタンスのautocommitがTrueの場合、自動コミットモードで接続する。(明示的なトランザクションも無視される) Raises: DBException: 接続失敗 """ try: - self.__connection = self.__engine.connect() + self.__connection = ( + self.__autocommit_engine.connect() if self.__autocommit is True + else self.__transactional_engine.connect()) except Exception as e: raise DBException(e) diff --git a/ecs/jskult-batch-daily/src/jobctrl_daily.py b/ecs/jskult-batch-daily/src/jobctrl_daily.py index a98c0d16..3bd09a8d 100644 --- a/ecs/jskult-batch-daily/src/jobctrl_daily.py +++ b/ecs/jskult-batch-daily/src/jobctrl_daily.py @@ -10,9 +10,11 @@ from src.batch.common.calendar_file import CalendarFile from src.batch.dcf_inst_merge import create_dcf_inst_merge from src.batch.laundering import mst_inst_laundering from src.batch.ultmarc import ultmarc_process +from src.batch.vjsk import vjsk_importer from src.error.exceptions import BatchOperationException from src.logging.get_logger import get_logger from src.system_var import constants +from src.batch.ultmarc import output_vjsk_inst_pharm_data logger = get_logger('日次処理コントロール') @@ -75,16 +77,17 @@ def exec(): logger.debug(f'{"アルトマーク取込が行われました。" if batch_context.is_ultmarc_imported else "アルトマーク取込が行われませんでした。"}') try: - logger.info('V実消化用施設・薬局薬店データ作成処理:起動') - ultmarc_process.exec_export() - logger.info('V実消化用施設・薬局薬店データ作成処理:終了') + logger.info('V実消化用施設データ作成処理:起動') + output_vjsk_inst_pharm_data.exec() + logger.info('V実消化用施設データ作成処理:終了') except BatchOperationException as e: - logger.exception(f'V実消化用施設・薬局薬店データ作成処理エラー(異常終了){e}') + logger.exception(f'V実消化用施設データ作成処理エラー(異常終了){e}') return constants.BATCH_EXIT_CODE_SUCCESS logger.info('日次処理(V実消化)') try: logger.info('V実消化取込:起動') + vjsk_importer.exec() logger.info('V実消化取込:終了') except BatchOperationException as e: logger.exception(f'V実消化取込処理エラー(異常終了){e}') diff --git a/ecs/jskult-batch-daily/src/system_var/environment.py b/ecs/jskult-batch-daily/src/system_var/environment.py index b1730224..42ed6073 100644 --- a/ecs/jskult-batch-daily/src/system_var/environment.py +++ b/ecs/jskult-batch-daily/src/system_var/environment.py @@ -12,9 +12,14 @@ ULTMARC_DATA_BUCKET = os.environ['ULTMARC_DATA_BUCKET'] ULTMARC_DATA_FOLDER = os.environ['ULTMARC_DATA_FOLDER'] JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET'] ULTMARC_BACKUP_FOLDER = os.environ['ULTMARC_BACKUP_FOLDER'] +VJSK_BACKUP_FOLDER = os.environ['VJSK_BACKUP_FOLDER'] JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET'] JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER'] JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_HOLIDAY_LIST_FILE_NAME'] +VJSK_DATA_SEND_FOLDER = os.environ['VJSK_DATA_SEND_FOLDER'] +VJSK_DATA_BUCKET = os.environ['VJSK_DATA_BUCKET'] +JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_WHOLESALER_STOCK_FILE_NAME'] +VJSK_DATA_RECEIVE_FOLDER = os.environ['VJSK_DATA_RECEIVE_FOLDER'] # 初期値がある環境変数 LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO') @@ -22,3 +27,10 @@ DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_AT DB_CONNECTION_RETRY_INTERVAL_INIT = 
int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL', 5))
 DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MIN_SECONDS', 5))
 DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_MAX_SECONDS', 50))
+
+# 連携データ抽出期間
+SALES_LAUNDERING_EXTRACT_DATE_PERIOD = int(os.environ['SALES_LAUNDERING_EXTRACT_DATE_PERIOD'])
+# 洗替対象テーブル名
+SALES_LAUNDERING_TARGET_TABLE_NAME = os.environ['SALES_LAUNDERING_TARGET_TABLE_NAME']
+# 卸実績洗替で作成するデータの期間(年単位)
+SALES_LAUNDERING_TARGET_YEAR_OFFSET = int(os.environ['SALES_LAUNDERING_TARGET_YEAR_OFFSET'])
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py
new file mode 100644
index 00000000..dccdd0df
--- /dev/null
+++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/conftest.py
@@ -0,0 +1,1377 @@
+"""vjsk_file_check用テストフィクスチャoverride"""
+
+
+import os
+
+import boto3
+import pytest
+
+
+@pytest.fixture
+def s3_client():
+    conn = boto3.client('s3')
+    yield conn
+
+
+@pytest.fixture
+def bucket_name():
+    return os.environ["VJSK_DATA_BUCKET"]
+
+
+@pytest.fixture
+def receive_folder():
+    return os.environ["VJSK_DATA_RECEIVE_FOLDER"]
+
+
+# TODO 共通fixtureにして15個固定でput/delete、各個別fixtureで15個から引き算でdeleteする
+@pytest.fixture
+def init_check_received_files_ok1(s3_client, bucket_name, receive_folder):
+    # setup
+
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+
+    yield
+
+    # teardown
+
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/stock_slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/hld_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/fcl_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_price_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/bio_slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/lot_num_mst_00000000000000.gz')
+
+
+@pytest.fixture
+def init_check_received_files_ok2(s3_client, bucket_name, receive_folder):
+    # setup
+
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+
+    yield
+
+    # teardown
+
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/hld_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/fcl_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_price_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/bio_slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/lot_num_mst_00000000000000.gz')
+
+
+@pytest.fixture
+def init_check_received_files_ng01(s3_client, bucket_name, receive_folder):
+    # setup
+
+    # s3_client.put_object(Bucket=bucket_name,
+    #                      Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+
+    yield
+
+    # teardown
+
+    # s3_client.delete_object(Bucket=bucket_name,
+    #                         Key=f'{receive_folder}/stock_slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/hld_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/fcl_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/phm_price_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/bio_slip_data_00000000000000.gz')
+    s3_client.delete_object(Bucket=bucket_name,
+                            Key=f'{receive_folder}/lot_num_mst_00000000000000.gz')
+
+
+@pytest.fixture
+def init_check_received_files_ng02(s3_client, bucket_name, receive_folder):
+    # setup
+
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    # s3_client.put_object(Bucket=bucket_name,
+    #                      Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+    s3_client.put_object(Bucket=bucket_name,
+                         Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa')
+
yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng03(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng04(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng05(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng06(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng07(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng08(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng09(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng10(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng11(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng12(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng13(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng14(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + 
s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng15(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', 
Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + # s3_client.delete_object(Bucket=bucket_name, + # Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng16(s3_client, bucket_name, receive_folder): + # setup + + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
Key=f'{receive_folder}/phm_price_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + + yield + + # treadown + + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/stock_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/tran_kbn_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_prd_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/phm_price_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_customer_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/mdb_conv_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/bio_slip_data_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/lot_num_mst_00000000000000.gz') + s3_client.delete_object(Bucket=bucket_name, + Key=f'{receive_folder}/dummy_00000000000000.gz') + + +@pytest.fixture +def init_check_received_files_ng17(s3_client, bucket_name, receive_folder): + # setup + + # s3_client.put_object(Bucket=bucket_name, + # Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/vop_hco_merge_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/whs_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/hld_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/fcl_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + Key=f'{receive_folder}/mkr_org_horizon_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + s3_client.put_object(Bucket=bucket_name, + 
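The per-case fixtures above differ only in which single file they withhold. pytest's indirect parametrization could drive all fifteen missing-file cases from one fixture; a minimal sketch, assuming the `_received_files` helper above (the fixture name, the parametrize table, and the indirect wiring are hypothetical, not part of this change):

    @pytest.fixture
    def init_check_received_files_missing(request, s3_client, bucket_name,
                                          receive_folder):
        # request.param names the file to withhold for this test case.
        yield from _received_files(s3_client, bucket_name, receive_folder,
                                   exclude=request.param)

The test module would then pair each withheld file with the message prefix it expects:

    @pytest.mark.parametrize(
        'init_check_received_files_missing, expected_prefix',
        [
            ('stock_slip_data_00000000000000.gz', '卸在庫データファイルがありません'),
            ('slip_data_00000000000000.gz', '卸販売データファイルがありません'),
            # ...one row per remaining received file
        ],
        indirect=['init_check_received_files_missing'],
    )
    def test_check_received_files_missing(init_check_received_files_missing,
                                          expected_prefix):
        batch_context = BatchContext.get_instance()
        batch_context.is_vjsk_stock_import_day = True

        with pytest.raises(BatchOperationException) as e:
            _check_received_files()

        assert str(e.value).startswith(expected_prefix)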
diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py
new file mode 100644
index 00000000..1c523d0e
--- /dev/null
+++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_file_check/test_vjsk_file_check.py
@@ -0,0 +1,303 @@
+import pytest
+
+from src.batch.common.batch_context import BatchContext
+from src.batch.vjsk.vjsk_importer import _check_received_files
+from src.error.exceptions import BatchOperationException
+
+
+def test_check_received_files_ok1(init_check_received_files_ok1):
+    """
+    Viewpoint
+        Normal case: wholesaler stock data import day
+    Expected
+        No exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    received_s3_files = _check_received_files()
+
+    assert received_s3_files is not None
+
+
+def test_check_received_files_ok2(init_check_received_files_ok2):
+    """
+    Viewpoint
+        Normal case: not a wholesaler stock data import day
+    Expected
+        No exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = False
+
+    received_s3_files = _check_received_files()
+
+    assert received_s3_files is not None
+
+
+def test_check_received_files_ng01(init_check_received_files_ng01):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler stock data file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸在庫データファイルがありません")
+
+
+def test_check_received_files_ng02(init_check_received_files_ng02):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler sales data file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸販売データファイルがありません")
+
+
+def test_check_received_files_ng03(init_check_received_files_ng03):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler organization conversion master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸組織変換マスタファイルがありません")
+
+
+def test_check_received_files_ng04(init_check_received_files_ng04):
+    """
+    Viewpoint
+        Abnormal case: the facility merge master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("施設統合マスタファイルがありません")
+
+
+def test_check_received_files_ng05(init_check_received_files_ng05):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸マスタファイルがありません")
+
+
+def test_check_received_files_ng06(init_check_received_files_ng06):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler holdings master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸ホールディングスマスタファイルがありません")
+
+
+def test_check_received_files_ng07(init_check_received_files_ng07):
+    """
+    Viewpoint
+        Abnormal case: the facility master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("施設マスタファイルがありません")
+
+
+def test_check_received_files_ng08(init_check_received_files_ng08):
+    """
+    Viewpoint
+        Abnormal case: the maker wholesaler organization mapping file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("メーカー卸組織展開表ファイルがありません")
+
+
+def test_check_received_files_ng09(init_check_received_files_ng09):
+    """
+    Viewpoint
+        Abnormal case: the transaction category master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("取引区分マスタファイルがありません")
+
+
+def test_check_received_files_ng10(init_check_received_files_ng10):
+    """
+    Viewpoint
+        Abnormal case: the product master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("製品マスタファイルがありません")
+
+
+def test_check_received_files_ng11(init_check_received_files_ng11):
+    """
+    Viewpoint
+        Abnormal case: the product price master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("製品価格マスタファイルがありません")
+
+
+def test_check_received_files_ng12(init_check_received_files_ng12):
+    """
+    Viewpoint
+        Abnormal case: the wholesaler customer master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("卸得意先情報マスタファイルがありません")
+
+
+def test_check_received_files_ng13(init_check_received_files_ng13):
+    """
+    Viewpoint
+        Abnormal case: the MDB code conversion master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("MDBコード変換マスタファイルがありません")
+
+
+def test_check_received_files_ng14(init_check_received_files_ng14):
+    """
+    Viewpoint
+        Abnormal case: the biological-origin data file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("生物由来データファイルがありません")
+
+
+def test_check_received_files_ng15(init_check_received_files_ng15):
+    """
+    Viewpoint
+        Abnormal case: the manufacturing lot number master file is missing
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("製造ロット番号マスタファイルがありません")
+
+
+def test_check_received_files_ng16(init_check_received_files_ng16):
+    """
+    Viewpoint
+        Abnormal case: an unexpected file was received (stock data import day)
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = True
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("想定数を超える受領ファイルがあります")
+
+
+def test_check_received_files_ng17(init_check_received_files_ng17):
+    """
+    Viewpoint
+        Abnormal case: an unexpected file was received (not a stock data import day)
+    Expected
+        An exception is raised
+    """
+    batch_context = BatchContext.get_instance()
+    batch_context.is_vjsk_stock_import_day = False
+
+    with pytest.raises(BatchOperationException) as e:
+        _check_received_files()
+
+    assert str(e.value).startswith("想定数を超える受領ファイルがあります")
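One test-hygiene note: `BatchContext.get_instance()` returns a shared singleton, so the `is_vjsk_stock_import_day` flag assigned in one test survives into the next; the tests above stay correct only because every one of them sets the flag explicitly. A defensive reset could live in this package's conftest (a sketch; `reset_import_day_flag` is a hypothetical name, and the save/restore assumes the flag is plain instance state):

    import pytest

    from src.batch.common.batch_context import BatchContext


    @pytest.fixture(autouse=True)
    def reset_import_day_flag():
        # Save the flag, run the test, then restore it so one test's
        # assignment cannot leak into the next.
        context = BatchContext.get_instance()
        saved = context.is_vjsk_stock_import_day
        yield
        context.is_vjsk_stock_import_day = saved

The message checks could also be folded into the raise assertion itself, since `pytest.raises` accepts a `match=` pattern that is applied with `re.search` against `str(e.value)`:

    with pytest.raises(BatchOperationException,
                       match='卸在庫データファイルがありません'):
        _check_received_files()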
"xxxxxxxxxxxx000000000000.gz" + # s3_client.upload_file() + + +def test_check_received_files_ok2(init_check_received_files_ok2): + """ + 観点 + 正常系 : 卸在庫データ取込対象日以外 + 期待値 + 例外が発生しない + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = False + + received_s3_files = _check_received_files() + # with pytest.raises(BatchOperationException): + # received_s3_files = _check_received_files() + + assert received_s3_files is not None + + +def test_check_received_files_ng01(init_check_received_files_ng01): + """ + 観点 + 異常系 : 卸在庫データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸在庫データファイルがありません") > 0 + + +def test_check_received_files_ng02(init_check_received_files_ng02): + """ + 観点 + 異常系 : 卸販売データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸販売データファイルがありません") > 0 + + +def test_check_received_files_ng03(init_check_received_files_ng03): + """ + 観点 + 異常系 : 卸組織変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸組織変換マスタファイルがありません") > 0 + + +def test_check_received_files_ng04(init_check_received_files_ng04): + """ + 観点 + 異常系 : 施設統合マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設統合マスタファイルがありません") > 0 + + +def test_check_received_files_ng05(init_check_received_files_ng05): + """ + 観点 + 異常系 : 卸マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸マスタファイルがありません") > 0 + + +def test_check_received_files_ng06(init_check_received_files_ng06): + """ + 観点 + 異常系 : 卸ホールディングスマスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸ホールディングスマスタファイルがありません") > 0 + + +def test_check_received_files_ng07(init_check_received_files_ng07): + """異常系 : 施設マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("施設マスタファイルがありません") > 0 + + +def test_check_received_files_ng08(init_check_received_files_ng08): + """ + 観点 + 異常系 : メーカー卸組織展開表ファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("メーカー卸組織展開表ファイルがありません") > 0 + + +def test_check_received_files_ng09(init_check_received_files_ng09): + """異常系 : 取引区分マスタファイルが欠落""" + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with 
+ + +def test_check_received_files_ng09(init_check_received_files_ng09): + """ + 観点 + 異常系 : 取引区分マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("取引区分マスタファイルがありません") + + +def test_check_received_files_ng10(init_check_received_files_ng10): + """ + 観点 + 異常系 : 製品マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製品マスタファイルがありません") + + +def test_check_received_files_ng11(init_check_received_files_ng11): + """ + 観点 + 異常系 : 製品価格マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製品価格マスタファイルがありません") + + +def test_check_received_files_ng12(init_check_received_files_ng12): + """ + 観点 + 異常系 : 卸得意先情報マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("卸得意先情報マスタファイルがありません") + + +def test_check_received_files_ng13(init_check_received_files_ng13): + """ + 観点 + 異常系 : MDBコード変換マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("MDBコード変換マスタファイルがありません") + + +def test_check_received_files_ng14(init_check_received_files_ng14): + """ + 観点 + 異常系 : 生物由来データファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("生物由来データファイルがありません") + + +def test_check_received_files_ng15(init_check_received_files_ng15): + """ + 観点 + 異常系 : 製造ロット番号マスタファイルが欠落 + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("製造ロット番号マスタファイルがありません") + + +def test_check_received_files_ng16(init_check_received_files_ng16): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = True + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("想定数を超える受領ファイルがあります") + + +def test_check_received_files_ng17(init_check_received_files_ng17): + """ + 観点 + 異常系 : 想定外のファイルが受領されている(卸在庫データ取込対象日以外) + 期待値 + 例外が発生する + """ + batch_context = BatchContext.get_instance() + batch_context.is_vjsk_stock_import_day = False + + with pytest.raises(BatchOperationException) as e: + _check_received_files() + + assert str(e.value).startswith("想定数を超える受領ファイルがあります") diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py new file mode 100644 index 00000000..dc77a65f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/conftest.py @@ -0,0 +1,41 @@
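+# 前提: 環境変数 VJSK_DATA_BUCKET / VJSK_DATA_RECEIVE_FOLDER が設定されていること(.env.example参照)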
+"""vjsk_load用テストフィクスチャoverride""" +import os + +import boto3 +import pytest + +from src.batch.vjsk.vjsk_recv_file_mapper import VjskReceiveFileMapper + + +@pytest.fixture +def s3_client(): + conn = boto3.client('s3') + yield conn + + +@pytest.fixture +def bucket_name(): + return os.environ["VJSK_DATA_BUCKET"] + + +@pytest.fixture +def receive_folder(): + return os.environ["VJSK_DATA_RECEIVE_FOLDER"] + + +@pytest.fixture +def mapper(): + return VjskReceiveFileMapper() + +# @pytest.fixture +# def init_Load_ok(s3_client, bucket_name, receive_folder): +# # setup + +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/stock_slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/slip_data_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') +# s3_client.put_object(Bucket=bucket_name, +# Key=f'{receive_folder}/org_cnv_mst_00000000000000.gz', Body=b'aaaaaaaaaaaaaaa') + +# # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py new file mode 100644 index 00000000..2dbe2ef5 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/test_vjsk_load.py @@ -0,0 +1,1295 @@ +import time +from os import path + +import pytest + +from src.aws.s3 import VjskReceiveBucket +from src.batch.common.batch_context import BatchContext +from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager +# from src.batch.vjsk.vjsk_data_load_manager import VjskDataLoadManager +from src.batch.vjsk.vjsk_importer import (_check_received_files, + _import_file_to_db) +from src.db.database import Database +from src.error.exceptions import BatchOperationException +# from tests.testing_vjsk_utility import create_vjsk_assertion_dictionary +from tests.testing_vjsk_utility import (assert_table_results, + create_vjsk_assertion_list) + + +class TestImportFileToDb: + db: Database + batch_context: BatchContext + test_file_path_import_all: str + test_file_path_load_individual: str + test_file_path_unzip_error: str + + @pytest.fixture(autouse=True, scope='function') + def pre_test(self, database: Database): + """テスト実行前後処理""" + # setup + self.test_file_path_import_all = path.join(path.dirname(__file__), "testdata", "TestImportFileToDb") + self.test_file_path_load_individual = path.join(path.dirname(__file__), "testdata") + self.test_file_path_unzip_error = path.join(path.dirname(__file__), "testdata", "UnzipError") + + self.batch_context = BatchContext.get_instance() + + self.db = database + self.db.connect() + self.db.execute("set sql_mode = 'TRADITIONAL';") + # self.db.begin() + + # testing + yield + + # teardown + # self.db.rollback() + self.db.disconnect() + + def test_import_file_to_db_ok(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 正常系 : すべての受領データをデータベースに登録できる + 期待値 + 例外が発生しない + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + 
"tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_import_all, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_HLD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_FCL_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA)}") + self.db.execute(f"truncate table {mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST)}") + + # assertion + received_s3_files = _check_received_files() + _import_file_to_db(received_s3_files) + + # 検証 + condkey = mapper.CONDKEY_STOCK_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_ORG_CNV_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_VOP_HCO_MERGE + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_HLD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_FCL_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MKR_ORG_HORIZON + assert 10 == len(self.db.execute_select(f"select * from 
{mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_TRAN_KBN_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRD_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_PHM_PRICE_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_WHS_CUSTOMER_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_MDB_CONV_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_BIO_SLIP_DATA + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + condkey = mapper.CONDKEY_LOT_NUM_MST + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_org_table(condkey)}")) + assert 10 == len(self.db.execute_select(f"select * from {mapper.get_src_table(condkey)}")) + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_load_01_stock_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_STOCK_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_STOCK_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "stock_slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_STOCK_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "stock_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, 
ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_02_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_03_org_cnv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_ORG_CNV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_ORG_CNV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # 
orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_ORG_CNV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "org_cnv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.org_cd = o.org_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_04_vop_hco_merge_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_VOP_HCO_MERGE) + table_name_src = mapper.get_src_table(mapper.CONDKEY_VOP_HCO_MERGE) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_VOP_HCO_MERGE, + "src_file_path": path.join(self.test_file_path_load_individual, "vop_hco_merge_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.apply_dt = o.apply_dt)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 
(insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_05_whs_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_whs_cd = o.v_whs_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_06_hld_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_HLD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_HLD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + 
"condkey": mapper.CONDKEY_HLD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "hld_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_hld_cd = o.v_hld_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_07_fcl_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_FCL_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_FCL_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_FCL_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "fcl_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns, force_cast_to_str_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_inst_cd = o.v_inst_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + force_cast_to_str_columns = ['closed_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns, force_cast_to_str_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def 
test_load_08_mkr_org_horizon_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MKR_ORG_HORIZON) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MKR_ORG_HORIZON) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 0row +insert 4row) ※PK項目がないテーブルなのですべてinsertになる + + assertion1_done_dt = self.db.execute_select("select SYSDATE()")[0]["SYSDATE()"] + + # assertion2でinsertされたレコードをdwh_upd_dtで判断するため、assertion1からの実行間隔を明確に空けるためにスリープを挟む + time.sleep(3) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MKR_ORG_HORIZON, + "src_file_path": path.join(self.test_file_path_load_individual, "mkr_org_horizon_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(assertion1完了後にinsertされたレコード)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} where dwh_upd_dt > :dt_value", {"dt_value": assertion1_done_dt}) + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 0row + insert 4row = 8row) ※PK項目がないテーブルなのですべてinsertになる + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 8 + + # teardown + + def test_load_09_tran_kbn_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_TRAN_KBN_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_TRAN_KBN_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list,
ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_TRAN_KBN_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "tran_kbn_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.v_tran_cd = o.v_tran_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_10_phm_prd_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRD_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_PHM_PRD_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_prd_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRD_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_prd_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.prd_cd = o.prd_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_11_phm_price_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_PHM_PRICE_MST) + table_name_src = 
mapper.get_src_table(mapper.CONDKEY_PHM_PRICE_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.phm_prd_cd = o.phm_prd_cd and s.phm_price_kind = o.phm_price_kind and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_12_whs_customer_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_WHS_CUSTOMER_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_WHS_CUSTOMER_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "whs_customer_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 
期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.whs_cd = o.whs_cd and s.whs_sub_cd = o.whs_sub_cd and s.customer_cd = o.customer_cd and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_13_mdb_conv_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_MDB_CONV_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_MDB_CONV_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_MDB_CONV_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "mdb_conv_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.hco_vid_v = o.hco_vid_v and s.sub_num = o.sub_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_14_bio_slip_data_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_BIO_SLIP_DATA) + table_name_src = mapper.get_src_table(mapper.CONDKEY_BIO_SLIP_DATA) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + 
"condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_BIO_SLIP_DATA, + "src_file_path": path.join(self.test_file_path_load_individual, "bio_slip_data_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.slip_mgt_num = o.slip_mgt_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_load_15_lot_num_mst_ok(self, mapper): + table_name_org = mapper.get_org_table(mapper.CONDKEY_LOT_NUM_MST) + table_name_src = mapper.get_src_table(mapper.CONDKEY_LOT_NUM_MST) + + # setup + self.batch_context.is_vjsk_stock_import_day = True + self.db.execute(f"truncate table {table_name_src}") + + # assertion1 (insert 4row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "lot_num_mst_202304280000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # srcテーブル結果を取得 + result_src = self.db.execute_select(f"select * from {table_name_src}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # assertion2 (update 2row +insert 2row) + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_LOT_NUM_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "lot_num_mst_202304290000.tsv") + } + VjskDataLoadManager.load(target_dict) + + # 期待値データファイル読み込み + assert_list = create_vjsk_assertion_list(target_dict["src_file_path"]) + # orgテーブル結果を取得 + result_org = self.db.execute_select(f"select * from {table_name_org}") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # orgテーブル結果が期待値通りかを突合 + assert_table_results(result_org, assert_list, ignore_columns) + # 
srcテーブル結果(orgテーブル結果のPK値で一致するもの)を取得 + result_src = self.db.execute_select( + f"select * from {table_name_src} s inner join {table_name_org} o on (s.ser_num = o.ser_num and s.lot_num = o.lot_num)") + # 突合から除外する項目 + ignore_columns = ['dwh_upd_dt'] + # srcテーブル結果が期待値通りかを突合 + assert_table_results(result_src, assert_list, ignore_columns) + + # srcテーブル結果のレコード件数 (insert 4row + update 2row + insert 2row = 6row) + result_src_count = self.db.execute_select(f"select count(*) from {table_name_src} ") + assert result_src_count[0]['count(*)'] == 6 + + # teardown + + def test_unzip_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : gzファイルが解凍できない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + # ※.gzだが、7zipで圧縮してあるので、解凍に失敗するのが期待値 + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_unzip_error, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + with pytest.raises(Exception) as e: + _import_file_to_db(received_s3_files) + + # 検証 + assert str(e.value) == "file could not be opened successfully" + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_load_data_error(self, mapper): + """ + 観点 + 異常系 : 日付型矛盾のデータ ※製品価格マスタファイルで確認 + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_dataerror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("SQL Error:") + + # teardown + + def test_load_format_error(self, mapper): + """ + 観点 + 異常系 : tsvファイルが途中で欠落している + 期待値 + 例外が発生する + """ + + # setup + self.batch_context.is_vjsk_stock_import_day = True + + # 処理実行 + target_dict = { + "condkey": mapper.CONDKEY_PHM_PRICE_MST, + "src_file_path": path.join(self.test_file_path_load_individual, "phm_price_mst_formaterror.tsv") + } + + with pytest.raises(BatchOperationException) as e: + VjskDataLoadManager.load(target_dict) + + # 検証 + assert str(e.value).startswith("LOAD文実行時にWARNINGが発生しました。") + + # teardown
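+ + # 以下の2ケースは、S3受領ファイルのバックアップフォルダ移動(VjskReceiveBucket.backup_dat_file)の成否を確認する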
+ def test_s3backup_ok(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 正常系 : S3受領ファイルのバックアップフォルダ移動が完了する + 期待値 + 例外が発生しない + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup - テスト用受領ファイルをS3受領バケットにupload + test_files = [ + "stock_slip_data_202304270000.gz", + "slip_data_202304270000.gz", + "org_cnv_mst_202304270000.gz", + "vop_hco_merge_202304270000.gz", + "whs_mst_202304270000.gz", + "hld_mst_202304270000.gz", + "fcl_mst_202304270000.gz", + "mkr_org_horizon_202304270000.gz", + "tran_kbn_mst_202304270000.gz", + "phm_prd_mst_202304270000.gz", + "phm_price_mst_202304270000.gz", + "whs_customer_mst_202304270000.gz", + "mdb_conv_mst_202304270000.gz", + "bio_slip_data_202304270000.gz", + "lot_num_mst_202304270000.gz" + ] + for test_file in test_files: + file_name = path.join(self.test_file_path_import_all, test_file) + key = f"{receive_folder}/{test_file}" + s3_client.upload_file(file_name, bucket_name, key) + + # assertion + received_s3_files = _check_received_files() + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証(例外が発生しなければバックアップ移動は成功) + + # teardown + for test_file in test_files: + key = f"{receive_folder}/{test_file}" + s3_client.delete_object(Bucket=bucket_name, Key=key) + + def test_s3backup_to_error(self, s3_client, bucket_name, receive_folder, mapper): + """ + 観点 + 異常系 : S3受領ファイルのバックアップフォルダ移動ができない + 期待値 + 例外が発生する + """ + # setup - 卸在庫データ取込対象日 + self.batch_context.is_vjsk_stock_import_day = True + + # setup - S3受領バケットの内容をすべて削除する + vjsk_recv_bucket = VjskReceiveBucket() + s3_files = vjsk_recv_bucket.get_s3_file_list() + for file_obj in s3_files: + s3_client.delete_object(Bucket=bucket_name, Key=file_obj.get("filename")) + + # setup + + # assertion + with pytest.raises(Exception) as e: + # 有りもしないファイルをバックアップフォルダにコピーさせてコケさせる + received_s3_files = [] + received_s3_files.append({"filename": "dummy.dummy"}) + vjsk_recv_bucket.backup_dat_file(received_s3_files, "test") + + # 検証 + assert str(e.value) == "An error occurred (404) when calling the HeadObject operation: Not Found" + + # teardown diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz new file mode 100644 index 00000000..36d1af45 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/bio_slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz new file mode 100644 index 00000000..24234bfe Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/fcl_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz new file mode 100644 index 00000000..efedd921 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/hld_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz new file mode 100644 index 00000000..615d1451 Binary files /dev/null and
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/lot_num_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz new file mode 100644 index 00000000..994a80b8 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mdb_conv_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz new file mode 100644 index 00000000..835d2360 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/mkr_org_horizon_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz new file mode 100644 index 00000000..89eb4a65 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/org_cnv_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz new file mode 100644 index 00000000..169f6761 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_prd_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz new file mode 100644 index 00000000..f7658c88 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/phm_price_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz new file mode 100644 index 00000000..5a90e676 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz new file mode 100644 index 00000000..e2cfdfdf Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/stock_slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz new file mode 100644 index 00000000..b68d5099 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/tran_kbn_mst_202304270000.gz differ diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz new file mode 100644 index 00000000..58fbc63d Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/vop_hco_merge_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz new file mode 100644 index 00000000..fd0ed8cd Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_customer_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz new file mode 100644 index 00000000..013eae2c Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/TestImportFileToDb/whs_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz new file mode 100644 index 00000000..c609baff Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/bio_slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz new file mode 100644 index 00000000..7f532e3b Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/fcl_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/hld_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/hld_mst_202304270000.gz new file mode 100644 index 00000000..04ebc2c6 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/hld_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz new file mode 100644 index 00000000..821eccd4 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/lot_num_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mdb_conv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mdb_conv_mst_202304270000.gz new file mode 100644 index 00000000..5acc776a Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mdb_conv_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz new file mode 100644 index 00000000..d6327182 Binary files /dev/null and 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/mkr_org_horizon_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz new file mode 100644 index 00000000..1efc8e0f Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/org_cnv_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz new file mode 100644 index 00000000..c45076da Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_prd_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_price_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_price_mst_202304270000.gz new file mode 100644 index 00000000..3c01fe7d Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/phm_price_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz new file mode 100644 index 00000000..9c2dc3ad Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz new file mode 100644 index 00000000..3d741b23 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/stock_slip_data_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/tran_kbn_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/tran_kbn_mst_202304270000.gz new file mode 100644 index 00000000..b8a78cb9 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/tran_kbn_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz new file mode 100644 index 00000000..a698e631 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/vop_hco_merge_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz new file mode 100644 index 00000000..88998e26 Binary files /dev/null and b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_customer_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz new file mode 100644 index 00000000..a97fdb31 Binary files /dev/null and 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/UnzipError/whs_mst_202304270000.gz differ diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv new file mode 100644 index 00000000..0c345954 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211JD1111311102503851400002304016427519111 496350122バベンチオテンテキ200MG 1V ソウゴウメデイカルニホンコウカンビツクバシ タカサキ 753 BAVB007 000003 000000 000000 " "296" "02" "11JD11113111025" "0385140000" "496350122" "111" "230401" "20230401" "6427519" "バベンチオテンテキ200MG 1V " "ソウゴウメデイカルニホンコウカンビ" "ツクバシ タカサキ 753 " "BAVB007 " "000003" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000126" "129" "20230401" "202305082041" "110" "売上" "11JD" "300001370" "川崎南支店" "" "200000007" "アルフレッサ株式会社" "0385140000" "670235967013012526" "医療法人社団こうかん会 日本鋼管病院" "イリョウホウジンシャダンコウカンカイ ニホンコウカンビョウイン" "210-0852 神奈川県川崎市川崎区鋼管通1−2−1" "496350122" "バベンチオ 注射剤 200mg 1VIAL" "" "1" "" "3" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211G11111377452402930640002304016433215111 496300127ゴナ-ルエフヒカチユウペン450 1トウ セコム)オギクボビヨウイン トウキヨウト シブヤク ジングウマエ 1-5-1 GF4C001 000002 000000 000000 " "296" "02" "11G111113774524" "0293064000" "496300127" "111" "230401" "20230401" "6433215" "ゴナ-ルエフヒカチユウペン450 1トウ " "セコム)オギクボビヨウイン " "トウキヨウト シブヤク ジングウマエ 1-5-1 " "GF4C001 " "000002" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000127" "130" "20230401" "202305082041" "110" "売上" "11G1" "300001351" "杉並・中野支店" "" "200000007" "アルフレッサ株式会社" "0293064000" "670234652241314835" "医療法人財団荻窪病院 荻窪病院" "イリョウホウジンザイダンオギクボビョウイン オギクボビョウイン" "167-0035 東京都杉並区今川3−1−24" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "2" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" 
"VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv new file mode 100644 index 00000000..e7f85ba8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/bio_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rec_hsdnYmd_wrk" "rec_hsdnYmd_srk" "rec_urag_no" "rec_comm_nm" "rec_nnskFcl_nm" "rec_nnsk_fcl_addr" "rec_lot_num1" "rec_amt1" "rec_lot_num2" "rec_amt2" "rec_lot_num3" "rec_amt3" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_nm" "fcl_kn" "fcl_addr_v" "comm_cd" "comm_nm" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "amt1" "amt2" "amt3" "slip_org_kbn" "bef_slip_mgt_no" "whs_rep_comm_nm" "whs_rep_nnskFcl_nm" "whs_rep_nnsk_fcl_addr" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D452960211V11121120604204799500002304016461276111 496300134ゴナ-ルエフヒカチユウペン900 1トウ ニチイサ-ビスキユウシユウフクオカサンフクオカシ サワラク モモチハマ 1-7-5 7F GF9C002 000010 000000 000000 " "296" "02" "11V111211206042" "0479950000" "496300134" "111" "230401" "20230401" "6461276" "ゴナ-ルエフヒカチユウペン900 1トウ " "ニチイサ-ビスキユウシユウフクオカサン" "フクオカシ サワラク モモチハマ 1-7-5 7F " "GF9C002 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000128" "131" "20230401" "202305082041" "110" "売上" "11V1" "300001491" "福岡第一支店" "" "200000007" "アルフレッサ株式会社" "0479950000" "670235883412145206" "医療法人社団高邦会 福岡山王病院" "イリョウホウジンシャダンコウホウカイ フクオカサンノウビョウイン" "814-0001 福岡県福岡市早良区百道浜3−6−45" "496300134" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" 
+"D452960211JB1121309300202875030002304016523689111 496300127ゴナ-ルエフヒカチユウペン450 1トウ イツカンドウヤツキヨク0561 カワサキシ アサオク フルサワ 172-1 GF4C001 000001 000000 000000 " "296" "02" "11JB11213093002" "0287503000" "496300127" "111" "230401" "20230401" "6523689" "ゴナ-ルエフヒカチユウペン450 1トウ " "イツカンドウヤツキヨク0561 " "カワサキシ アサオク フルサワ 172-1 " "GF4C001 " "000001" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000129" "132" "20230401" "202305082041" "110" "売上" "11JB" "300001369" "川崎北支店" "" "200000007" "アルフレッサ株式会社" "0287503000" "670237078008644636" "株式会社キリン堂 一貫堂薬局" "カブシキガイシャキリンドウ イッカンドウヤッキョク" "215-0026 神奈川県川崎市麻生区古沢172−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "1" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300110ゴナ-ルエフヒカチユウペン300 1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF3B013 000005 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300110" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン300 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF3B013 " "000005" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000130" "133" "20230401" "202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300110" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "1" "" "5" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" +"D4529602K1201130032696508657140002304016527757111 496300127ゴナ-ルエフヒカチユウペン450 1トウ モリノクスリヤヤツキヨク オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 GF4B011 000010 000000 000000 " "296" "02" "K12011300326965" "0865714000" "496300127" "111" "230401" "20230401" "6527757" "ゴナ-ルエフヒカチユウペン450 1トウ " "モリノクスリヤヤツキヨク " "オキナワケン シマジリグン ヤエセチヨウ ヤギバル 238-1 " "GF4B011 " "000010" " " "000000" " " "000000" "20230403" "J" "VJSK-BIO_J_MERCK_2023040300.txt" "J2023040300000131" "134" "20230401" "202305082041" "110" "売上" "K120" "300006583" "沖縄第二営業部" "" "200000007" "アルフレッサ株式会社" "0865714000" "670232348519842842" "有限会社吾妻サンライズ 森の薬屋薬局" "ユウゲンガイシャアガツマサンライズ モリノクスリヤヤッキョク" "901-0406 沖縄県島尻郡八重瀬町屋宜原238−1" "496300127" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "1" "" "10" "0" "0" "J" "" "" "" "" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202304" "" "" "0" "23-04-03 20:42:11" "system" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv new file mode 100644 index 00000000..9615f392 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229430760653825" "0" "20000101" "99991231" "" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "駅前町歯科診療所" "エキマエチョウシカシンリョウジョ" "" "0" "700-0023 岡山県岡山市北区駅前町1−6−20" "オカヤマケン オカヤマシキタク エキマエチョウ1-6-20" "700-0023" "33" "岡山県" "岡山市北区" "駅前町1−6−20" "0862236468" "33101" "30" "1" "" "" +"670229435466662922" "0" "20000101" "99991231" "" "医療法人社団仁卓会 ほりかわ歯科クリニック" "イリョウホウジンシャダンジンタクカイ ホリカワシカクリニック" "ほりかわ歯科クリニック (医社)" "ホリカワシカクリニック 
(イシャ)" "" "0" "675-0101 兵庫県加古川市平岡町新在家1573−1−4F" "ヒョウゴケン カコガワシ ヒラオカチョウシンザイケ1573-1-4F" "675-0101" "28" "兵庫県" "加古川市" "平岡町新在家1573−1−4F" "0794244617" "28210" "30" "1" "" "" +"670229435785430019" "0" "20000101" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000101" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" "28201" "20" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv new file mode 100644 index 00000000..b4c05930 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/fcl_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"fcl_cd" "sub_no" "start_dt" "end_dt" "closed_dt" "nm" "kn_nm" "sht_nm" "sht_kn_nm" "mkr_cd" "jsk_proc_kbn" "fmt_addr" "fmt_kn_addr" "post_cd" "prft_cd" "prft_nm" "city_nm" "addr_line_1" "tel_no" "admin_kbn" "fcl_type" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229435785430019" "0" "20000202" "99991231" "" "株式会社コミュニティメディカル なつめ薬局 千歳船橋店" "カブシキガイシャコミュニティメディカル ナツメヤッキョク チトセフナバシテン" "なつめ薬局 千歳船橋店 (株)" "ナツメヤッキョク チトセフナバシテン (カ)" "" "0" "156-0054 東京都世田谷区桜丘2−24−2" "トウキョウト セタガヤク サクラガオカ2-24-2" "156-0054" "13" "東京都" "世田谷区" "桜丘2−24−2" "0364136189" "13112" "20" "1" "" "" +"670229447437206529" "0" "20000202" "20230407" "2023-04-07" "ヒカリ薬局" "ヒカリヤッキョク" "ヒカリ薬局" "ヒカリヤッキョク" "" "0" "670-0955 兵庫県姫路市安田4−47−8−1F" "ヒョウゴケン ヒメジシ ヤスダ4-47-8-1F" "670-0955" "28" "兵庫県" "姫路市" "安田4−47−8−1F" "0792846396" "28201" "20" "1" "" "" +"670229463350395910" "0" "20000101" "99991231" "" "こうだ歯科" "コウダシカ" "こうだ歯科" "コウダシカ" "" "0" "770-0831 徳島県徳島市寺島本町西1−10" "トクシマケン トクシマシ テラシマホンチョウニシ1-10" "770-0831" "36" "徳島県" "徳島市" "寺島本町西1−10" "0886552625" "36201" "30" "1" "" "" +"670229489380246545" "0" "20020521" "99991231" "" "社会医療法人社団埼玉巨樹の会 狭山中央病院" "シャカイイリョウホウジンシャダンサイタマキョジュノカイ サヤマチュウオウビョウイン" "狭山中央病院 (社社)" "サヤマチュウオウビョウイン (シャシャ)" "" "0" "350-1306 埼玉県狭山市富士見2−19−35" "サイタマケン サヤマシ フジミ2-19-35" "350-1306" "11" "埼玉県" "狭山市" "富士見2−19−35" "0429597111" "11215" "10" "1" "" "" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv new file mode 100644 index 00000000..2c4a8c1f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "卸ホールディングス名01-1" "オロシホールディングスメイ01-1" "卸名01-1" "20230101" "99991231" "1" "0" "23-05-09 12:00:01" "23-05-09 13:00:01" +"10001" "2" "卸ホールディングス名01-2" "オロシホールディングスメイ01-2" "卸名01-2" "20230102" "99991231" "1" "0" "23-05-09 12:00:02" "23-05-09 13:00:02" +"10001" "3" "卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230103" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230104" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv new file mode 100644 index 
00000000..9728bb5f --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/hld_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_hld_cd" "sub_no" "nm" "kn_nm" "sht_nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" "卸ホールディングス名01-3" "オロシホールディングスメイ01-3" "卸名01-3" "20230101" "99991231" "1" "0" "23-05-09 12:00:03" "23-05-09 13:00:03" +"10001" "4" "卸ホールディングス名01-4" "オロシホールディングスメイ01-4" "卸名01-4" "20230102" "99991231" "1" "0" "23-05-09 12:00:04" "23-05-09 13:00:04" +"10001" "5" "卸ホールディングス名01-5" "オロシホールディングスメイ01-5" "卸名01-5" "20230103" "99991231" "1" "0" "23-05-09 12:00:05" "23-05-09 13:00:05" +"10002" "1" "卸ホールディングス名02-1" "オロシホールディングスメイ02-1" "卸名01-1" "20230104" "99991231" "1" "0" "23-05-09 12:00:06" "23-05-09 13:00:06" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv new file mode 100644 index 00000000..3acfb7cc --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA001" "20230331" "20210510" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA002" "20230331" "20210615" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv new file mode 100644 index 00000000..c7e86b9a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/lot_num_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"ser_no" "lot_num" "expr_dt" "frst_mov_dt" "ins_dt" "ins_usr" +"F0110601" "BAVA003" "20231031" "20210719" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA004" "20231031" "20210823" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA005" "20231031" "20210927" "23-05-08 20:40:41" "batch" +"F0110601" "BAVA006" "20240131" "20211025" "23-05-08 20:40:41" "batch" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv new file mode 100644 index 00000000..f27cf3ce --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670229780011959315" "1" "003410424" "0" "20020601" "0" "22-03-09 13:56:19" "22-03-09 13:56:19" +"670230081112654862" "0" "004101420" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:17:34" +"670230081112654862" "1" "004104997" "2" "20000101" "0" "22-03-09 14:17:33" "22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "3" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv new file mode 100644 index 00000000..fe0ab79c --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mdb_conv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "sub_no" "mdb_cd" "reliability" "start_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"670230081112654862" "1" "004104997" "0" "20000101" "0" "22-03-09 14:17:33" 
"22-03-09 14:17:33" +"670230100414841865" "0" "003622111" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 14:13:49" +"670230100414841865" "1" "003636480" "0" "20000101" "0" "22-03-09 14:13:49" "22-03-09 14:13:49" +"670230330673742853" "0" "004804003" "0" "20000101" "9" "17-10-17 17:06:52" "22-03-09 15:20:35" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv new file mode 100644 index 00000000..3755e224 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304280000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" "upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "3" "300003217" "試薬岐阜(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003138" "北関東甲信越営業本部" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "3" "300003195" "首都圏移管組織" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" "その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"a" "300003144" "メディカル営業本部1" "1" "b" "300003202" "東海スタッフ医療2" "2" "c" "300003203" "東海スタッフ医療3" "3" "d" "300003204" "東海スタッフ医療4" "4" "e" "300003205" "東海スタッフ医療5" "5" "f" "300003206" "東海スタッフ医療6" "6" "g" "300003207" "東海スタッフ医療7" "7" "h" "300003208" "東海スタッフ医療8" "8" "i" "300003209" "東海スタッフ医療9" "9" "j" "300003210" "東海スタッフ医療10" "10" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv new file mode 100644 index 00000000..71f9d85b --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/mkr_org_horizon_202304290000.tsv @@ -0,0 +1,5 @@ +"vid_kind_1" "v_cd_1" "nm_1" "dsp_odr_1" "vid_kind_2" "v_cd_2" "nm_2" "dsp_odr_2" "vid_kind_3" "v_cd_3" "nm_3" "dsp_odr_3" "vid_kind_4" "v_cd_4" "nm_4" "dsp_odr_4" "vid_kind_5" "v_cd_5" "nm_5" "dsp_odr_5" "vid_kind_6" "v_cd_6" "nm_6" "dsp_odr_6" "vid_kind_7" "v_cd_7" "nm_7" "dsp_odr_7" "vid_kind_8" "v_cd_8" "nm_8" "dsp_odr_8" "vid_kind_9" "v_cd_9" "nm_9" "dsp_odr_9" "vid_kind_10" "v_cd_10" "nm_10" "dsp_odr_10" "v_whs_cd" "start_dt" "end_dt" "rec_sts_kbn" "ins_dt" 
"upd_dt" +"3" "300003202" "その他営業本部卸" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "3" "300003226" "大垣(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003144" "メディカル営業本部" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "3" "300003201" "東海スタッフ医療" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003202" "その他営業本部卸" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "3" "300003224" "岐阜第二(回収)" "0" "200000007" "20190401" "99991231" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" +"3" "300003143" "医薬営業統括本部" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "3" "300003196" "医薬その他" "0" "200000007" "20190401" "20190930" "0" "19-04-11 11:30:59" "23-04-12 17:52:38" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv new file mode 100644 index 00000000..208d1a20 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"042" "01" "361007" "0" "300006657" "20230401" "99991231" "0" "0" "23-04-12 11:24:06" "23-04-12 11:24:06" +"042" "01" "381207" "0" "300006658" "20230401" "99991231" "0" "0" "23-04-12 11:24:27" "23-04-12 11:24:27" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" "300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv new file mode 100644 index 00000000..c91eafee --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/org_cnv_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "org_cd" "sub_no" "v_org_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"080" "00" "02780" "0" "300006526" "20220401" "99991231" "0" "9" "22-04-11 15:57:35" "23-04-12 10:46:48" +"080" "00" "02780" "1" "300006526" "20220401" "20230331" "0" "0" "23-04-12 10:46:48" "23-04-12 10:46:48" +"080" "00" "21807" "2" "300006649" "20230401" "99991231" "0" "0" "23-04-12 10:49:23" "23-04-12 10:49:23" +"080" "00" "25110" "0" "300005251" "20000101" "99991231" "0" "9" "21-03-11 14:59:47" "23-04-12 10:47:42" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv new file mode 100644 index 
00000000..9c4d4d86 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496201110" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. (30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20080101" "20190930" "140" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496201127" "0" "セロフェン 錠剤 50mg 30TAB" "" "F21206A0" "セロフェン 錠 50MG" "SEROPHENE TAB. 50 MG. (30)" "001" "セロフェン" "SP" "F003" "錠剤" "0000" "" "V009" "50mg" "P007" "30TAB" "30" "" "0" "0" "20070401" "20190930" "150" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv new file mode 100644 index 00000000..29824385 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_prd_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"prd_cd" "sub_no" "prd_nm" "prd_e_nm" "mkr_cd" "mkr_inf_1" "mkr_inf_2" "phm_itm_cd" "itm_nm" "itm_sht_nm" "form_cd" "form_nm" "vol_cd" "vol_nm" "cont_cd" "cont_nm" "pkg_cd" "pkg_nm" "cnv_num" "jsk_start_dt" "prd_sale_kbn" "jsk_proc_kbn" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"496300110" "2" "ゴナールエフ 皮下注ペン 300IU 1PEN" "" "F1990608" "ゴナールエフ皮下注ペン 300" "GONAL-F PEN 300IU (1) - JPN" "005" "セロスティム" "ST" "F005" "皮下注ペン" "0000" "" "V017" "300IU" "P011" "1PEN" "1" "20230601" "0" "0" "20190501" "20190930" "100" "9" "19-04-23 16:35:36" "19-04-23 16:40:38" +"496300127" "2" "ゴナールエフ 皮下注ペン 450IU 1PEN" "" "F19D0608" "ゴナールエフ皮下注ペン450" "Gonalef Pen 450 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V018" "450IU" "P011" "1PEN" "1" "20230602" "0" "0" "20190501" "20190930" "120" "9" "19-04-23 16:37:10" "19-04-23 16:40:54" +"496300134" "2" "ゴナールエフ 皮下注ペン 900IU 1PEN" "" "F19B0608" "ゴナールエフ皮下注ペン900" "Gonalef Pen 900 (1)" "008" "BDマイクロファインプラス" "MF" "F005" "皮下注ペン" "0000" "" "V019" "900IU" "P011" "1PEN" "1" "20230603" "0" "0" "20190501" "20190930" "110" "9" "19-04-23 16:38:36" "19-04-23 16:41:05" +"496301315" "0" "プロファシー 注射剤 5000IU 10VIAL" "" "F47706A0" "プロファシー 注 5000IU" "PROFASI M AMP. 
5000 IU (10)" "002" "プロファシー" "PF" "F002" "注射剤" "0000" "" "V020" "5000IU" "P005" "10VIAL" "10" "20230604" "0" "0" "20070401" "20190930" "160" "0" "17-11-08 16:52:41" "19-09-19 11:42:45" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv new file mode 100644 index 00000000..b47ae0e6 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "01" "2" "10060.2" "20180401" "20190930" "10" "0" "18-03-07 09:33:37" "19-09-19 11:23:47" +"114430502" "01" "3" "10237.2" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv new file mode 100644 index 00000000..c5331854 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"114430502" "03" "2" "100602" "20180401" "20190930" "30" "0" "18-03-07 09:39:48" "19-09-19 11:23:47" +"114430502" "03" "3" "102372" "20191001" "99991231" "30" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" +"114430601" "01" "2" "12362.4" "20180401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"114430601" "01" "3" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv new file mode 100644 index 00000000..3df124c1 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_dataerror.tsv @@ -0,0 +1,3 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "yyyy0401" "20190930" "10" "0" "18-03-07 09:48:00" "19-09-19 11:23:47" +"123456701" "02" "1" "12587.8" "20191001" "99991231" "10" "0" "19-09-19 11:24:05" "19-09-19 11:24:05" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv new file mode 100644 index 00000000..20ebb14d --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/phm_price_mst_formaterror.tsv @@ -0,0 +1,2 @@ +"phm_prd_cd" "phm_price_kind" "sub_no" "price" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"123456701" "01" "1" "12345.6" "202304 \ No newline at end of file diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv new file mode 100644 index 00000000..d9b11f17 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"recvdata" "rec_whs_cd" 
"rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"D4420202011611A4 0183733 23030133625911102303 4963500230000020003110000000622000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362591" "000002" "00031100" "0000062200" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000022" "29" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "2" "31100" "62200" "31438" "62876" "35309" "70618" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4420202011611A4 0183733 23030133625921102303 4963500230000080003110000002488000000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vハコダテチユウオウビヨウイン 00000408585ハコダテシホンチヨウ33バン2ゴウ " "202" "02" "011611A4 " "0183733 " "496350023" "110" "230301" "20230301" "3362592" "000008" "00031100" "0000248800" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ハコダテチユウオウビヨウイン " "0408585ハコダテシホンチヨウ33バン2ゴウ " "ハコダテシホンチヨウ33バン2ゴウ " "0408585" "" "000000" " " "20230222" "J" "VJSK_J_MERCK_2023022" "J2023022200000023" "30" "20230301" "202303142041" "110" "売上" "01161" "300000383" "函館支店" "" "200000016" "株式会社スズケン" "0183733 " "670234934576694289" "シャカイフクシホウジンハコダテコウセイイン ハコダテチュウオウビョウイン" "社会福祉法人函館厚生院 函館中央病院" "040-0011 北海道函館市本町33−2" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "31100" "248800" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" 
"ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"D4416101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv new file mode 100644 index 00000000..5299839a --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"recvdata" "rec_whs_cd" "rec_whs_sub_cd" "rec_whs_org_cd" "rec_cust_cd" "rec_comm_cd" "rec_tran_kbn" "rev_hsdnYmd_wrk" "rev_hsdnYmd_srk" "rec_urag_no" "rec_amt" "rec_unit_price" "rec_price" "rec_comm_nm" "rec_nnskFcl_nm" "free_item" "rec_nnsk_fcl_addr" "rec_nnsk_fcl_post" "rec_nnsk_fcl_tel" "rec_bef_hsdn_ymd" "rec_bef_slip_no" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "hsdn_ymd" "exec_dt" "v_tran_cd" "tran_kbn_nm" "whs_org_cd" "v_whsOrg_cd" "whs_org_nm" "whs_org_kn" "v_whs_cd" "whs_nm" "nnsk_cd" "fcl_cd" "fcl_kn" "fcl_nm" "fcl_addr_v" "comm_cd" "comm_nm" "nn_amt" "nn_unitPrice" "nn_price" "unit_price" "unit_amt" "drag_price" "drag_amt" "whsPos_err_kbn" "htdnYmd_err_kbn" "prd_exis_kbn" "fcl_exis_kbn" "bef_hsdn_ymd" "bef_slip_no" "slip_org_kbn" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "err_flg11" "err_flg12" "err_flg13" "err_flg14" "err_flg15" "err_flg16" "err_flg17" "err_flg18" "err_flg19" "err_flg20" "kjyo_ym" "tksNbk_kbn" "fcl_exec_kbn" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-101311101A8 5140013 23030173719811122303 4963500230000120002738100003285720000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7371981" "000012" "00027381" "0000328572" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000011" "16" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "12" "27381" "328572" "31438" "377256" "35309" "423708" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" 
"0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-101311101A8 5140013 23030173720211122303 4963500230000080002738100002190480000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトツトリニツセキビヨウイン 00006808517トツトリケントツトリシシヨウトクチヨウ117 " "161" "01" "311101A8 " "5140013 " "496350023" "112" "230301" "20230301" "7372021" "000008" "00027381" "0000219048" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トツトリニツセキビヨウイン " "6808517トツトリケントツトリシシヨウトクチヨウ117 " "トツトリケントツトリシシヨウトクチヨウ117 " "6808517" "" "000000" " " "20230224" "J" "VJSK_J_MERCK_2023022" "J2023022400000012" "17" "20230301" "202303142041" "110" "売上" "31110" "300000391" "鳥取支店" "" "200000015" "株式会社サンキ" "5140013 " "670237031040828444" "ニホンセキジュウジシャ トットリセキジュウジビョウイン" "日本赤十字社 鳥取赤十字病院" "680-0017 鳥取県鳥取市尚徳町117" "496350023" "アービタックス 注射剤 100mg 1VIAL" "8" "27381" "219048" "31438" "251504" "35309" "282472" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202041131A1 1409581 23030106357711102303 4963500230000070002966000002076200000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vトウホクロウサイビヨウイン 00009818563センダイシアオバクダイノハラ " "202" "02" "041131A1 " "1409581 " "496350023" "110" "230301" "20230301" "0635771" "000007" "00029660" "0000207620" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "トウホクロウサイビヨウイン " "9818563センダイシアオバクダイノハラ " "センダイシアオバクダイノハラ " "9818563" "" "000000" " " "20230227" "J" "VJSK_J_MERCK_2023022" "J2023022700000128" "135" "20230301" "202303142041" "110" "売上" "04113" "300000354" "北仙台支店" "" "200000016" "株式会社スズケン" "1409581 " "670232828063007745" "ドクリツギョウセイホウジンロウドウシャケンコウアンゼンキコウ トウホクロウサイビョウイン" "独立行政法人労働者健康安全機構 東北労災病院" "981-0911 宮城県仙台市青葉区台原4−3−21" "496350023" "アービタックス 注射剤 100mg 1VIAL" "7" "29660" "207620" "31438" "220066" "35309" "247163" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" +"DAY2-202141234B1 2607506 23030109289511102303 4963500230000100003036600003036600000000000000000000000 ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1Vヨコスカキヨウサイビヨウイン 00002380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "202" "02" "141234B1 " "2607506 " "496350023" "110" "230301" "20230301" "0928951" "000010" "00030366" "0000303660" "ア-ビタツクスチユウシヤエキ100MG 100MG 20MLX1V" "ヨコスカキヨウサイビヨウイン " "2380011カナガワケンヨコスカシヨネガハマドオリ1-16 " "カナガワケンヨコスカシヨネガハマドオリ1-16 " "2380011" "" "000000" " " "20230228" "J" "VJSK_J_MERCK_2023022" "J2023022800000094" "101" "20230301" "202303142041" "110" "売上" "14123" "300000274" "磯子支店" "" "200000016" "株式会社スズケン" "2607506 " "670236609488110605" "コッカコウムインキョウサイクミアイレンゴウカイ ヨコスカキョウサイビョウイン" "国家公務員共済組合連合会 横須賀共済病院" "238-0011 神奈川県横須賀市米が浜通1−16" "496350023" "アービタックス 注射剤 100mg 1VIAL" "10" "30366" "303660" "31438" "314380" "35309" "353090" "" "" "1" "" "" "" "J" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "202303" "" "" "0" "23-03-14 20:41:26" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv new file mode 100644 index 00000000..cc6c3902 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304280000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" "err_flg2" 
"err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"D463630101 23022849630021900003500000 セトロタイドチユウシヤヨウ0.25MG 1V" "363" "01" "01 " "230228" "496300219" "000035" "0" "0000" " セトロタイドチユウシヤヨウ0.25MG 1V" "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000059" "59" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001270000040000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "026 " "230228" "496300127" "000004" "0" "0000" "01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000060" "60" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"D4625301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv new file mode 100644 index 00000000..0b180c24 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/stock_slip_data_202304290000.tsv @@ -0,0 +1,5 @@ +"rec_data" "rec_whs_cd" "rec_whs_sub_cd" "rec_sto_place" "rec_stock_ymd" "rec_comm_cd" "rec_amt" "rev_stok_no_sign" "rev_jan_cd" "rec_free_item" "rec_ymd" "sale_data_cat" "slip_file_nm" "slip_mgt_no" "row_num" "exec_dt" "err_flg1" "err_flg2" "err_flg3" "err_flg4" "err_flg5" "err_flg6" "err_flg7" "err_flg8" "err_flg9" "err_flg10" "rec_sts_kbn" "ins_dt" "ins_usr" +"DAY2-301026 2302284963001340000220000001ゴナールエフヒカチユウペン900 1トウ40 " "253" "01" "026 " "230228" "496300134" "000022" "0" "0000" "01ゴナールエフヒカチユウペン900 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000061" "61" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301026 2302284963004170000500000001オビドレルヒカチユウシリンジ250MCG 140 " "253" "01" "026 " "230228" "496300417" "000050" "0" "0000" "01オビドレルヒカチユウシリンジ250MCG 140 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000062" "62" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001100000020000001ゴナールエフヒカチユウペン300 1トウ40 " "253" "01" "027 " "230228" "496300110" "000002" "0" "0000" "01ゴナールエフヒカチユウペン300 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000063" "63" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" +"DAY2-301027 2302284963001270000110000001ゴナールエフヒカチユウペン450 1トウ40 " "253" "01" "027 " "230228" "496300127" "000011" "0" "0000" "01ゴナールエフヒカチユウペン450 1トウ40 " "20230314" "J" "VJSK-STOCK_J_MERCK_2023031400.txt" "J2023031400000064" "64" "202303142041" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "23-03-14 20:41:57" "SYSTEM" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv 
b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv new file mode 100644 index 00000000..cb5d5ff8 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "1" "Veeva取引区分名01-1" "20230101" "99991231" "1" "0" "2023-05-09 12:00:01" "2023-05-09 13:00:01" +"10001" "2" "Veeva取引区分名01-2" "20230102" "99991231" "1" "0" "2023-05-09 12:00:02" "2023-05-09 13:00:02" +"10001" "3" "Veeva取引区分名01-3" "20230103" "99991231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" "Veeva取引区分名01-4" "20230104" "99991231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv new file mode 100644 index 00000000..0813b3df --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/tran_kbn_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_tran_cd" "sub_no" "nm" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"10001" "3" "Veeva取引区分名01-3" "20230201" "20301231" "1" "0" "2023-05-09 12:00:03" "2023-05-09 13:00:03" +"10001" "4" "Veeva取引区分名01-4" "20230202" "20301231" "1" "0" "2023-05-09 12:00:04" "2023-05-09 13:00:04" +"10001" "5" "Veeva取引区分名01-5" "20230203" "20301231" "1" "0" "2023-05-09 12:00:05" "2023-05-09 13:00:05" +"10002" "1" "Veeva取引区分名02-1" "20230204" "20301231" "1" "0" "2023-05-09 12:00:06" "2023-05-09 13:00:06" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv new file mode 100644 index 00000000..dca02347 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304280000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000001" "900000001" "20230509" "事由01" +"100000002" "900000002" "20230509" "事由02" +"100000003" "900000003" "20230509" "事由03" +"100000004" "900000004" "20230509" "事由04" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv new file mode 100644 index 00000000..c99d9f76 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/vop_hco_merge_202304290000.tsv @@ -0,0 +1,5 @@ +"hco_vid__v" "hco_vid__v_merge" "apply_dt" "merge_reason" +"100000003" "900000003" "20230509" "DAY-2事由03" +"100000004" "900000004" "20230509" "DAY-2事由04" +"100000005" "900000005" "20230509" "DAY-2事由05" +"100000006" "900000006" "20230509" "DAY-2事由06" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv new file mode 100644 index 00000000..0c23e674 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1002900000" "0" "20000101" "99991231" "11" "1131A2283316" "辻内科小児科医院               " "ツジナイカシヨウニカ イイン" "長崎県 佐世保市皆瀬町29                   " "ナガサキケン 
サセボシカイゼチヨウ 29" "8570144" "0956492319" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1005400000" "0" "20000101" "99991231" "12" "1211C3415515" "医療法人 愛恵会 佐世保愛恵病院       " "イリヨウホウジンアイケイカイサセボアイケイビ" "長崎県 佐世保市瀬戸越4丁目 2−15             " "ナガサキケン サセボシセトゴシ 4チヨウメ 2-15" "8570134" "0956493335" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所             " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院           " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv new file mode 100644 index 00000000..e5c22a27 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_customer_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"whs_cd" "whs_sub_cd" "customer_cd" "sub_no" "start_dt" "end_dt" "org_cd" "src_org_cd" "nm" "kn_nm" "addr" "kn_addr" "zip_cd" "tel_no" "rec_sts_kbn" "ins_dt" "upd_dt" +"006" "01" "1007200000" "0" "20000101" "99991231" "11" "1131A2407312" "医療法人 山祇診療所2            " "イリヨウホウジンヤマズミシンリヨウジヨ" "長崎県 佐世保市山祇町 19−36               " "ナガサキケン サセボシヤマズミチョウ" "8570822" "0956313633" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1007800000" "0" "20000101" "99991231" "11" "1121A2402213" "医療法人道仁会 品川医院2          " "イリヨウホウジンドウジンカイ シナガワイイン" "長崎県 佐世保市柚木町2188                 " "ナガサキケン サセボシユノキチヨウ 2188" "8570112" "0956460005" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1008000000" "0" "20000101" "99991231" "11" "1131A2283316" "北原整形外科医院2              " "キタハラセイケイゲカ イイン" "長崎県 佐世保市瀬戸越町4丁目1298−1           " "ナガサキケン サセボシセトゴシチヨウ 1298-1" "8570135" "0956497773" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" +"006" "01" "1009100000" "0" "20000101" "99991231" "11" "1121A2224212" "山口医院2                  " "ヤマグチイイン" "長崎県 佐世保市春日町29−14                " "ナガサキケン サセボシカスガチヨウ29-14" "8570011" "0956228610" "0" "23-04-14 11:53:14" "23-04-14 11:53:14" diff --git a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv new file mode 100644 index 00000000..e9280e36 --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304280000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000002" "0" "株式会社モロオ" "カナ01" "モロオ" "1110001" "住所01" "ジュウショ01" "00-0000-0001" "0" "20000101" "99991231" "20" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000005" "0" "岩渕薬品株式会社" "カナ02" "岩渕薬品" "1110002" "住所02" "ジュウショ02" "00-0000-0002" "0" "20000101" "99991231" "50" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000009" "0" "株式会社マルタケ" "カナ03" "マルタケ" "1110003" "住所03" "ジュウショ03" "00-0000-0003" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "カナ04" "ファイネス" "1110004" "住所04" "ジュウショ04" "00-0000-0004" "0" "20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" diff --git 
a/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv new file mode 100644 index 00000000..c16f188e --- /dev/null +++ b/ecs/jskult-batch-daily/tests/batch/vjsk/vjsk_load/testdata/whs_mst_202304290000.tsv @@ -0,0 +1,5 @@ +"v_whs_cd" "sub_no" "nm" "kn_nm" "sht_nm" "zip_cd" "addr" "kn_addr" "tel_no" "v_hld_cd" "start_dt" "end_dt" "dsp_odr" "rec_sts_kbn" "ins_dt" "upd_dt" +"200000009" "0" "株式会社マルタケ" "" "マルタケ" "" "" "" "" "0" "20000101" "99991231" "90" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000010" "0" "株式会社ファイネス" "" "ファイネス" "" "" "" "" "0" "20000101" "99991231" "100" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000011" "0" "鍋林株式会社" "" "鍋林" "" "" "" "" "0" "20000101" "99991231" "110" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" +"200000012" "0" "岡野薬品株式会社" "" "岡野薬品" "" "" "" "" "0" "20000101" "99991231" "120" "0" "16-04-15 16:25:33" "16-04-15 16:25:33" diff --git a/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py new file mode 100644 index 00000000..441c5c9e --- /dev/null +++ b/ecs/jskult-batch-daily/tests/testing_vjsk_utility.py @@ -0,0 +1,118 @@ + + +import csv +import decimal +from datetime import datetime + + +def create_vjsk_assertion_list(file_path: str) -> list: + """DB登録期待値リストを作成する + + Args: + file_path (str): DB登録期待値ファイル(tsvファイル)のパス + ※DB登録期待値ファイルの前提 + 受領データファイルと同じ + BOM付きtsv形式 + 一行目はカラム名になっているヘッダ行 + + Returns: + List(dict) DB登録期待値辞書リスト + """ + with open(file_path, encoding='utf_8_sig', newline='') as tsv_file: + header = tsv_file.readline().strip('\n').replace('"', '').split('\t') + reader = csv.DictReader(tsv_file, fieldnames=header, delimiter='\t') + rows = [r for r in reader] + + # DB抽出値と比較できるように、リテラル値をDB抽出値と同じデータフォーマットに変換 + for row in rows: + for k, v in row.items(): + converted_value = v + if v == 'NULL': + converted_value = None + if is_valid_date_format(v, '%Y/%m/%d') is True: # YYYY/MM/DD + converted_value = datetime.strptime(v, '%Y/%m/%d').date() + if is_valid_date_format(v, '%Y-%m-%d') is True: # YYYY-MM-DD + converted_value = datetime.strptime(v, '%Y-%m-%d').date() + if is_valid_date_format(v, '%Y/%m/%d %H:%M:%S') is True: # YYYY/MM/DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y/%m/%d %H:%M:%S') + if is_valid_date_format(v, '%Y-%m-%d %H:%M:%S') is True: # YYYY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%Y-%m-%d %H:%M:%S') + if is_valid_date_format(v, '%y-%m-%d %H:%M:%S') is True: # YY-MM-DD HH:MM:SS + converted_value = datetime.strptime(v, '%y-%m-%d %H:%M:%S') + + row[k] = converted_value + + return rows + + +def is_valid_date_format(date_str: str, date_format): + """日付文字列が、与えられたフォーマットにマッチするかを検査する + + Args: + date_str (str): 日付文字列 + date_format (str, optional): 日付のフォーマット + + Returns: + _type_: 正しい日付文字列の場合、True、それ以外はFalse + """ + try: + datetime.strptime(date_str, date_format) + return True + except ValueError: + return False + + +def assert_table_results(actual_rows: list[dict], expect_rows: list[dict], ignore_col_names: list = None, force_cast_to_str_columns: list = []) -> None: + """テーブル同士の取得結果突き合わせ + + Args: + actual_rows (list[dict]): テスト結果の辞書リスト + expect_rows (list[dict]): 期待値の辞書リスト + ignore_col_name (list): 比較を無視するDBのカラム名. Default None. 
+ """ + # 取得件数が一致すること + assert len(actual_rows) == len(expect_rows), f'レコード件数が一致しません。DBレコード数 : {len(actual_rows)} 期待値 : {len(expect_rows)}' + + line_number = 0 + # 1行ずつ調査 + for actual_row, expect_row in zip(actual_rows, expect_rows): + line_number += 1 + # 1カラムずつ調査 + for actual_col_name, expect_col_name in zip(actual_row, expect_row): + # テストメソッド側で個別に確認するものはスキップさせる + if ignore_col_names is not None and actual_col_name in ignore_col_names: + continue + else: + actual_value = actual_row[actual_col_name] + expect_value = expect_row[expect_col_name] + + # 期待値を、DBのデータ型(リフレクションされたpythonのデータ型)にキャストする + if actual_col_name in force_cast_to_str_columns: + # DB項目(varchar)に日付型としてキャスト可能な値が期待値である場合、force_cast_to_str_columnsに基づいて強制的に文字列キャストする + if type(expect_value).__name__ == 'date': + expect_value = expect_value.strftime('%Y-%m-%d') + elif isinstance(actual_value, (int)): + # DB項目(int)の場合、期待値もintにキャストする + expect_value = int(expect_value) + elif isinstance(actual_value, (float)): + # DB項目(float)の場合、期待値もfloatにキャストする + expect_value = float(expect_value) + elif isinstance(actual_value, (decimal.Decimal)): + # DB項目(decimal)の場合、期待値もdecimalにキャストする + expect_value = decimal.Decimal(expect_value) + elif type(actual_value).__name__ == "date": + # DB項目(date)の場合、期待値("YYYYMMDD")もdateにキャストする + if is_valid_date_format(expect_value, '%Y%m%d') is True: # YYYYMMDD + expect_value = datetime.strptime(expect_value, '%Y%m%d').date() + elif actual_value is None and expect_value == "": + # DB項目値がNULLの場合、期待値が""であればNoneに置換する + expect_value = None + elif actual_value == "0000-00-00" and expect_value == "": + # DB項目(date)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00"に置換する + expect_value = "0000-00-00" + elif actual_value == "0000-00-00 00:00:00" and expect_value == "": + # DB項目(datetime)がゼロ日付(NULL代替値)の場合、期待値が""であれば"0000-00-00 00:00:00"に置換する + expect_value = "0000-00-00 00:00:00" + + # 検証 + assert actual_value == expect_value, f'{line_number}行目:"{actual_col_name}" : "{actual_value}" ({type(actual_value)})が、期待値 "{expect_value}" ({type(expect_value)}) と一致しませんでした' diff --git a/ecs/jskult-batch-monthly/.dockerignore b/ecs/jskult-batch-monthly/.dockerignore new file mode 100644 index 00000000..8b9da402 --- /dev/null +++ b/ecs/jskult-batch-monthly/.dockerignore @@ -0,0 +1,12 @@ +tests/* +.coverage +.env +.env.example +.report/* +.vscode/* +.pytest_cache/* +*/__pychache__/* +Dockerfile +pytest.ini +README.md +*.sql diff --git a/ecs/jskult-batch-monthly/.env.example b/ecs/jskult-batch-monthly/.env.example new file mode 100644 index 00000000..d1f67281 --- /dev/null +++ b/ecs/jskult-batch-monthly/.env.example @@ -0,0 +1,19 @@ +DB_HOST=************ +DB_PORT=3306 +DB_USERNAME=************ +DB_PASSWORD=************ +DB_SCHEMA=src05 + +ARISJ_DATA_BUCKET=mbj-newdwh2021-staging-jskult-arisj +JSKULT_BACKUP_BUCKET=mbj-newdwh2021-staging-backup-jskult +JSKULT_CONFIG_BUCKET=mbj-newdwh2021-staging-config + +LOG_LEVEL=INFO +ARISJ_DATA_FOLDER=DATA +ARISJ_BACKUP_FOLDER=arisj +JSKULT_CONFIG_CALENDAR_FOLDER=jskult/calendar +JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME=jskult_arisj_output_day_list.txt +DB_CONNECTION_MAX_RETRY_ATTEMPT=************ +DB_CONNECTION_RETRY_INTERVAL_INIT=************ +DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS=************ +DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS=************ diff --git a/ecs/jskult-batch-monthly/.gitignore b/ecs/jskult-batch-monthly/.gitignore new file mode 100644 index 00000000..bd0b37f8 --- /dev/null +++ b/ecs/jskult-batch-monthly/.gitignore @@ -0,0 +1,10 @@ +.vscode/settings.json +.env + +# python 
+__pycache__ + +# python test +.pytest_cache +.coverage +.report/ \ No newline at end of file diff --git a/ecs/jskult-batch-monthly/.vscode/launch.json b/ecs/jskult-batch-monthly/.vscode/launch.json new file mode 100644 index 00000000..e0267567 --- /dev/null +++ b/ecs/jskult-batch-monthly/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // IntelliSense を使用して利用可能な属性を学べます。 + // 既存の属性の説明をホバーして表示します。 + // 詳細情報は次を確認してください: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(DEBUG)jskult batch monthly", + "type": "python", + "request": "launch", + "program": "entrypoint.py", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/ecs/jskult-batch-monthly/.vscode/recommended_settings.json b/ecs/jskult-batch-monthly/.vscode/recommended_settings.json new file mode 100644 index 00000000..b5e79d73 --- /dev/null +++ b/ecs/jskult-batch-monthly/.vscode/recommended_settings.json @@ -0,0 +1,31 @@ +{ + "[python]": { + "editor.defaultFormatter": null, + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + // 自身の環境に合わせて変えてください + "python.defaultInterpreterPath": "", + "python.linting.lintOnSave": true, + "python.linting.enabled": true, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": [ + "--max-line-length=200", + "--ignore=F541" + ], + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Path": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length", "200", + "--ignore=F541" + ], + "python.testing.pytestArgs": [ + "tests/batch/ultmarc" + ], + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/ecs/jskult-batch-monthly/Dockerfile b/ecs/jskult-batch-monthly/Dockerfile new file mode 100644 index 00000000..dd891d48 --- /dev/null +++ b/ecs/jskult-batch-monthly/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.9 + +ENV TZ="Asia/Tokyo" + +WORKDIR /usr/src/app +COPY Pipfile Pipfile.lock ./ +RUN \ + apt update -y && \ + # パッケージのセキュリティアップデートのみを適用するコマンド + apt install -y unattended-upgrades && \ + unattended-upgrades && \ + pip install --upgrade pip wheel setuptools && \ + pip install pipenv --no-cache-dir && \ + pipenv install --system --deploy && \ + pip uninstall -y pipenv virtualenv-clone virtualenv + +COPY src ./src +COPY entrypoint.py entrypoint.py + +CMD ["python", "entrypoint.py"] diff --git a/ecs/jskult-batch-monthly/Pipfile b/ecs/jskult-batch-monthly/Pipfile new file mode 100644 index 00000000..fe0fdc38 --- /dev/null +++ b/ecs/jskult-batch-monthly/Pipfile @@ -0,0 +1,20 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +boto3 = "*" +sqlalchemy = "*" +tenacity = "*" +pymysql = "*" + +[dev-packages] +autopep8 = "*" +flake8 = "*" + +[requires] +python_version = "3.9" + +[pipenv] +allow_prereleases = true diff --git a/ecs/jskult-batch-monthly/Pipfile.lock b/ecs/jskult-batch-monthly/Pipfile.lock new file mode 100644 index 00000000..10d05022 --- /dev/null +++ b/ecs/jskult-batch-monthly/Pipfile.lock @@ -0,0 +1,262 @@ +{ + "_meta": { + "hash": { + "sha256": "a2be870e254760b62220c10400b05fa66d24b2cc1bcd6f21044735e320a62e53" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + 
"sha256:908f9c277325d68963dfcfce963a05336f0eb19505fc239c0ab9d01f4cba0296", + "sha256:e1e535e9fb23977252f13652ed2fa9b4f2d59a53b04a5f2fad3ee415b6a3b2b0" + ], + "index": "pypi", + "version": "==1.27.0" + }, + "botocore": { + "hashes": [ + "sha256:b9cb5b78a289f0615a48d85066f01869029aa41b95993f2c0c55003df037c23f", + "sha256:cac1333f41ec98e6f75bbba3f2c74b9e76aa3847469ecea6e7773a0af0049bee" + ], + "markers": "python_version >= '3.7'", + "version": "==1.30.0" + }, + "greenlet": { + "hashes": [ + "sha256:0a9dfcadc1d79696e90ccb1275c30ad4ec5fd3d1ab3ae6671286fac78ef33435", + "sha256:0f313771cb8ee0a04dfdf586b7d4076180d80c94be09049daeea018089b5b957", + "sha256:17503397bf6cbb5e364217143b6150c540020c51a3f6b08f9a20cd67c25e2ca8", + "sha256:180ec55cb127bc745669eddc9793ffab6e0cf7311e67e1592f183d6ca00d88c1", + "sha256:1b3f3568478bc21b85968e8038c4f98f4bf0039a692791bc324b5e0d1522f4b1", + "sha256:1bd4ea36f0aeb14ca335e0c9594a5aaefa1ac4e2db7d86ba38f0be96166b3102", + "sha256:21ebcb570e0d8501457d6a2695a44c5af3b6c2143dc6644ec73574beba067c90", + "sha256:24071eee113d75fedebaeb86264d94f04b5a24e311c5ba3e8003c07d00112a7e", + "sha256:270432cfdd6a50016b8259b3bbf398a3f7c06a06f2c68c7b93e49f53bc193bcf", + "sha256:271ed380389d2f7e4c1545b6e0837986e62504ab561edbaff05da9c9f3f98f96", + "sha256:2840187a94e258445e62ff1545e34f0b1a14aef4d0078e5c88246688d2b6515e", + "sha256:2cda110faee67613fed221f90467003f477088ef1cc84c8fc88537785a5b4de9", + "sha256:2e160a65cc6023a237be870f2072513747d512a1d018efa083acce0b673cccc0", + "sha256:2fcf7af83516db35af3d0ed5d182dea8585eddd891977adff1b74212f4bfd2fd", + "sha256:36cebce1f30964d5672fd956860e7e7b69772da69658d5743cb676b442eeff36", + "sha256:42bfe67824a9b53e73f568f982f0d1d4c7ac0f587d2e702a23f8a7b505d7b7c2", + "sha256:450a7e52a515402fd110ba807f1a7d464424bfa703be4effbcb97e1dfbfcc621", + "sha256:463d63ca5d8c236788284a9a44b9715372a64d5318a6b5eee36815df1ea0ba3d", + "sha256:4d0c0ffd732466ff324ced144fad55ed5deca36f6036c1d8f04cec69b084c9d6", + "sha256:4ff2a765f4861fc018827eab4df1992f7508d06c62de5d2fe8a6ac2233d4f1d0", + "sha256:53abf19b7dc62795c67b8d0a3d8ef866db166b21017632fff2624cf8fbf3481c", + "sha256:5552d7be37d878e9b6359bbffa0512d857bb9703616a4c0656b49c10739d5971", + "sha256:585810056a8adacd3152945ebfcd25deb58335d41f16ae4e0f3d768918957f9a", + "sha256:5942b1d6ba447cff1ec23a21ec525dde2288f00464950bc647f4e0f03bd537d1", + "sha256:5c355c99be5bb23e85d899b059a4f22fdf8a0741c57e7029425ee63eb436f689", + "sha256:5f61df4fe07864561f49b45c8bd4d2c42e3f03d2872ed05c844902a58b875028", + "sha256:665942d3a954c3e4c976581715f57fb3b86f4cf6bae3ac30b133f8ff777ac6c7", + "sha256:68368e908f14887fb202a81960bfbe3a02d97e6d3fa62b821556463084ffb131", + "sha256:6aac94ff957b5dea0216af71ab59c602e1b947b394e4f5e878a5a65643090038", + "sha256:889934aa8d72b6bfc46babd1dc4b817a56c97ec0f4a10ae7551fb60ab1f96fae", + "sha256:a00550757fca1b9cbc479f8eb1cf3514dbc0103b3f76eae46341c26ddcca67a9", + "sha256:a4a2d6ed0515c05afd5cc435361ced0baabd9ba4536ddfe8ad9a95bcb702c8ce", + "sha256:a8dd92fd76a61af2abc8ccad0c6c6069b3c4ebd4727ecc9a7c33aae37651c8c7", + "sha256:ab81f9ff3e3c2ca65e824454214c10985a846cd9bee5f4d04e15cd875d9fe13b", + "sha256:ac10196b8cde7a082e4e371ff171407270d3337c8d57ed43030094eb01d9c95c", + "sha256:b767930af686551dc96a5eb70af3736709d547ffa275c11a5e820bfb3ae61d8d", + "sha256:b9a1f4d256b81f59ba87bb7a29b9b38b1c018e052dba60a543cb0ddb5062d159", + "sha256:ba94c08321b5d345100fc64eb1ab235f42faf9aabba805cface55ebe677f1c2c", + "sha256:bab71f73001cd15723c4e2ca398f2f48e0a3f584c619eefddb1525e8986e06eb", + 
"sha256:bce5cf2b0f0b29680396c5c98ab39a011bd70f2dfa8b8a6811a69ee6d920cf9f", + "sha256:c02e514c72e745e49a3ae7e672a1018ba9b68460c21e0361054e956e5d595bc6", + "sha256:c3fb459ced6c5e3b2a895f23f1400f93e9b24d85c30fbe2d637d4f7706a1116b", + "sha256:cd31ab223e43ac64fd23f8f5dad249addadac2a459f040546200acbf7e84e353", + "sha256:ce70aa089ec589b5d5fab388af9f8c9f9dfe8fe4ad844820a92eb240d8628ddf", + "sha256:d47b2e1ad1429da9aa459ef189fbcd8a74ec28a16bc4c3f5f3cf3f88e36535eb", + "sha256:d61bad421c1f496f9fb6114dbd7c30a1dac0e9ff90e9be06f4472cbd8f7a1704", + "sha256:d7ba2e5cb119eddbc10874b41047ad99525e39e397f7aef500e6da0d6f46ab91", + "sha256:dde0ab052c7a1deee8d13d72c37f2afecee30ebdf6eb139790157eaddf04dd61", + "sha256:df34b52aa50a38d7a79f3abc9fda7e400791447aa0400ed895f275f6d8b0bb1f", + "sha256:e0fc20e6e6b298861035a5fc5dcf9fbaa0546318e8bda81112591861a7dcc28f", + "sha256:e20d5e8dc76b73db9280464d6e81bea05e51a99f4d4dd29c5f78dc79f294a5d3", + "sha256:e31d1a33dc9006b278f72cb0aacfe397606c2693aa2fdc0c2f2dcddbad9e0b53", + "sha256:e3a99f890f2cc5535e1b3a90049c6ca9ff9da9ec251cc130c8d269997f9d32ee", + "sha256:e7b192c3df761d0fdd17c2d42d41c28460f124f5922e8bd524018f1d35610682", + "sha256:ed0f4fad4c3656e34d20323a789b6a2d210a6bb82647d9c86dded372f55c58a1", + "sha256:f34ec09702be907727fd479046193725441aaaf7ed4636ca042734f469bb7451", + "sha256:f3530c0ec1fc98c43d5b7061781a8c55bd0db44f789f8152e19d9526cbed6021", + "sha256:f5672082576d0e9f52fa0fa732ff57254d65faeb4a471bc339fe54b58b3e79d2", + "sha256:ffb9f8969789771e95d3c982a36be81f0adfaa7302a1d56e29f168ca15e284b8" + ], + "markers": "platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", + "version": "==3.0.0a1" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "pymysql": { + "hashes": [ + "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96", + "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7" + ], + "index": "pypi", + "version": "==1.1.0" + }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "s3transfer": { + "hashes": [ + "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346", + "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:04383f1e3452f6739084184e427e9d5cb4e68ddc765d52157bf5ef30d5eca14f", + "sha256:125f9f7e62ddf8b590c069729080ffe18b68a20d9882eb0947f72e06274601d7", + "sha256:1822620c89779b85f7c23d535c8e04b79c517739ae07aaed48c81e591ed5498e", + "sha256:21583808d37f126a647652c90332ac1d3a102edf3c94bcc3319edcc0ea2300cc", + 
"sha256:218fb20c01e95004f50a3062bf4c447dcb360cab8274232f31947e254f118298", + "sha256:2269b1f9b8be47e52b70936069a25a3771eff53367aa5cc59bb94f28a6412e13", + "sha256:234678ed6576531b8e4be255b980f20368bf07241a2e67b84e6b0fe679edb9c4", + "sha256:28da17059ecde53e2d10ba813d38db942b9f6344360b2958b25872d5cb729d35", + "sha256:2c6ff5767d954f6091113fedcaaf49cdec2197ae4c5301fe83d5ae4393c82f33", + "sha256:36a87e26fe8fa8c466fae461a8fcb780d0a1cbf8206900759fc6fe874475a3ce", + "sha256:394ac3adf3676fad76d4b8fcecddf747627f17f0738dc94bac15f303d05b03d4", + "sha256:40a3dc52b2b16f08b5c16b9ee7646329e4b3411e9280e5e8d57b19eaa51cbef4", + "sha256:48111d56afea5699bab72c38ec95561796b81befff9e13d1dd5ce251ab25f51d", + "sha256:48b40dc2895841ea89d89df9eb3ac69e2950a659db20a369acf4259f68e6dc1f", + "sha256:513411d73503a6fc5804f01fae3b3d44f267c1b3a06cfeac02e9286a7330e857", + "sha256:51736cfb607cf4e8fafb693906f9bc4e5ee55be0b096d44bd7f20cd8489b8571", + "sha256:5f40e3a7d0a464f1c8593f2991e5520b2f5b26da24e88000bbd4423f86103d4f", + "sha256:6150560fcffc6aee5ec9a97419ac768c7a9f56baf7a7eb59cb4b1b6a4d463ad9", + "sha256:724355973297bbe547f3eb98b46ade65a67a3d5a6303f17ab59a2dc6fb938943", + "sha256:74ddcafb6488f382854a7da851c404c394be3729bb3d91b02ad86c5458140eff", + "sha256:7830e01b02d440c27f2a5be68296e74ccb55e6a5b5962ffafd360b98930b2e5e", + "sha256:7f31d4e7ca1dd8ca5a27fd5eaa0f9e2732fe769ff7dd35bf7bba179597e4df07", + "sha256:8741d3d401383e54b2aada37cbd10f55c5d444b360eae3a82f74a2be568a7710", + "sha256:910d45bf3673f0e4ef13858674bd23cfdafdc8368b45b948bf511797dbbb401d", + "sha256:aa995b21f853864996e4056d9fde479bcecf8b7bff4beb3555eebbbba815f35d", + "sha256:af7e2ba75bf84b64adb331918188dda634689a2abb151bc1a583e488363fd2f8", + "sha256:b0eaf82cc844f6b46defe15ad243ea00d1e39ed3859df61130c263dc7204da6e", + "sha256:b114a16bc03dfe20b625062e456affd7b9938286e05a3f904a025b9aacc29dd4", + "sha256:b47be4c6281a86670ea5cfbbbe6c3a65366a8742f5bc8b986f790533c60b5ddb", + "sha256:ba03518e64d86f000dc24ab3d3a1aa876bcbaa8aa15662ac2df5e81537fa3394", + "sha256:cc9c2630c423ac4973492821b2969f5fe99d9736f3025da670095668fbfcd4d5", + "sha256:cf07ff9920cb3ca9d73525dfd4f36ddf9e1a83734ea8b4f724edfd9a2c6e82d9", + "sha256:cf175d26f6787cce30fe6c04303ca0aeeb0ad40eeb22e3391f24b32ec432a1e1", + "sha256:d0aeb3afaa19f187a70fa592fbe3c20a056b57662691fd3abf60f016aa5c1848", + "sha256:e186e9e95fb5d993b075c33fe4f38a22105f7ce11cecb5c17b5618181e356702", + "sha256:e2d5c3596254cf1a96474b98e7ce20041c74c008b0f101c1cb4f8261cb77c6d3", + "sha256:e3189432db2f5753b4fde1aa90a61c69976f4e7e31d1cf4611bfe3514ed07478", + "sha256:e3a6b2788f193756076061626679c5c5a6d600ddf8324f986bc72004c3e9d92e", + "sha256:ead58cae2a089eee1b0569060999cb5f2b2462109498a0937cc230a7556945a1", + "sha256:f2f389f77c68dc22cb51f026619291c4a38aeb4b7ecb5f998fd145b2d81ca513", + "sha256:f593170fc09c5abb1205a738290b39532f7380094dc151805009a07ae0e85330" + ], + "index": "pypi", + "version": "==2.0.17" + }, + "tenacity": { + "hashes": [ + "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0", + "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0" + ], + "index": "pypi", + "version": "==8.2.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36", + "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2" + ], + "markers": "python_version >= '3.7'", + "version": "==4.7.1" + }, + "urllib3": { + "hashes": [ + "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f", + 
"sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.16" + } + }, + "develop": { + "autopep8": { + "hashes": [ + "sha256:86e9303b5e5c8160872b2f5ef611161b2893e9bfe8ccc7e2f76385947d57a2f1", + "sha256:f9849cdd62108cb739dbcdbfb7fdcc9a30d1b63c4cc3e1c1f893b5360941b61c" + ], + "index": "pypi", + "version": "==2.0.2" + }, + "flake8": { + "hashes": [ + "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7", + "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181" + ], + "index": "pypi", + "version": "==6.0.0" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053", + "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610" + ], + "markers": "python_version >= '3.6'", + "version": "==2.10.0" + }, + "pyflakes": { + "hashes": [ + "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf", + "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd" + ], + "markers": "python_version >= '3.6'", + "version": "==3.0.1" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + } + } +} diff --git a/ecs/jskult-batch-monthly/README.md b/ecs/jskult-batch-monthly/README.md new file mode 100644 index 00000000..f6e7c2da --- /dev/null +++ b/ecs/jskult-batch-monthly/README.md @@ -0,0 +1,48 @@ +# 実消化&アルトマーク 月次バッチ + +## 概要 + +実消化&アルトマークの月次バッチ処理。 + +## 環境情報 + +- Python 3.9 +- MySQL 8.23 +- VSCode + +## 環境構築 + +- Python の構築 + + - Merck_NewDWH 開発 2021 の Wiki、[Python 環境構築](https://nds-tyo.backlog.com/alias/wiki/1874930)を参照 + - 「Pipenv の導入」までを行っておくこと + - 構築完了後、プロジェクト配下で以下のコマンドを実行し、Python の仮想環境を作成する + - `pipenv install --dev --python ` + - この手順で出力される仮想環境のパスは、後述する VSCode の設定手順で使用するため、控えておく + +- MySQL の環境構築 + - Windows の場合、以下のリンクからダウンロードする + - + - Docker を利用する場合、「newsdwh-tools」リポジトリの MySQL 設定を使用すると便利 + - 「crm-table-to-ddl」フォルダ内で以下のコマンドを実行すると + - `docker-compose up -d` + - Docker の構築手順は、[Docker のセットアップ手順](https://nds-tyo.backlog.com/alias/wiki/1754332)を参照のこと + - データを投入する + - 立ち上げたデータベースに「src05」スキーマを作成する + - [ローカル開発用データ](https://ndstokyo.sharepoint.com/:f:/r/sites/merck-new-dwh-team/Shared%20Documents/03.NewDWH%E6%A7%8B%E7%AF%89%E3%83%95%E3%82%A7%E3%83%BC%E3%82%BA3/02.%E9%96%8B%E7%99%BA/90.%E9%96%8B%E7%99%BA%E5%85%B1%E6%9C%89/%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E9%96%8B%E7%99%BA%E7%94%A8%E3%83%87%E3%83%BC%E3%82%BF?csf=1&web=1&e=VVcRUs)をダウンロードし、mysql コマンドを使用して復元する + - `mysql -h <ホスト名> -P <ポート> -u <ユーザー名> -p src05 < src05_dump.sql` +- 環境変数の設定 + - 「.env.example」ファイルをコピーし、「.env」ファイルを作成する + - 環境変数を設定する。設定内容は PRJ メンバーより共有を受けてください +- VSCode の設定 + - 「.vscode/recommended_settings.json」ファイルをコピーし、「settings.json」ファイルを作成する + - 「python.defaultInterpreterPath」を、Python の構築手順で作成した仮想環境のパスに変更する + +## 実行 + +- VSCode 上で「F5」キーを押下すると、バッチ処理が起動する。 +- 「entrypoint.py」が、バッチ処理のエントリーポイント。 +- 実際の処理は、「src/jobctrl_monthly.py」で行っている。 + + +## フォルダ構成(工事中) diff --git a/ecs/jskult-batch-monthly/entrypoint.py 
b/ecs/jskult-batch-monthly/entrypoint.py new file mode 100644 index 00000000..4ab8d3b3 --- /dev/null +++ b/ecs/jskult-batch-monthly/entrypoint.py @@ -0,0 +1,10 @@ +"""実消化&アルトマーク 月次バッチのエントリーポイント""" +from src import jobctrl_monthly + +if __name__ == '__main__': + try: + exit(jobctrl_monthly.exec()) + except Exception: + # エラーが起きても、正常系のコードで返す。 + # エラーが起きた事実はbatch_process内でログを出す。 + exit(0) diff --git a/ecs/jskult-batch-monthly/src/__init__.py b/ecs/jskult-batch-monthly/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/aws/__init__.py b/ecs/jskult-batch-monthly/src/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/aws/s3.py b/ecs/jskult-batch-monthly/src/aws/s3.py new file mode 100644 index 00000000..17ed0fe9 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/aws/s3.py @@ -0,0 +1,95 @@ +import os.path as path +import tempfile + +import boto3 + +from src.system_var import environment + + +class S3Client: + __s3_client = boto3.client('s3') + _bucket_name: str + + def list_objects(self, bucket_name: str, folder_name: str): + response = self.__s3_client.list_objects_v2(Bucket=bucket_name, Prefix=folder_name) + if response['KeyCount'] == 0: + return [] + contents = response['Contents'] + # 末尾がスラッシュで終わるものはフォルダとみなしてスキップする + objects = [{'filename': content['Key'], 'size': content['Size']} for content in contents if not content['Key'].endswith('/')] + return objects + + def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None: + copy_source = {'Bucket': src_bucket, 'Key': src_key} + self.__s3_client.copy(copy_source, dest_bucket, dest_key) + return + + def download_file(self, bucket_name: str, file_key: str, file): + self.__s3_client.download_fileobj( + Bucket=bucket_name, + Key=file_key, + Fileobj=file + ) + return + + def upload_file(self, local_file_path: str, bucket_name: str, file_key: str): + self.__s3_client.upload_file( + local_file_path, + Bucket=bucket_name, + Key=file_key + ) + + def delete_file(self, bucket_name: str, file_key: str): + self.__s3_client.delete_object( + Bucket=bucket_name, + Key=file_key + ) + + +class S3Bucket(): + _s3_client = S3Client() + _bucket_name: str = None + + +class ConfigBucket(S3Bucket): + _bucket_name = environment.JSKULT_CONFIG_BUCKET + + def download_arisj_output_day_list(self): + # 一時ファイルとして保存する + temporary_dir = tempfile.mkdtemp() + temporary_file_path = path.join(temporary_dir, environment.JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME) + arisj_output_day_list_key = f'{environment.JSKULT_CONFIG_CALENDAR_FOLDER}/{environment.JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME}' + with open(temporary_file_path, mode='wb') as f: + self._s3_client.download_file(self._bucket_name, arisj_output_day_list_key, f) + f.seek(0) + return temporary_file_path + + +class ArisjBucket(S3Bucket): + _bucket_name = environment.ARISJ_DATA_BUCKET + _folder = environment.ARISJ_BACKUP_FOLDER + + def upload_arisj_csv_file(self, arisj_create_csv: str, csv_file_path: str): + # s3にCSVファイルをUPする + Bucket = environment.ARISJ_DATA_BUCKET + folder = environment.ARISJ_DATA_FOLDER + csv_file_name = f'{folder}/{arisj_create_csv}' + s3_client = S3Client() + s3_client.upload_file(csv_file_path, Bucket, csv_file_name) + return + + def backup_arisj_csv_file(self, dat_file_key: str, datetime_key: str): + # バックアップバケットにコピー + arisj_backup_bucket = ArisjBackupBucket() + folder = environment.ARISJ_DATA_FOLDER + dat_file_key = f'{folder}/{dat_file_key}' + 
backup_key = f'{arisj_backup_bucket._folder}/{datetime_key}/{dat_file_key.replace(f"{self._folder}/", "")}' + self._s3_client.copy(self._bucket_name, dat_file_key, arisj_backup_bucket._bucket_name, backup_key) + + +class JskUltBackupBucket(S3Bucket): + _bucket_name = environment.JSKULT_BACKUP_BUCKET + + +class ArisjBackupBucket(JskUltBackupBucket): + _folder = environment.ARISJ_BACKUP_FOLDER diff --git a/ecs/jskult-batch-monthly/src/batch/batch_functions.py b/ecs/jskult-batch-monthly/src/batch/batch_functions.py new file mode 100644 index 00000000..40cf84f2 --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/batch_functions.py @@ -0,0 +1,51 @@ +"""バッチ処理の共通関数""" +import logging +import textwrap +from datetime import datetime + +from src.db.database import Database +from src.error.exceptions import BatchOperationException, DBException + + +def get_batch_statuses() -> tuple[str, str]: + """日付テーブルから、以下を取得して返す。 + - バッチ処理中フラグ + - 処理日(YYYY/MM/DD) + + Raises: + BatchOperationException: 日付テーブルが取得できないとき、何らかのエラーが発生したとき + + Returns: + tuple[str, str]: [0]バッチ処理中フラグ,[1]処理日 + """ + db = Database.get_instance() + sql = 'SELECT bch_actf, src05.get_syor_date() AS syor_date FROM src05.hdke_tbl' + try: + db.connect() + hdke_tbl_result = db.execute_select(sql) + except DBException as e: + raise BatchOperationException(e) + finally: + db.disconnect() + + if len(hdke_tbl_result) == 0: + raise BatchOperationException('日付テーブルが取得できませんでした') + + # 必ず1件取れる + hdke_tbl_record = hdke_tbl_result[0] + batch_processing_flag = hdke_tbl_record['bch_actf'] + syor_date = hdke_tbl_record['syor_date'] + # 処理日を文字列に変換する + syor_date_str = datetime.strftime(syor_date, '%Y/%m/%d') + + return batch_processing_flag, syor_date_str + + +def logging_sql(logger: logging.Logger, sql: str) -> None: + """SQL文をデバッグログで出力する + + Args: + logger (logging.Logger): ロガー + sql (str): SQL文 + """ + logger.debug(f'\n{"-" * 15}\n{textwrap.dedent(sql)[1:-1]}\n{"-" * 15}') diff --git a/ecs/jskult-batch-monthly/src/batch/common/__init__.py b/ecs/jskult-batch-monthly/src/batch/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ecs/jskult-batch-monthly/src/batch/common/batch_context.py b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py new file mode 100644 index 00000000..8c8c12fb --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/common/batch_context.py @@ -0,0 +1,29 @@ +class BatchContext: + __instance = None + __syor_date: str # 処理日(yyyy/mm/dd形式) + __is_arisj_output_day: bool # 月次バッチ起動日フラグ + + def __init__(self) -> None: + self.__is_arisj_output_day = False + + @classmethod + def get_instance(cls): + if cls.__instance is None: + cls.__instance = cls() + return cls.__instance + + @property + def syor_date(self): + return self.__syor_date + + @syor_date.setter + def syor_date(self, syor_date_str: str): + self.__syor_date = syor_date_str + + @property + def is_arisj_output_day(self): + return self.__is_arisj_output_day + + @is_arisj_output_day.setter + def is_arisj_output_day(self, flag: bool): + self.__is_arisj_output_day = flag diff --git a/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py b/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py new file mode 100644 index 00000000..b456f03c --- /dev/null +++ b/ecs/jskult-batch-monthly/src/batch/common/calendar_file.py @@ -0,0 +1,32 @@ +from src.system_var import constants + + +class CalendarFile: + """カレンダーファイル""" + + __calendar_file_lines: list[str] + + def __init__(self, calendar_file_path): + with open(calendar_file_path) as f: + 
        self.__calendar_file_lines: list[str] = f.readlines()
+
+    def compare_date(self, date_str: str) -> bool:
+        """与えられた日付がカレンダーファイル内に含まれているかどうか
+        カレンダーファイル内の日付はyyyy/mm/ddで書かれている前提
+        コメント(#)が含まれている行は無視される
+
+        Args:
+            date_str (str): yyyy/mm/dd文字列
+
+        Returns:
+            bool: 含まれていればTrue
+        """
+        for calendar_date in self.__calendar_file_lines:
+            # コメント行が含まれている場合はスキップ
+            if constants.CALENDAR_COMMENT_SYMBOL in calendar_date:
+                continue
+
+            if date_str in calendar_date:
+                return True
+
+        return False
diff --git a/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py
new file mode 100644
index 00000000..d3f81597
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/batch/output_arisj_file_process.py
@@ -0,0 +1,305 @@
+
+from datetime import datetime
+
+from src.db.database import Database
+from src.error.exceptions import BatchOperationException
+from src.logging.get_logger import get_logger
+from src.aws.s3 import ArisjBucket
+from src.batch.common.batch_context import BatchContext
+import tempfile
+import os.path as path
+import csv
+
+logger = get_logger('ARIS-J連携データ出力')
+
+sql_err_msg = "SQL実行エラーです。"
+
+
+def exec():
+    """ 実消化&アルトマーク月次バッチ """
+    create_date = datetime.now().strftime('%Y%m%d%H%M%S')
+    arisj_csv_file_name = f'D0004_ARIS_M_DCF_{create_date}.csv'
+
+    try:
+        logger.info('バッチ処理を開始しました。')
+
+        try:
+            db = Database.get_instance()
+            # DB接続
+            db.connect()
+        except Exception as e:
+            logger.info('DB接続エラーです')
+            raise e
+
+        # トランザクションの開始
+        db.begin()
+
+        # 正常系データの反映
+        # 前回保管した施設IFワークを削除する
+        delete_previous_wk_inst_aris_if_record(db)
+
+        # 正常系データを取得しWKテーブルに保存する。
+        insert_normal_record_into_wk_inst_aris_if(db)
+
+        # 正常系データの件数を取得
+        suc_count = count_wk_inst_aris_if_record(db)
+
+        # 警告系データの反映
+        # 前回保管した施設IF警告ワークを削除する
+        delete_previous_wk_inst_aris_if_wrn_record(db)
+
+        # 異常系データを取得しWKテーブルに保存する。
+        insert_abnormal_record_into_wk_inst_aris_if_wrn(db)
+
+        # 異常系データの件数を取得
+        wrn_count = count_wk_inst_aris_if_wrn_record(db)
+
+        # CSVファイルの作成用のSQL実行
+        record_csv = csv_data_select(db)
+
+        # CSVファイル作成
+        csv_file_path = make_csv_data(record_csv, arisj_csv_file_name)
+
+        # トランザクションの終了
+        db.commit()
+
+        # ログに処理件数を出力
+        sum_count = suc_count + wrn_count
+        logger.info(f'(対象件数:{sum_count}/正常件数:{suc_count}/警告件数:{wrn_count})')
+
+        arisj_bucket = ArisjBucket()
+        # CSVファイル移動処理
+        try:
+            arisj_bucket.upload_arisj_csv_file(arisj_csv_file_name, csv_file_path)
+        except Exception as e:
+            logger.info('S3バケットArisjへCSVデータを移動できませんでした。')
+            raise e
+
+        # 処理後ファイルをバックアップ
+        try:
+            batch_context = BatchContext.get_instance()
+            arisj_bucket.backup_arisj_csv_file(arisj_csv_file_name, batch_context.syor_date)
+        except Exception as e:
+            logger.info('S3バケットArisjバックアップへCSVデータをコピーできませんでした。')
+            raise e
+
+        logger.info('バッチ処理を終了しました。')
+
+    except Exception as e:
+        raise BatchOperationException(e)
+
+    finally:
+        # 終了時に必ずコミットする
+        db.commit()
+        db.disconnect()
+
+
+def delete_previous_wk_inst_aris_if_record(db):
+    # 前回保管した施設IFワークを削除する
+    try:
+        # WKテーブルの過去分削除SQL
+        sql = """\
+        DELETE FROM src05.wk_inst_aris_if
+        """
+        db.execute(sql)
+        return
+    except Exception as e:
+        logger.debug(f'{sql_err_msg}')
+        raise e
+
+
+def insert_normal_record_into_wk_inst_aris_if(db):
+    # 正常系データを取得しWKテーブルに保存する。
+    try:
+        # 正常系データを取得しWKテーブルに保存SQL
+        sql = """\
+        INSERT src05.wk_inst_aris_if
+        SELECT
+            TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd
+            ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form
+            ,TRIM(' ' FROM TRIM(' ' FROM
SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' FROM ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' FROM SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' FROM cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' FROM cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' FROM ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' FROM ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date + ,sysdate() + FROM src05.com_inst ci + LEFT JOIN src05.mst_prefc cp + ON ci.prefc_cd = cp.prefc_cd + LEFT JOIN src05.mst_city cc + ON ci.prefc_cd = cc.prefc_cd + AND ci.city_cd = cc.city_cd + LEFT OUTER JOIN src05.com_inst_div cd + ON ci.inst_div_cd = cd.inst_div_cd + WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%' + AND ci.dcf_dsf_inst_cd IS NOT NULL + AND ci.form_inst_name_kanji IS NOT NULL + AND ci.prefc_cd IS NOT NULL + AND cp.prefc_name IS NOT NULL + AND cc.city_name IS NOT NULL + AND ci.inst_addr IS NOT NULL + ORDER BY ci.dcf_dsf_inst_cd + """ + + db.execute(sql) + return + except Exception as e: + logger.debug(f'{sql_err_msg}') + raise e + + +def count_wk_inst_aris_if_record(db): + # 正常系データの件数を取得 + try: + # 正常系データの件数を取得SQL + sql = """\ + SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if + """ + record_count = db.execute_select(sql) + return record_count[0]['countNum'] + except Exception as e: + logger.debug(f'{sql_err_msg}') + raise e + + +def delete_previous_wk_inst_aris_if_wrn_record(db): + # 前回保管した施設IF警告ワークを削除する + try: + # 異常系WKテーブルの過去分削除SQL + sql = """\ + DELETE FROM src05.wk_inst_aris_if_wrn + """ + + db.execute(sql) + return + except Exception as e: + logger.debug(f'{sql_err_msg}') + raise e + + +def insert_abnormal_record_into_wk_inst_aris_if_wrn(db): + # 異常系データを取得しWKテーブルに保存する。 + try: + # 異常系データを取得しWKテーブルに保存SQL + sql = """\ + INSERT src05.wk_inst_aris_if_wrn + SELECT + TRIM(' ' FROM TRIM(' ' FROM SUBSTRING(ci.dcf_dsf_inst_cd,3))) AS dcf_inst_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kanji,1,50))) AS inst_name_form + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.inst_name_kanji,1,10))) AS inst_name + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(ci.form_inst_name_kana,1,80))) AS inst_name_kana_form + ,TRIM(' ' FROM TRIM(' ' from ci.prefc_cd)) AS pref_cd + ,TRIM(' ' FROM TRIM(' ' from SUBSTR(cp.prefc_name,1,8))) AS pref_name + ,TRIM(' ' FROM TRIM(' ' from ci.postal_number)) AS postal_cd + ,TRIM(' ' FROM TRIM(' ' from cc.city_name)) AS city_name + ,TRIM(' ' FROM TRIM(' ' from ci.inst_addr)) AS address + ,TRIM(' ' FROM TRIM(' ' from cd.inst_div_name)) + ,TRIM(' ' FROM TRIM(' ' from ci.inst_phone_number)) AS phone_no + ,TRIM(' ' FROM TRIM(' ' from ci.inst_div_cd)) + ,TRIM(' ' FROM TRIM(' ' from ci.manage_cd)) + ,DATE_FORMAT(ci.sys_update_date,'%Y%m%d') AS update_date + ,DATE_FORMAT(ci.abolish_ymd,'%Y%m%d') AS delete_date + ,IF(ci.dcf_dsf_inst_cd IS NULL,'bi0402000001', NULL) AS wrnid_dcf_inst_cd + ,IF(ci.form_inst_name_kanji IS NULL,'bi0402000002', NULL) AS wrnid_inst_name_form + ,IF(ci.prefc_cd IS NULL,'bi0402000003', NULL) AS wrnid_pref_cd + ,IF(cp.prefc_name IS NULL,'bi0402000004', NULL) AS wrnid_pref_name + ,IF(cc.city_name IS NULL,'bi0402000005', NULL) AS wrnid_city_name + ,IF(ci.inst_addr IS NULL,'bi0402000006', 
NULL) AS wrnid_address
+            ,sysdate()
+        FROM src05.com_inst ci
+        LEFT JOIN src05.mst_prefc cp
+            ON ci.prefc_cd = cp.prefc_cd
+        LEFT JOIN src05.mst_city cc
+            ON ci.prefc_cd = cc.prefc_cd
+            AND ci.city_cd = cc.city_cd
+        LEFT OUTER JOIN src05.com_inst_div cd
+            ON ci.inst_div_cd = cd.inst_div_cd
+        WHERE ci.dcf_dsf_inst_cd NOT LIKE '%9999999%'
+        AND( ci.dcf_dsf_inst_cd IS NULL
+            OR ci.form_inst_name_kanji IS NULL
+            OR ci.prefc_cd IS NULL
+            OR cp.prefc_name IS NULL
+            OR cc.city_name IS NULL
+            OR ci.inst_addr IS NULL)
+        ORDER BY ci.dcf_dsf_inst_cd
+        """
+
+        db.execute(sql)
+        return
+    except Exception as e:
+        logger.debug(f'{sql_err_msg}')
+        raise e
+
+
+def count_wk_inst_aris_if_wrn_record(db):
+    # 異常系データの件数を取得
+    try:
+        # 異常系データの件数を取得SQL
+        sql = """\
+        SELECT COUNT(*) AS countNum FROM src05.wk_inst_aris_if_wrn
+        """
+
+        record_count = db.execute_select(sql)
+
+        return record_count[0]['countNum']
+    except Exception as e:
+        logger.debug(f'{sql_err_msg}')
+        raise e
+
+
+def csv_data_select(db):
+    # CSVファイルの作成用のSQL実行
+    try:
+        # CSVファイルの作成用のSQL
+        sql = """\
+        SELECT dcf_inst_cd, inst_name_form, inst_name, inst_name_kana_form, pref_cd, pref_name,
+            postal_cd, city_name, address, inst_div_name, phone_no, inst_div_cd, manage_cd,
+            '', inst_delete_date
+        FROM src05.wk_inst_aris_if ORDER BY dcf_inst_cd
+        """
+
+        return db.execute_select(sql)
+    except Exception as e:
+        logger.debug(f'{sql_err_msg}')
+        raise e
+
+
+def make_csv_data(record_csv: list, arisj_csv_file_name: str):
+    # 一時ファイルとして保存する(CSVファイル)
+    try:
+        temporary_dir = tempfile.mkdtemp()
+        csv_file_path = path.join(temporary_dir, arisj_csv_file_name)
+
+        head_str = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TJ_HOSPITALSHORT', 'TK_HOSPITAL',
+                    'TC_PREFECTURE', 'TJ_PREFECTURE', 'TJ_ZIPCODE', 'TJ_CITY', 'TJ_ADDRESS', 'TJ_DEPARTMENT',
+                    'TJ_TELEPHONENUMBER', 'TC_HOSPITALCAT', 'TC_HOSPITALTYPE', 'TS_UPDATE', 'TD_UPDATE']
+
+        with open(csv_file_path, mode='w', encoding='cp932', newline='') as csv_file:
+            # ヘッダ行書き込み(くくり文字をつけない為にwriterowではなく、writeを使用しています)
+            csv_file.write(f"{','.join(head_str)}\r\n")
+
+            # Shift-JIS、CRLF、「"」囲いありで書き込む
+            writer = csv.writer(csv_file, delimiter=',', lineterminator='\r\n',
+                                quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL,
+                                strict=True
+                                )
+            # データ部分書き込み
+            for record_data in record_csv:
+                record_value = list(record_data.values())
+                csv_data = ['' if n is None else n for n in record_value]
+                writer.writerow(csv_data)
+
+    except Exception as e:
+        logger.info('ワークデータの作成に失敗しました。')
+        logger.info('バッチ処理を異常終了しました。')
+        raise e
+
+    return csv_file_path
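参考までに、make_csv_data が出力するCSVの形(ヘッダ行はくくり文字なし、データ行は全項目を「"」で囲む)を、ダミー値による最小スケッチで示す。列は説明用に3項目へ省略しており、実際のレコード値・列数とは異なる。

```python
# make_csv_data と同じ書き込み方をダミー値で再現した最小スケッチ。
import csv
import io

head_str = ['TC_HOSPITAL', 'TJ_HOSPITAL', 'TD_UPDATE']  # 実際は15項目
record = {'dcf_inst_cd': '1234567', 'inst_name_form': 'サンプル病院', 'inst_delete_date': None}

buf = io.StringIO()
# ヘッダ行はくくり文字をつけないため write で書く
buf.write(f"{','.join(head_str)}\r\n")
writer = csv.writer(buf, delimiter=',', lineterminator='\r\n',
                    quotechar='"', doublequote=True, quoting=csv.QUOTE_ALL, strict=True)
# NULL(None)は空文字に置換してから書き込む
writer.writerow(['' if v is None else v for v in record.values()])

print(buf.getvalue())
# TC_HOSPITAL,TJ_HOSPITAL,TD_UPDATE
# "1234567","サンプル病院",""
```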
diff --git a/ecs/jskult-batch-monthly/src/db/__init__.py b/ecs/jskult-batch-monthly/src/db/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/ecs/jskult-batch-monthly/src/db/database.py b/ecs/jskult-batch-monthly/src/db/database.py
new file mode 100644
index 00000000..f67a21b9
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/db/database.py
@@ -0,0 +1,178 @@
+from sqlalchemy import (Connection, CursorResult, Engine, QueuePool,
+                        create_engine, text)
+from sqlalchemy.engine.url import URL
+from tenacity import retry, stop_after_attempt, wait_exponential
+
+from src.error.exceptions import DBException
+from src.logging.get_logger import get_logger
+from src.system_var import environment
+
+logger = get_logger(__name__)
+
+
+class Database:
+    """データベース操作クラス"""
+    __connection: Connection = None
+    __engine: Engine = None
+    __host: str = None
+    __port: str = None
+    __username: str = None
+    __password: str = None
+    __schema: str = None
+    __connection_string: str = None
+
+    def __init__(self, username: str, password: str, host: str, port: int, schema: str) -> None:
+        """このクラスの新たなインスタンスを初期化します
+
+        Args:
+            username (str): DBユーザー名
+            password (str): DBパスワード
+            host (str): DBホスト名
+            port (int): DBポート
+            schema (str): DBスキーマ名
+        """
+        self.__username = username
+        self.__password = password
+        self.__host = host
+        self.__port = int(port)
+        self.__schema = schema
+
+        self.__connection_string = URL.create(
+            drivername='mysql+pymysql',
+            username=self.__username,
+            password=self.__password,
+            host=self.__host,
+            port=self.__port,
+            database=self.__schema,
+            query={"charset": "utf8mb4"}
+        )
+
+        self.__engine = create_engine(
+            self.__connection_string,
+            pool_timeout=5,
+            poolclass=QueuePool
+        )
+
+    @classmethod
+    def get_instance(cls):
+        """インスタンスを取得します
+
+        Returns:
+            Database: DB操作クラスインスタンス
+        """
+        return cls(
+            username=environment.DB_USERNAME,
+            password=environment.DB_PASSWORD,
+            host=environment.DB_HOST,
+            port=environment.DB_PORT,
+            schema=environment.DB_SCHEMA
+        )
+
+    @retry(
+        wait=wait_exponential(
+            multiplier=environment.DB_CONNECTION_RETRY_INTERVAL_INIT,
+            min=environment.DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS,
+            max=environment.DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS
+        ),
+        stop=stop_after_attempt(environment.DB_CONNECTION_MAX_RETRY_ATTEMPT))
+    def connect(self):
+        """
+        DBに接続します。接続に失敗した場合、リトライします。
+        Raises:
+            DBException: 接続失敗
+        """
+        try:
+            self.__connection = self.__engine.connect()
+        except Exception as e:
+            raise DBException(e)
+
+    def execute_select(self, select_query: str, parameters=None) -> list[dict]:
+        """SELECTクエリを実行します。
+
+        Args:
+            select_query (str): SELECT文
+            parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None.
+
+        Raises:
+            DBException: DBエラー
+
+        Returns:
+            list[dict]: カラム名: 値の辞書リスト
+        """
+        if self.__connection is None:
+            raise DBException('DBに接続していません')
+
+        result = None
+        try:
+            # トランザクションが開始している場合は、トランザクションを引き継ぐ
+            if self.__connection.in_transaction():
+                result = self.__connection.execute(text(select_query), parameters)
+            else:
+                # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。
+                result = self.__execute_with_transaction(select_query, parameters)
+        except Exception as e:
+            raise DBException(f'SQL Error: {e}')
+
+        result_rows = result.mappings().all()
+        return result_rows
+
+    def execute(self, query: str, parameters=None) -> CursorResult:
+        """SQLクエリを実行します。
+
+        Args:
+            query (str): SQL文
+            parameters (dict, optional): クエリのプレースホルダーに埋め込む変数の辞書. Defaults to None.
+
+        Raises:
+            DBException: DBエラー
+
+        Returns:
+            CursorResult: 取得結果
+        """
+        if self.__connection is None:
+            raise DBException('DBに接続していません')
+
+        result = None
+        try:
+            # トランザクションが開始している場合は、トランザクションを引き継ぐ
+            if self.__connection.in_transaction():
+                result = self.__connection.execute(text(query), parameters)
+            else:
+                # トランザクションが明示的に開始していない場合は、クエリ単位でトランザクションをbegin-commitする。
+                result = self.__execute_with_transaction(query, parameters)
+        except Exception as e:
+            raise DBException(f'SQL Error: {e}')
+
+        return result
+
+    def begin(self):
+        """トランザクションを開始します。"""
+        if not self.__connection.in_transaction():
+            self.__connection.begin()
+
+    def commit(self):
+        """トランザクションをコミットします"""
+        if self.__connection.in_transaction():
+            self.__connection.commit()
+
+    def rollback(self):
+        """トランザクションをロールバックします"""
+        if self.__connection.in_transaction():
+            self.__connection.rollback()
+
+    def disconnect(self):
+        """DB接続を切断します。"""
+        if self.__connection is not None:
+            self.__connection.close()
+            self.__connection = None
+
+    def __execute_with_transaction(self, query: str, parameters: dict):
+        # トランザクションを開始してクエリを実行する
+        with self.__connection.begin():
+            try:
+                result = self.__connection.execute(text(query), parameters=parameters)
+            except Exception as e:
+                self.__connection.rollback()
+                raise e
+        # ここでコミットされる
+        return result
diff --git a/ecs/jskult-batch-monthly/src/error/__init__.py b/ecs/jskult-batch-monthly/src/error/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/ecs/jskult-batch-monthly/src/error/exceptions.py b/ecs/jskult-batch-monthly/src/error/exceptions.py
new file mode 100644
index 00000000..055c24f6
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/error/exceptions.py
@@ -0,0 +1,10 @@
+class MeDaCaException(Exception):
+    pass
+
+
+class DBException(MeDaCaException):
+    pass
+
+
+class BatchOperationException(MeDaCaException):
+    pass
diff --git a/ecs/jskult-batch-monthly/src/jobctrl_monthly.py b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py
new file mode 100644
index 00000000..0534808d
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/jobctrl_monthly.py
@@ -0,0 +1,72 @@
+"""実消化&アルトマーク 月次バッチ処理"""
+
+from src.aws.s3 import ConfigBucket
+from src.aws.s3 import ArisjBackupBucket
+from src.batch.batch_functions import get_batch_statuses
+from src.batch.common.batch_context import BatchContext
+from src.batch.common.calendar_file import CalendarFile
+from src.error.exceptions import BatchOperationException
+from src.logging.get_logger import get_logger
+from src.system_var import constants
+from src.batch import output_arisj_file_process
+
+logger = get_logger('月次処理コントロール(ARIS-J)')
+
+# バッチ共通設定を取得
+batch_context = BatchContext.get_instance()
+arisj_bucket = ArisjBackupBucket()
+
+
+def exec():
+    try:
+        logger.info('月次バッチ:開始')
+        try:
+            logger.info('処理日取得')
+            # バッチ処理中フラグ、処理日を取得
+            batch_processing_flag, syor_date = get_batch_statuses()
+        except BatchOperationException as e:
+            logger.exception(f'日次ジョブ取得エラー(異常終了){e}')
+            return constants.BATCH_EXIT_CODE_SUCCESS
+
+        # 日次バッチ処理中の場合、後続の処理は行わない
+        logger.info('日次ジョブ処理中判定')
+        if batch_processing_flag == constants.BATCH_ACTF_BATCH_IN_PROCESSING:
+            logger.error('日次ジョブ処理中エラー(異常終了)')
+            return constants.BATCH_EXIT_CODE_SUCCESS
+
+        # バッチ共通設定に処理日を追加
+        batch_context.syor_date = syor_date
+
+        logger.info(f'処理日取得={syor_date}')
+
+        # 稼働日かどうかを、実消化&アルトマーク月次バッチ稼働日ファイルをダウンロードして判定
+        try:
+            arisj_output_day_list_file_path = ConfigBucket().download_arisj_output_day_list()
+            arisj_output_day_calendar = CalendarFile(arisj_output_day_list_file_path)
+            batch_context.is_arisj_output_day = arisj_output_day_calendar.compare_date(syor_date)
+        except Exception as e:
+            logger.exception(f'処理日取得エラー(異常終了){e}')
+            return constants.BATCH_EXIT_CODE_SUCCESS
+
+        # 実消化&アルトマーク月次バッチ稼働日かどうかをログに残し、稼働日でなければ処理を終了する
+        if not batch_context.is_arisj_output_day:
+            logger.info('ARIS-J連携データ出力日でない為、処理終了')
+            return constants.BATCH_EXIT_CODE_SUCCESS
+
+        logger.info('ARIS-J連携データ出力日です')
+
+        try:
+            logger.info('ARIS-J連携データ出力:起動')
+            output_arisj_file_process.exec()
+            logger.info('ARIS-J連携データ出力:終了')
+        except BatchOperationException as e:
+            logger.exception(f'ARIS-J連携データ出力(異常終了){e}')
+            return constants.BATCH_EXIT_CODE_SUCCESS
+
+        # 正常終了を保守ユーザーに通知
+        logger.info('[NOTICE]月次バッチ:終了(正常終了)')
+        return constants.BATCH_EXIT_CODE_SUCCESS
+
+    except Exception as e:
+        logger.exception(f'月次バッチ処理中に想定外のエラーが発生しました {e}')
+        raise e
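jobctrl_monthly の稼働日判定は、S3から取得したカレンダーファイルに処理日(yyyy/mm/dd)が部分一致で含まれるかどうかだけを見る。CalendarFile.compare_date と同等の判定を、ダミーのファイル内容でスタンドアロンに再現した最小スケッチ:

```python
# CalendarFile.compare_date と同等の判定ロジックの最小スケッチ。
# calendar_lines の内容は説明用のダミー。
CALENDAR_COMMENT_SYMBOL = '#'  # constants.CALENDAR_COMMENT_SYMBOL と同値

calendar_lines = [
    '# ARIS-J連携データ出力日(ダミー)\n',
    '2023/04/28\n',
    '2023/05/31\n',
]


def is_arisj_output_day(date_str: str) -> bool:
    for calendar_date in calendar_lines:
        # コメント行が含まれている場合はスキップ
        if CALENDAR_COMMENT_SYMBOL in calendar_date:
            continue
        # yyyy/mm/dd の部分一致で判定する
        if date_str in calendar_date:
            return True
    return False


assert is_arisj_output_day('2023/04/28') is True
assert is_arisj_output_day('2023/04/29') is False
```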
diff --git a/ecs/jskult-batch-monthly/src/logging/get_logger.py b/ecs/jskult-batch-monthly/src/logging/get_logger.py
new file mode 100644
index 00000000..f36f1199
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/logging/get_logger.py
@@ -0,0 +1,37 @@
+import logging
+
+from src.system_var.environment import LOG_LEVEL
+
+# boto3関連モジュールのログレベルを事前に個別指定し、モジュール内のDEBUGログの表示を抑止する
+for name in ["boto3", "botocore", "s3transfer", "urllib3"]:
+    logging.getLogger(name).setLevel(logging.WARNING)
+
+
+def get_logger(log_name: str) -> logging.Logger:
+    """一意のログ出力モジュールを取得します。
+
+    Args:
+        log_name (str): ロガー名
+
+    Returns:
+        logging.Logger: 設定済みのロガー
+    """
+    logger = logging.getLogger(log_name)
+    level = logging.getLevelName(LOG_LEVEL)
+    if not isinstance(level, int):
+        level = logging.INFO
+    logger.setLevel(level)
+
+    if not logger.hasHandlers():
+        handler = logging.StreamHandler()
+        logger.addHandler(handler)
+
+    formatter = logging.Formatter(
+        '%(name)s\t[%(levelname)s]\t%(asctime)s\t%(message)s',
+        '%Y-%m-%d %H:%M:%S'
+    )
+
+    for handler in logger.handlers:
+        handler.setFormatter(formatter)
+
+    return logger
diff --git a/ecs/jskult-batch-monthly/src/system_var/__init__.py b/ecs/jskult-batch-monthly/src/system_var/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/ecs/jskult-batch-monthly/src/system_var/constants.py b/ecs/jskult-batch-monthly/src/system_var/constants.py
new file mode 100644
index 00000000..aaa8a3c0
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/system_var/constants.py
@@ -0,0 +1,8 @@
+# バッチ正常終了コード
+BATCH_EXIT_CODE_SUCCESS = 0
+
+# バッチ処理中フラグ:処理中
+BATCH_ACTF_BATCH_IN_PROCESSING = '1'
+
+# カレンダーファイルのコメントシンボル
+CALENDAR_COMMENT_SYMBOL = '#'
diff --git a/ecs/jskult-batch-monthly/src/system_var/environment.py b/ecs/jskult-batch-monthly/src/system_var/environment.py
new file mode 100644
index 00000000..08d6cd16
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/system_var/environment.py
@@ -0,0 +1,24 @@
+import os
+
+# Database
+DB_HOST = os.environ['DB_HOST']
+DB_PORT = int(os.environ['DB_PORT'])
+DB_USERNAME = os.environ['DB_USERNAME']
+DB_PASSWORD = os.environ['DB_PASSWORD']
+DB_SCHEMA = os.environ['DB_SCHEMA']
+
+# AWS
+ARISJ_DATA_BUCKET = os.environ['ARISJ_DATA_BUCKET']
+JSKULT_BACKUP_BUCKET = os.environ['JSKULT_BACKUP_BUCKET']
+JSKULT_CONFIG_BUCKET = os.environ['JSKULT_CONFIG_BUCKET']
+ARISJ_DATA_FOLDER = os.environ['ARISJ_DATA_FOLDER']
+ARISJ_BACKUP_FOLDER = os.environ['ARISJ_BACKUP_FOLDER']
+JSKULT_CONFIG_CALENDAR_FOLDER = os.environ['JSKULT_CONFIG_CALENDAR_FOLDER']
+JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME = os.environ['JSKULT_CONFIG_CALENDAR_ARISJ_OUTPUT_DAY_LIST_FILE_NAME']
+
+# 初期値がある環境変数
+LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
+DB_CONNECTION_MAX_RETRY_ATTEMPT = int(os.environ.get('DB_CONNECTION_MAX_RETRY_ATTEMPT', 4))
+DB_CONNECTION_RETRY_INTERVAL_INIT = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_INIT', 5))
+DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_MIN_SECONDS', 5))
+DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS = int(os.environ.get('DB_CONNECTION_RETRY_INTERVAL_MAX_SECONDS', 50))
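database.py の connect() は、上記の DB_CONNECTION_* を tenacity に渡して指数バックオフ付きリトライを行う(待機秒は multiplier×2^n を min/max でクランプし、stop_after_attempt で試行回数を打ち切る)。組み合わせの動きは、ダミー値を直書きした次の単体スケッチで確認できる:

```python
# connect() と同じ tenacity の組み合わせをダミー値で確認する最小スケッチ。
# 実際の値は環境変数から読むが、ここでは検証用に短い待機秒を直書きしている。
from tenacity import retry, stop_after_attempt, wait_exponential

attempt = {'n': 0}


@retry(
    wait=wait_exponential(multiplier=0.01, min=0.01, max=0.05),  # 検証用のダミー値
    stop=stop_after_attempt(4),  # DB_CONNECTION_MAX_RETRY_ATTEMPT 相当
)
def flaky_connect() -> str:
    # 2回失敗した後、3回目で成功する疑似接続
    attempt['n'] += 1
    if attempt['n'] < 3:
        raise RuntimeError('接続失敗(検証用)')
    return 'connected'


print(flaky_connect(), attempt['n'])  # -> connected 3
```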
diff --git a/ecs/jskult-batch-monthly/src/time/elapsed_time.py b/ecs/jskult-batch-monthly/src/time/elapsed_time.py
new file mode 100644
index 00000000..c1432e91
--- /dev/null
+++ b/ecs/jskult-batch-monthly/src/time/elapsed_time.py
@@ -0,0 +1,22 @@
+import time
+
+
+class ElapsedTime:
+    """処理実行時間計測クラス"""
+    def __init__(self) -> None:
+        """このクラスの新たなインスタンスを初期化します。"""
+        self.__start = time.perf_counter()
+
+    @property
+    def of(self):
+        """インスタンス化してからの経過時間をhh:mm:ssの形式にフォーマットして返す
+        Returns:
+            str: 時分秒形式の経過時間
+        """
+        elapsed_time = time.perf_counter() - self.__start
+        h, rem = divmod(elapsed_time, 3600)
+        m, s = divmod(rem, 60)
+        h_str = f'{h:02.0f} hour ' if h > 0.0 else ''
+        m_str = f'{m:02.0f} min ' if m > 0.0 else ''
+        s_str = f'{s:06.02f} sec' if s > 0.0 else ''
+        return f"{h_str}{m_str}{s_str}"
diff --git a/ecs/jskult-webapp/src/controller/login.py b/ecs/jskult-webapp/src/controller/login.py
index 09032af5..c8a5663c 100644
--- a/ecs/jskult-webapp/src/controller/login.py
+++ b/ecs/jskult-webapp/src/controller/login.py
@@ -69,16 +69,20 @@ def login(
     try:
         jwt_token = login_service.login(request.username, request.password)
     except NotAuthorizeException as e:
-        logger.exception(e)
+        logger.info(f'ログイン失敗:{e}')
         raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
     except JWTTokenVerifyException as e:
-        logger.exception(e)
-        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
+        logger.info(f'ログイン失敗:{e}')
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
 
     verified_token = jwt_token.verify_token()
     # 普通の認証だと、`cognito:username`に入る。
     user_id = verified_token.user_id
     user_record = login_service.logged_in_user(user_id)
+    # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする
+    if user_record is None:
+        logger.info(f'存在しないユーザーID: {user_id}')
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
     # ユーザーが有効ではない場合、ログアウトにリダイレクトする
     if not user_record.is_enable_user():
         logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}')
@@ -126,12 +130,17 @@ def sso_authorize(
         # トークン検証
         verified_token = jwt_token.verify_token()
     except JWTTokenVerifyException as e:
-        logger.exception(e)
-        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_SESSION_EXPIRED)
+        logger.info(f'SSOログイン失敗:{e}')
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
 
     # トークンからユーザーIDを取得
     user_id = verified_token.user_id
     user_record = login_service.logged_in_user(user_id)
+
+    # ユーザーがマスタに存在しない場合、ログアウトにリダイレクトする
+    if user_record is None:
+        logger.info(f'存在しないユーザーID: {user_id}')
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=constants.LOGOUT_REASON_LOGIN_ERROR)
     # ユーザーが有効ではない場合、ログアウトにリダイレクトする
     if not user_record.is_enable_user():
         logger.info(f'無効なユーザー: {user_id}, 有効フラグ: {user_record.enabled_flg}')
diff --git a/ecs/jskult-webapp/src/controller/logout.py b/ecs/jskult-webapp/src/controller/logout.py
index 79de281c..c841e48c 100644
--- a/ecs/jskult-webapp/src/controller/logout.py
+++ 
b/ecs/jskult-webapp/src/controller/logout.py @@ -22,11 +22,16 @@ def logout_view( reason: Optional[str] = None, session: Union[UserSession, None] = Depends(verify_session) ): + # どういうルートでログインしたかを判断するため、refererを取得 + referer = request.headers.get('referer', '') + redirect_to = '/login/userlogin' link_text = 'MeDaCA機能メニューへ' - if session is not None and session.user_flg == '1': + # セッションが切れておらず、メンテユーザである、またはメンテログイン画面から遷移した場合、メンテログイン画面に戻す + if (session is not None and session.user_flg == '1') or referer.endswith('maintlogin'): redirect_to = '/login/maintlogin' link_text = 'Login画面に戻る' + logout = LogoutViewModel() logout.redirect_to = redirect_to logout.reason = constants.LOGOUT_REASON_MESSAGE_MAP.get(reason, '') diff --git a/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py b/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py index 788d9748..12ced11c 100644 --- a/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py +++ b/ecs/jskult-webapp/src/model/db/ultmarc_trt_course.py @@ -5,5 +5,5 @@ from src.util.sanitize import sanitize @sanitize -class UltmarcTrtCourseDBModel(BaseDBModel): +class UltmarcDrTrtCourseDBModel(BaseDBModel): trt_course_name: Optional[str] diff --git a/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py b/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py index 5de18e11..f517e4a3 100644 --- a/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py +++ b/ecs/jskult-webapp/src/model/view/ultmarc_doctor_info_view_model.py @@ -10,7 +10,7 @@ from src.model.db.ultmarc_doctor_wrkplace_his import \ from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel from src.model.db.ultmarc_specialist_license import \ UltmarcSpecialistLicenseDBModel -from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel +from src.model.db.ultmarc_trt_course import UltmarcDrTrtCourseDBModel from src.system_var import environment @@ -18,7 +18,7 @@ class UltmarcDoctorInfoViewModel(BaseModel): subtitle: str = '医師情報' is_batch_processing: Optional[bool] doctor_info_data: Optional[UltmarcDoctorInfoDBModel] - trt_coursed_data: Optional[list[UltmarcTrtCourseDBModel]] + trt_coursed_data: Optional[list[UltmarcDrTrtCourseDBModel]] sosiety_data: Optional[list[UltmarcSosietyDBModel]] specialist_license_data: Optional[list[UltmarcSpecialistLicenseDBModel]] doctor_wrkplace_data: Optional[list[UltmarcDoctorWrkplaceDBModel]] diff --git a/ecs/jskult-webapp/src/repositories/inst_master_repository.py b/ecs/jskult-webapp/src/repositories/inst_master_repository.py index d4b18505..4de6b732 100644 --- a/ecs/jskult-webapp/src/repositories/inst_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/inst_master_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.inst_div_master import InstDivMasterModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_施設区分取得') + class InstDivMasterRepository(BaseRepository): @@ -21,9 +24,7 @@ class InstDivMasterRepository(BaseRepository): models = [InstDivMasterModel(**r) for r in result_data] return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] getOroshiData DB Error. 
") - print(f"[ERROR] ErrorMessage: {e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/prefc_master_repository.py b/ecs/jskult-webapp/src/repositories/prefc_master_repository.py index c304f59f..e83e5d78 100644 --- a/ecs/jskult-webapp/src/repositories/prefc_master_repository.py +++ b/ecs/jskult-webapp/src/repositories/prefc_master_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.prefc_master import PrefcMasterModel from src.repositories.base_repository import BaseRepository +logger = get_logger('都道府県マスタ取得') + class PrefcMasterRepository(BaseRepository): @@ -23,9 +26,7 @@ class PrefcMasterRepository(BaseRepository): models = [PrefcMasterModel(**r) for r in result_data] return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] getOroshiData DB Error. ") - print(f"[ERROR] ErrorMessage: {e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py index 901078ab..03ee8271 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_doctor_repository.py @@ -1,11 +1,15 @@ +import mojimoji + from src.db import sql_condition as condition from src.db.sql_condition import SQLCondition +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor import UltmarcDoctorDBModel from src.model.db.ultmarc_doctor_info import UltmarcDoctorInfoDBModel from src.model.request.ultmarc_doctor import UltmarcDoctorSearchModel from src.repositories.base_repository import BaseRepository from src.util.string_util import is_not_empty -import mojimoji + +logger = get_logger('COM_医師取得') class UltmarcDoctorRepository(BaseRepository): @@ -56,8 +60,7 @@ class UltmarcDoctorRepository(BaseRepository): return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -79,7 +82,7 @@ class UltmarcDoctorRepository(BaseRepository): if is_not_empty(parameter.dr_name_kana): # 必ず部分一致で検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.dr_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -101,7 +104,7 @@ class UltmarcDoctorRepository(BaseRepository): if is_not_empty(parameter.form_inst_name_kana): # 必ず部分一致で検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.form_inst_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -178,8 +181,7 @@ class UltmarcDoctorRepository(BaseRepository): return None return models[0] except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git 
a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py index 1fab4d45..daa82533 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_his_repository.py @@ -1,7 +1,10 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor_wrkplace_his import \ UltmarcDoctorWrkplaceHisDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師勤務先履歴取得') + class UltmarcDoctorWrkplaceHisRepository(BaseRepository): @@ -35,8 +38,7 @@ class UltmarcDoctorWrkplaceHisRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py index b05ce6fd..4798c467 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_dr_wrkplace_repository.py @@ -1,8 +1,11 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_doctor_wrkplace import UltmarcDoctorWrkplaceDBModel from src.model.db.ultmarc_doctor_wrkplace_count import \ UltmarcDoctorWrkplaceCountDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師勤務先取得') + class UltmarcDoctorWrkplaceRepository(BaseRepository): @@ -34,8 +37,7 @@ class UltmarcDoctorWrkplaceRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -56,8 +58,7 @@ class UltmarcDoctorWrkplaceRepository(BaseRepository): return 0 return models[0].count except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py index 2522006f..ef1cab76 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_inst_repository.py @@ -1,11 +1,15 @@ +import mojimoji + from src.db import sql_condition as condition from src.db.sql_condition import SQLCondition +from src.logging.get_logger import get_logger from src.model.db.ultmarc_inst import UltmarcInstDBModel from src.model.db.ultmarc_inst_info import UltmarcInstInfoDBModel from src.model.request.ultmarc_inst import UltmarcInstSearchModel from src.repositories.base_repository import BaseRepository from src.util.string_util import is_not_empty -import mojimoji + +logger = get_logger('COM_施設取得') class UltmarcInstRepository(BaseRepository): @@ -43,8 +47,7 @@ class UltmarcInstRepository(BaseRepository): return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() @@ -72,7 +75,7 @@ class UltmarcInstRepository(BaseRepository): if is_not_empty(parameter.form_inst_name_kana): # 
部分一致検索 # ひらがなを全角カタカナへ変換 - zenkaku_katakana = ''.join([chr(n+96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) + zenkaku_katakana = ''.join([chr(n + 96) if (12352 < n and n < 12439) or n == 12445 or n == 12446 else chr(n) for n in [ord(c) for c in parameter.form_inst_name_kana]]) # 全角カタカナを半角カタカナへ変換 hankaku_katakana = mojimoji.zen_to_han(zenkaku_katakana) @@ -187,8 +190,7 @@ class UltmarcInstRepository(BaseRepository): return None return models[0] except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py index eaee391a..4b3c0385 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_inst_trt_course_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_inst_trt_course import UltmarcInstTrtCourseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_施設診療科目取得') + class UltmarcInstTrtCourseRepository(BaseRepository): @@ -24,8 +27,7 @@ class UltmarcInstTrtCourseRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py index e3c9ac13..0cd55e8c 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_sosiety_repository.py @@ -1,6 +1,9 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_sosiety import UltmarcSosietyDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_学会取得') + class UltmarcSosietyRepository(BaseRepository): @@ -23,8 +26,7 @@ class UltmarcSosietyRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py b/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py index a4927b44..7024b616 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_specialist_license_repository.py @@ -1,7 +1,10 @@ +from src.logging.get_logger import get_logger from src.model.db.ultmarc_specialist_license import \ UltmarcSpecialistLicenseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_専門医資格取得') + class UltmarcSpecialistLicenseRepository(BaseRepository): @@ -25,8 +28,7 @@ class UltmarcSpecialistLicenseRepository(BaseRepository): return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py 
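The kana handling shared by the doctor and institution searches above is a plain code-point shift: hiragana (U+3041 to U+3096, code points 12353 to 12438) and the iteration marks U+309D/U+309E (12445/12446) sit exactly 96 below their katakana counterparts, so chr(n + 96) turns hiragana into full-width katakana before mojimoji folds it to half-width. A standalone rendering of the same transform (the function name is illustrative):

import mojimoji


def hira_to_hankaku_kana(text: str) -> str:
    """Hiragana -> full-width katakana -> half-width katakana."""
    zenkaku_katakana = ''.join(
        chr(n + 96) if 12352 < n < 12439 or n in (12445, 12446) else chr(n)
        for n in (ord(c) for c in text)
    )
    return mojimoji.zen_to_han(zenkaku_katakana)


print(hira_to_hankaku_kana('やまだ'))  # -> 'ﾔﾏﾀﾞ'

Note that zen_to_han with default arguments also folds full-width ASCII and digits to half-width, which is presumably what the stored search keys expect here.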
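Every repository touched in this series gets the same mechanical edit: a module-level logger from src.logging.get_logger replaces the ad-hoc print calls, logger.exception records the message together with the stack trace, the exception is re-raised, and the connection is still released in finally. Reduced to its shared shape (SomeRepository, the logger label, and the query are placeholders, not names from the diff):

from src.logging.get_logger import get_logger
from src.repositories.base_repository import BaseRepository

logger = get_logger('COM_サンプル取得')  # placeholder label


class SomeRepository(BaseRepository):
    FETCH_SQL = 'SELECT 1'  # placeholder query

    def fetch_many(self):
        try:
            self._database.connect()
            return self._database.execute_select(self.FETCH_SQL, {})
        except Exception as e:
            # logger.exception logs at ERROR level and appends the current
            # traceback, so e.args is kept only to match the message format.
            logger.exception(f'DB Error : Exception={e.args}')
            raise
        finally:
            self._database.disconnect()

A bare raise preserves the original traceback just as well as the raise e used in the diff; either way logger.exception has already captured it.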
b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py index c76032a3..a88ffbbe 100644 --- a/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py +++ b/ecs/jskult-webapp/src/repositories/ultmarc_trt_course_repository.py @@ -1,31 +1,33 @@ -from src.model.db.ultmarc_trt_course import UltmarcTrtCourseDBModel +from src.logging.get_logger import get_logger +from src.model.db.ultmarc_trt_course import UltmarcDrTrtCourseDBModel from src.repositories.base_repository import BaseRepository +logger = get_logger('COM_医師診療科目取得') -class UltmarcTrtCourseRepository(BaseRepository): + +class UltmarcDrTrtCourseRepository(BaseRepository): FETCH_SQL = """\ SELECT trt_course_name FROM src05.com_dr LEFT JOIN src05.com_dr_trt_course ON com_dr.dcf_pcf_dr_cd = com_dr_trt_course.dcf_pcf_dr_cd - LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd + LEFT JOIN src05.com_trt_course ON com_dr_trt_course.trt_course_cd = com_trt_course.trt_course_cd WHERE com_dr.dcf_pcf_dr_cd = :id ORDER BY com_trt_course.trt_course_cd """ - def fetch_many(self, id) -> list[UltmarcTrtCourseDBModel]: + def fetch_many(self, id) -> list[UltmarcDrTrtCourseDBModel]: try: self._database.connect() query = self.FETCH_SQL result = self._database.execute_select(query, {'id': id}) - models = [UltmarcTrtCourseDBModel(**r) for r in result] + models = [UltmarcDrTrtCourseDBModel(**r) for r in result] if len(models) == 0: return None return models except Exception as e: - # TODO: ファイルへの書き出しはloggerでやる - print(f"[ERROR] DB Error : Exception={e.args}") + logger.exception(f"DB Error : Exception={e.args}") raise e finally: self._database.disconnect() diff --git a/ecs/jskult-webapp/src/router/session_router.py b/ecs/jskult-webapp/src/router/session_router.py index 90f3a5c9..324c777f 100644 --- a/ecs/jskult-webapp/src/router/session_router.py +++ b/ecs/jskult-webapp/src/router/session_router.py @@ -90,8 +90,7 @@ class BeforeCheckSessionRoute(MeDaCaRoute): verified_session = verify_session(checked_session) # セッションが有効でない場合、エラーにする if verified_session is None: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, - detail=constants.LOGOUT_REASON_SESSION_EXPIRED) + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) scope = request.scope scope['session'] = verified_session session_request = Request(receive=request.receive, scope=scope) diff --git a/ecs/jskult-webapp/src/services/ultmarc_view_service.py b/ecs/jskult-webapp/src/services/ultmarc_view_service.py index 5c441421..24cf1d75 100644 --- a/ecs/jskult-webapp/src/services/ultmarc_view_service.py +++ b/ecs/jskult-webapp/src/services/ultmarc_view_service.py @@ -23,7 +23,7 @@ from src.repositories.ultmarc_sosiety_repository import \ from src.repositories.ultmarc_specialist_license_repository import \ UltmarcSpecialistLicenseRepository from src.repositories.ultmarc_trt_course_repository import \ - UltmarcTrtCourseRepository + UltmarcDrTrtCourseRepository from src.services.base_service import BaseService @@ -33,7 +33,7 @@ class UltmarcViewService(BaseService): 'prefc_repository': PrefcMasterRepository, 'inst_div_repository': InstDivMasterRepository, 'ultmarc_inst_repository': UltmarcInstRepository, - 'ultmarc_trt_course_repository': UltmarcTrtCourseRepository, + 'ultmarc_trt_course_repository': UltmarcDrTrtCourseRepository, 'ultmarc_inst_trt_course_repository': UltmarcInstTrtCourseRepository, 'ultmarc_sosiety_repository': UltmarcSosietyRepository, 'ultmarc_doctor_wrkplace_repository': UltmarcDoctorWrkplaceRepository, @@ 
-45,7 +45,7 @@ class UltmarcViewService(BaseService): prefc_repository: PrefcMasterRepository inst_div_repository: InstDivMasterRepository ultmarc_inst_repository: UltmarcInstRepository - ultmarc_trt_course_repository: UltmarcTrtCourseRepository + ultmarc_trt_course_repository: UltmarcDrTrtCourseRepository ultmarc_inst_trt_course_repository: UltmarcInstTrtCourseRepository ultmarc_sosiety_repository: UltmarcSosietyRepository ultmarc_doctor_wrkplace_repository: UltmarcDoctorWrkplaceRepository diff --git a/ecs/jskult-webapp/src/static/css/bioStyle.css b/ecs/jskult-webapp/src/static/css/bioStyle.css index 8bc72999..7ecde3c5 100644 --- a/ecs/jskult-webapp/src/static/css/bioStyle.css +++ b/ecs/jskult-webapp/src/static/css/bioStyle.css @@ -1,97 +1,103 @@ +/* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ +/* @see https://bootstrap-guide.com/content/reboot#page-defaults */ +*, ::after, ::before { + box-sizing: initial; +} + body { - white-space: nowrap; - background-color: LightCyan; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + white-space: nowrap; + background-color: LightCyan; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1 { - font-size: 155%; - margin-left: 2%; - margin-top: 0%; - margin-bottom: 0%; + font-size: 155%; + margin-left: 2%; + margin-top: 0%; + margin-bottom: 0%; } .title { - width: 800px; + width: 800px; } table{ - border-collapse : collapse; + border-collapse : collapse; } .search_table { - margin-bottom: 30px; - padding-bottom: 15px; - border-bottom: solid 1px gray; - width: 1132px; + margin-bottom: 30px; + padding-bottom: 15px; + border-bottom: solid 1px gray; + width: 1132px; } ._form { - width: 1132px; - margin-left: 10px; - margin-right: 20px; + width: 1132px; + margin-left: 10px; + margin-right: 20px; } .back_bt { - padding-bottom: 10px; + padding-bottom: 10px; } ._form input[type=text] { - width: 193px; - height: 25px; + width: 193px; + height: 25px; } ._form input[type=checkbox] { - width: 13px; - height: 13px; + width: 13px; + height: 13px; } ._form select { - width: 193px; - height: 25px; + width: 193px; + height: 25px; } .result_info { - text-align: right; + text-align: right; } .search_tb { - padding-right: 25px; + padding-right: 25px; } .search_bt { - /* width: 60px; */ - margin-left: 10px; + /* width: 60px; */ + margin-left: 10px; } .clear_bt{ - margin-left: 120px; - /* width: 60px */ + margin-left: 120px; + /* width: 60px */ } .search_dropdown { - width: 175px; + width: 175px; } .bioScroll_div { - overflow: auto; - padding-top: 10px; - height: 250px; - width: 1132px; + overflow: auto; + margin-top: 1%; + height: 250px; + width: 1132px; } .noLine{ - text-decoration: none; + text-decoration: none; } .resultAreaMsg { - margin-top: 5%; - text-align: center; - font-size: 150%; + margin-top: 5%; + text-align: center; + font-size: 150%; } .search_btTd { - text-align: right; + text-align: right; } .selection { @@ -103,76 +109,70 @@ table{ } .search_middleTd { - padding-right: 25px; - width : 450px; -} - -.docSearchScroll_div { - overflow: auto; - height: 200px; - width: 1132px; + padding-right: 25px; + width : 450px; } .transition{ - text-align: right; - margin-right: 60px; + text-align: right; + margin-right: 60px; } .transition_bt{ - width: 110px; - height: 40px; - margin-left: 15px; - margin-right: 15px; + width: 110px; + height: 40px; + margin-left: 15px; + margin-right: 15px; } 
.instutionInfo_table{ - width: 1132px; - margin-bottom: 50px; + width: 1132px; + margin-bottom: 50px; } .institution_column { - width : 160px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 160px; + background : rgb(225, 233, 250); + border : solid 1px; } .institution_data { - background : rgb(244, 244, 244); - border : solid 1px; - padding-left : 0.5em; - padding-right : 0.5em; + background : rgb(244, 244, 244); + border : solid 1px; + padding-left : 0.5em; + padding-right : 0.5em; } .data_width_long { - width : 500px; + width : 500px; } .data_width_middle { - width : 300px; + width : 300px; } .data_width_short { - width : 100px; + width : 100px; } .checkbox_margin { - margin-left : 20px; + margin-left : 20px; } .border_top_none { - border-top-style:none; + border-top-style:none; } .border_bottom_none { - border-bottom-style:none; + border-bottom-style:none; } .textbox_margin { - margin-left : 20px; + margin-left : 20px; } .textbox_margin_short { - margin-left : 5px; + margin-left : 5px; } .label_margin { @@ -181,111 +181,110 @@ table{ } .trt_course{ - width: 70px; + width: 70px; } .small_tb{ - width: 100px; + width: 100px; } .docBelongScroll_div { - overflow: auto; - height: 100px; - width: 500px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 100px; + width: 500px; + margin: 0px 30px 0px 30px; } .rightPadding_table{ - padding-right: 50px; + padding-right: 50px; } .verticalBar_td{ - width: 1px; - height: 150px; - background-color: gray; + width: 1px; + height: 150px; + background-color: gray; } .docPlaceScroll_div { - overflow: auto; - height: 150px; - width: 700px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 150px; + width: 700px; + margin: 0px 30px 0px 30px; } .result_tr{ - overflow-y: scroll; - overflow-x: scroll; - + overflow-y: scroll; + overflow-x: scroll; } .result_data{ - overflow-y: scroll; - overflow-x: scroll; - width: 50px; + overflow-y: scroll; + overflow-x: scroll; + width: 50px; } /* tablesoter */ table.tablesorter { - font-family:arial; - background-color: #CDCDCD; - font-size: 12pt; - text-align: left; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { - background-color: #e6EEEE; - border: 0.1px solid silver; - font-size: 12pt; - padding: 4px; - padding-right: 20px; + background-color: #e6EEEE; + border: 0.1px solid silver; + font-size: 12pt; + padding: 4px; + padding-right: 20px; } table.tablesorter thead tr .header { - background-image: url(bg.gif); - background-repeat: no-repeat; - background-position: center right; - cursor: pointer; + background-image: url(bg.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; } table.tablesorter tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } table.tablesorter tbody td div{ - float: right; + float: right; } table.tablesorter tbody tr.odd td { - background-color:#F0F0F6; + background-color:#F0F0F6; } table.tablesorter thead tr .headerSortUp { - background-image: url(asc.gif); + background-image: url(asc.gif); } table.tablesorter thead tr .headerSortDown { - background-image: url(desc.gif); + background-image: url(desc.gif); } table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp { - background-color: #8dbdd8; + background-color: 
#8dbdd8; } #loading { - z-index: 10000; - position: fixed; - top: 0; - left: 0; - width: 100%; - height: 100%; - background-color: #FFF; - overflow-x: hidden; - overflow-y: auto; - outline: 0; - text-align: center; - display: none; - opacity: 0.7; + z-index: 10000; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: #FFF; + overflow-x: hidden; + overflow-y: auto; + outline: 0; + text-align: center; + display: none; + opacity: 0.7; } #loading_content { - position: absolute; - top: 50%; - left: 50%; + position: absolute; + top: 50%; + left: 50%; } \ No newline at end of file diff --git a/ecs/jskult-webapp/src/static/css/masterMainte.css b/ecs/jskult-webapp/src/static/css/masterMainte.css index ed02abb8..0b8b8a8e 100644 --- a/ecs/jskult-webapp/src/static/css/masterMainte.css +++ b/ecs/jskult-webapp/src/static/css/masterMainte.css @@ -1,26 +1,26 @@ body{ - background-color: LightCyan; - font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + font-family : "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1{ - margin-left : 1%; + margin-left : 1%; } /*ヘッダー*/ .headerTable{ - width: 100%; + width: 100%; } .headerTdLeft{ - width: 80%; + width: 80%; } .headerTdRight{ - text-align: right; - padding-right: 2%; - width: 20%; + text-align: right; + padding-right: 2%; + width: 20%; } .csvup_buttonSize{ @@ -31,123 +31,123 @@ h1{ /*施設担当者データCSVダウンロード*/ /*////////////////////////*/ .searchColumnTd{ - width: 14%; + width: 14%; } .searchTextboxTd{ - width: 18%; + width: 18%; } .searchTable{ - margin-left: 3%; - margin-right: 3%; - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 94%; + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; } .searchLabelTd{ - text-align: right; - width: 10%; + text-align: right; + width: 10%; } .searchInputTd{ - width: 19%; + width: 19%; } .searchTextbox{ - width: 90%; - margin-left: 2.5%; - margin-right: 2.5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 90%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .searchDateTextbox{ - width: 37%; - margin-left: 2.5%; - margin-right: 2.5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 37%; + margin-left: 2.5%; + margin-right: 2.5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .searchButtonTd{ - text-align: right; - padding-top: 1%; + text-align: right; + padding-top: 1%; } .csvOutputMessage{ - margin-left: 3%; + margin-left: 3%; } .errorColor{ - color: red; + color: red; } /*//////////////////////////*/ /*施設担当者データExcelアップロード*/ /*//////////////////////////*/ .inputTable{ - margin-left: 3%; - margin-right: 3%; - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 94%; + margin-left: 3%; + margin-right: 3%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 94%; } .inputLabelTd{ - width: 10%; + width: 10%; } .inputTd{ - width:20%; + width:20%; } .inputButtonTd{ - width: 50%; - text-align: right; + width: 50%; + text-align: right; } .dataCntDisp{ - text-align: right; - margin-right: 3%; + text-align: right; + margin-right: 3%; } table.inputData { - font-family:arial; - background-color: #CDCDCD; - font-size: 12pt; - text-align: left; - white-space: nowrap; - border: 0.1px solid silver; - padding: 4px; - padding-right: 20px; - 
border-collapse: collapse; - margin-left: 3%; - width: 94%; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; + white-space: nowrap; + border: 0.1px solid silver; + padding: 4px; + padding-right: 20px; + border-collapse: collapse; + margin-left: 3%; + width: 94%; } table.inputData tbody th { - color: #3D3D3D; - padding: 4px; - background-color: #e6EEEE; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #e6EEEE; + border: 0.1px solid silver; + vertical-align: top; } table.inputData tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } .footerMsg{ - margin-left: 3%; + margin-left: 3%; } @@ -155,10 +155,10 @@ table.inputData tbody td { /*データ上書きコピー */ /*//////////////////////////*/ .tableOverRide{ - margin-right: 3%; - margin-left: 3%; - margin-bottom: 2%; - border-bottom: solid 1px gray; - width: 94%; + margin-right: 3%; + margin-left: 3%; + margin-bottom: 2%; + border-bottom: solid 1px gray; + width: 94%; } diff --git a/ecs/jskult-webapp/src/static/css/menuStyle.css b/ecs/jskult-webapp/src/static/css/menuStyle.css index 3a07d9fc..cabd5197 100644 --- a/ecs/jskult-webapp/src/static/css/menuStyle.css +++ b/ecs/jskult-webapp/src/static/css/menuStyle.css @@ -1,49 +1,49 @@ body{ - background-color: LightCyan; - background-size: 220%,220%; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + background-size: 220%,220%; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } .background{ - margin-top: 5%; - padding: 2%; - background-color: white; - width: 40%; - border-radius: 25px; - box-shadow:5px 5px rgba(0,0,0,0.4);; + margin-top: 5%; + padding: 2%; + background-color: white; + width: 40%; + border-radius: 25px; + box-shadow:5px 5px rgba(0,0,0,0.4);; } .btn_width { - width: 80%; + width: 80%; } .form_login{ - width: 80%; - font-size: 180%; - margin: 1%; + width: 80%; + font-size: 180%; + margin: 1%; } .form_login::-webkit-input-placeholder{ - color: gray; + color: gray; } .form_login:-ms-input-placeholder{ - color: gray; + color: gray; } .form_login::-moz-placeholder{ - color: gray; + color: gray; } .logout_p{ - font-size: 160%; + font-size: 160%; } .notUseBioMsg,.notUseMainteMsg{ - font-size: 143%; - color: red; + font-size: 143%; + color: red; } .batchMsg{ - color: red; - font-size: 120%; - text-align: center; + color: red; + font-size: 120%; + text-align: center; } \ No newline at end of file diff --git a/ecs/jskult-webapp/src/static/css/pagenation.css b/ecs/jskult-webapp/src/static/css/pagenation.css index 3edbf3b2..72e4aecc 100644 --- a/ecs/jskult-webapp/src/static/css/pagenation.css +++ b/ecs/jskult-webapp/src/static/css/pagenation.css @@ -3,10 +3,6 @@ padding-top: 10px; } -/* .paginationjs { - width: 100%; -} */ - .paginationjs > .paginationjs-nav.J-paginationjs-nav{ position: absolute; right: 0; @@ -19,10 +15,12 @@ div.paginationjs-pages ul { } .paginationjs-pages > ul > li > a { - padding: 6px 18px; + padding: 6px 9px; color: white; - background-color: gainsboro; - border: 1px solid; + background-color: whitesmoke; + border: 1px solid #bbb; + border-radius: 3px; + box-shadow: 0 1px 2px rgba(0, 0, 0, 0.2) } .paginationjs-pages > ul > li > a:hover 
{ color: black; @@ -31,27 +29,38 @@ div.paginationjs-pages ul { } .paginationjs-pages > ul > li.active > a { color: white; - background-color: gray; + background-color: #666666; + box-shadow: none; } .paginationjs-pages > ul > li.active > a:hover { color: white; - background-color: gray; + background-color: #666666; cursor: text; } .paginationjs-pages > ul > li.disabled > a { color: white; - background-color: gray; - cursor: text; + background-color: #666666; + box-shadow: none; } .paginationjs-pages > ul > li.disabled > a:hover { color: white; - background-color: gray; + background-color: #666666; cursor: text; } -.paginationjs-page { +.paginationjs-page,.paginationjs-prev,.paginationjs-next { + text-decoration: underline; margin: 0 4px; } + +.paginationjs-page:hover,.paginationjs-prev:hover,.paginationjs-next:hover { + text-decoration: none; +} + +.paginationjs-page.active,.paginationjs-prev.disabled,.paginationjs-next.disabled { + text-decoration: none; +} + .paginationjs-pages > ul { display: flex; align-items: baseline; diff --git a/ecs/jskult-webapp/src/static/css/ultStyle.css b/ecs/jskult-webapp/src/static/css/ultStyle.css index 8d018b01..f5eea0e0 100644 --- a/ecs/jskult-webapp/src/static/css/ultStyle.css +++ b/ecs/jskult-webapp/src/static/css/ultStyle.css @@ -1,500 +1,484 @@ +/* Bootstrap 5.10以降、box-sizingのデフォルト値によってテーブルがずれるため、このページ限定的にリセット */ +/* @see https://bootstrap-guide.com/content/reboot#page-defaults */ +*, ::after, ::before { + box-sizing: initial; +} + body { - background-color: LightCyan; - font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; + background-color: LightCyan; + font-family: "ヒラギノ角ゴ Pro W3", "Hiragino Kaku Gothic Pro", "メイリオ", Meiryo, Osaka, "MS Pゴシック", "MS PGothic", sans-serif; } h1 { - font-size: 150%; - margin-left: 2%; - margin-top: 0%; - margin-bottom: 0%; + font-size: 150%; + margin-left: 2%; + margin-top: 0%; + margin-bottom: 0%; } table{ - border-collapse : collapse; + border-collapse : collapse; } .header_bt{ - width: 8%; - margin-bottom: 0.8%; - margin-left: 78.5%; + width: 8%; + margin-bottom: 0.8%; + margin-left: 78.5%; } .search_textbox{ - width: 100%; + width: 100%; } .search_dropdown{ - width: 100%; + width: 100%; + height: 1.8em; } .search_longtextbox{ - width: 100% + width: 100% } .instSearchResult { - width: 100%; + width: 100%; } .scroll_table{ - overflow: auto; - white-space: nowrap; - margin-bottom: 2%; - /*スクロール時カラムが動く問題の修正 width: 100%;をコメントアウト*/ - width: 100%; - height: 250px; + overflow: auto; + white-space: nowrap; + margin-top: 1%; + margin-bottom: 1%; + width: 100%; + height: 250px; } .scroll_table::-webkit-scrollbar { - height: 5px; - width: 10px; + height: 5px; + width: 10px; } .scroll_table::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; + border-radius: 5px; + background: #eee; } .scroll_table::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; + border-radius: 5px; + background: #666; } .ult_bt { - width: 20%; - height: 80%; + width: 20%; + height: 80%; } .info_bt{ - width: 10% + width: 10% } .search_bt{ - margin-left: 3%; - margin-top: 0.8%; - margin-bottom: 0.8%; + margin-left: 3%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .notFind{ - margin-top: 5%; - text-align: center; - font-size: 150%; + margin-top: 5%; + text-align: center; + font-size: 150%; } .search_table { - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 100%; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 
1px gray; + width: 100%; } .search_tb { - padding-right: 2%; - padding-top: 0.2%; - padding-bottom: 0.2%; + padding-right: 2%; + padding-top: 0.2%; + padding-bottom: 0.2%; } .leftSearch_tb{ - width: 35%; + width: 35%; } .batchMsg{ - color: red; - font-size: 120%; - text-align: center; + color: red; + font-size: 120%; + text-align: center; } ._form { - width: 95%; - margin-left: 3%; + width: 95%; + margin-left: 3%; } .result_info { - text-align: right; + text-align: right; } /*施設検索一覧ヘッダー*/ .instSearchHeaderTable{ - width: 100%; + width: 100%; } .instSearchHeaderTd{ - width: 24%; + width: 24%; } .instSearchHeaderTdCenter{ - text-align: center; - width: 50%; + text-align: center; + width: 50%; } .instSearchHeaderTdRight{ - text-align: right; - padding-right: 2%; + text-align: right; + padding-right: 2%; } .instSearchButchMsg{ - /* font-size: 80%; */ - color: red; + /* font-size: 80%; */ + color: red; } .instSearchHeader_bt{ - width: 40%; + width: 40%; } /*施設詳細*/ .instInfoTable{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - margin-bottom: 2%; - width: 93%; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 2%; + width: 93%; } .instInfoTableHalf1{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - width: 93%; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + width: 93%; } .instInfoTableHalf2{ - margin-top: -0.05%; - margin-left: 5%; - margin-right: 2%; - margin-bottom: 2%; - width: 93%; + margin-top: -0.05%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 2%; + width: 93%; } .instInfoColumn { - width : 9%; - height: 40px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 9%; + height: 40px; + background : rgb(225, 233, 250); + border : solid 1px; } .instData { - background : rgb(244, 244, 244); - border : solid 1px; - padding-left : 0.5%; - padding-right : 0.5%; - padding-top: 0.25%; - padding-bottom: 0.25%; + background : rgb(244, 244, 244); + border : solid 1px; + padding-left : 0.5%; + padding-right : 0.5%; + padding-top: 0.25%; + padding-bottom: 0.25%; } .instDataMid{ - /*NO5修正前 width: 51%;*/ - width: 20%; + /*NO5修正前 width: 51%;*/ + width: 20%; } /*NO5にて追加 START*/ .instDataLarge{ - width: 85%; + width: 85%; } .instDataLeft{ - width: 20%; + width: 20%; } .instDataCenter{ - width: 7%; + width: 7%; } .instDataRight{ - width: 25%; + width: 25%; } /*NO5にて追加 END*/ .instDataSmallTextbox{ - width: 45%; + width: 45%; } /*NO5にて追加 START*/ .instDataCenterTextbox{ - width: 80%; + width: 80%; } /*NO5にて追加 END*/ .instInfoTextbox{ - width: 98%; - padding-right: 1%; - padding-left: 1%; + width: 98%; + padding-right: 1%; + padding-left: 1%; } .instCdTextbox{ - /*NO5修正前 width: 13%;*/ - width: 35%; - margin-left: 0.5%; - margin-right: 2%; + /*NO5修正前 width: 13%;*/ + width: 35%; + margin-left: 0.5%; + margin-right: 2%; } .delReasonCdTextbox{ - /*NO5修正前 width: 2%;*/ - width: 5%; - margin-left: 0.5%; - margin-right: 1%; + /*NO5修正前 width: 2%;*/ + width: 5%; + margin-left: 0.5%; + margin-right: 1%; } .delReasonTextbox{ - /*NO5修正前 width: 43%;*/ - width: 88%; - margin-left: 0.5%; - margin-right: 2%; + /*NO5修正前 width: 43%;*/ + width: 88%; + margin-left: 0.5%; + margin-right: 2%; } .manageTextbox{ - width: 40%; + width: 40%; } .textboxMargin { - margin-left : 0.1%; + margin-left : 0.1%; } .transitionBt{ - width: 98%; - height: 30px; + width: 98%; + height: 30px; } .instHeaderTable{ - margin-left: 40%; + margin-left: 40%; } .instHeaderTd{ - width: 10%; - font-size: 140%; - text-align: center; - padding-right: 2%; + width: 10%; + font-size: 140%; 
+ text-align: center; + padding-right: 2%; } .trtCourseTextbox{ - width: 6%; + width: 6%; } .bedTd{ - width: 46%; + width: 46%; } .bedTextbox{ - width: 70%; + width: 70%; } .xSmallTd{ - width: 9%; + width: 9%; } .xSmallTextbox{ - width: 75%; + width: 75%; } .reExamTd{ - width: 13%; + width: 13%; } .repreTd{ - width: 50%; + width: 50%; } .repreTextbox{ - width: 95%; + width: 95%; } .trtTextbox{ - width: 5%; - margin-right: 1.2%; + width: 5%; + margin-right: 1.2%; } .parentCdTextBox{ - width: 15%; + width: 15%; } .parentNameTextBox{ - width: 75%; + width: 75%; } .hpInfoColumn{ - width : 12%; - height: 40px; - background : rgb(225, 233, 250); - border : solid 1px; + width : 12%; + height: 40px; + background : rgb(225, 233, 250); + border : solid 1px; } .hpAssrtTd{ - width: 12%; + width: 12%; } .hpAssrtTextbox{ - width: 85%; + width: 85%; } .border_bottom_none { - border-bottom-style:none; + border-bottom-style:none; } .numberBox{ - text-align: right; + text-align: right; } /*医師検索*/ /*ヘッダー*/ .docHeaderTable{ - width: 100%; + width: 100%; } .docHeaderTd{ - width: 24%; + width: 24%; } .docHeaderTdCenter{ - text-align: center; - width: 50%; + text-align: center; + width: 50%; } .docHeaderTdRight{ - text-align: right; - padding-right: 2%; + text-align: right; + padding-right: 2%; } .docButchMsg{ - /* font-size: 80%; */ - color: red; + /* font-size: 80%; */ + color: red; } .docHeader_bt{ - width: 40%; + width: 40%; } /* アルトマーク課題管理表No.2の修正 8% → 10% */ /* アルトマーク課題管理表No.8の修正 10% → 14% */ .docSearchColumnTd{ - width: 14%; + width: 14%; } .docSearchTextboxTd{ - width: 18%; + width: 18%; } .docSearchTextbox_td{ - width: 94%; + width: 94%; } .docSearchTextbox{ - width: 90%; - margin-right: 5%; - margin-top: 0.8%; - margin-bottom: 0.8%; + width: 90%; + margin-right: 5%; + margin-top: 0.8%; + margin-bottom: 0.8%; } .docSearchTableDivOne{ - width: 100%; + width: 100%; } .docSearchTableDivTwo{ - margin-bottom: 1%; - padding-bottom: 1%; - border-bottom: solid 1px gray; - width: 100%; -} - -.docSearchScroll{ - overflow: auto; - white-space: nowrap; - margin-bottom: 2%; - width: 100%; - height: 270px; -} - -.docSearchScroll::-webkit-scrollbar { - height: 5px; - width: 10px; -} - -.docSearchScroll::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; -} - -.docSearchScroll::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; + margin-bottom: 1%; + padding-bottom: 1%; + border-bottom: solid 1px gray; + width: 100%; } .allOnOffButton{ - width: 6%; + width: 6%; } /*医師情報*/ .docInfoTable{ - margin-top: 1%; - margin-left: 5%; - margin-right: 2%; - margin-bottom: 1%; - width: 93%; - border-bottom: solid 1px gray; + margin-top: 1%; + margin-left: 5%; + margin-right: 2%; + margin-bottom: 1%; + width: 93%; + border-bottom: solid 1px gray; } .docInfoTd{ - padding-bottom: 0.5%; + padding-bottom: 0.5%; } .docInfoTextBox{ - margin-left: 0.5%; - margin-right: 2%; - width: 8%; + margin-left: 0.5%; + margin-right: 2%; + width: 8%; } .docInfoTrtTextBox{ - margin-left: 0.5%; + margin-left: 0.5%; } .docBelongTable{ - margin-left: 1%; - width: 98%; - border-bottom: solid 1px gray; + margin-left: 1%; + width: 98%; + border-bottom: solid 1px gray; } .docBelongTd{ - width: 49%; - height: 150px; + width: 49%; + height: 150px; } .docSocietyTable{ - width: 100%; + width: 100%; } .scroll{ - overflow: auto; - height: 120px; - width: 90%; - margin-left: 7%; - margin-bottom: 4%; + overflow: auto; + height: 120px; + width: 90%; + margin-left: 7%; + margin-bottom: 4%; } .scroll::-webkit-scrollbar { - height: 5px; - 
width: 10px; + height: 5px; + width: 10px; } .scroll::-webkit-scrollbar-track { - border-radius: 5px; - background: #eee; + border-radius: 5px; + background: #eee; } .scroll::-webkit-scrollbar-thumb { - border-radius: 5px; - background: #666; + border-radius: 5px; + background: #666; } .rightBoderLine{ - border-right: solid 1px gray; + border-right: solid 1px gray; } .wrkplaceH1{ - margin-top: 0.3%; + margin-top: 0.3%; } .wrkplaceTable{ - width: 100%; + width: 100%; } @@ -508,17 +492,17 @@ table{ .clear_bt{ - margin-left: 120px; - width: 60px + margin-left: 120px; + width: 60px } .back_bt{ - margin-left: 1042px; - width: 80px + margin-left: 1042px; + width: 80px } .noLine{ - text-decoration: none; + text-decoration: none; } @@ -527,13 +511,13 @@ table{ /*共通:アルトマーク施設検索,医師検索,施設詳細*/ .maxWidth_tb { - width: 100%; + width: 100%; } /*アルトマーク施設検索,医師検索共通*/ .search_btTd { - text-align: right; + text-align: right; } .selection { @@ -547,20 +531,14 @@ table{ /*医師検索*/ .search_middleTd { - padding-right: 25px; - width : 450px; -} - -.docSearchScroll_div { - overflow: auto; - height: 200px; - width: 1132px; + padding-right: 25px; + width : 450px; } /*共通:施設詳細、医師詳細*/ .transition{ - text-align: right; - margin-right: 60px; + text-align: right; + margin-right: 60px; } @@ -568,18 +546,18 @@ table{ .data_width_middle { - width : 300px; + width : 300px; } .border_top_none { - border-top-style:none; + border-top-style:none; } .textbox_margin_short { - margin-left : 5px; + margin-left : 5px; } .label_margin { @@ -590,82 +568,82 @@ table{ /*医師詳細*/ .docInfo_table{ - margin-bottom: 30px; - border-bottom: solid 1px gray; - width: 1132px; + margin-bottom: 30px; + border-bottom: solid 1px gray; + width: 1132px; } .small_tb{ - width: 100px; + width: 100px; } .docBelongScroll_div { - overflow: auto; - height: 100px; - width: 500px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 100px; + width: 500px; + margin: 0px 30px 0px 30px; } .rightPadding_table{ - padding-right: 50px; + padding-right: 50px; } .docPlaceScroll_div { - overflow: auto; - height: 150px; - width: 700px; - margin: 0px 30px 0px 30px; + overflow: auto; + height: 150px; + width: 700px; + margin: 0px 30px 0px 30px; } .result_tr{ - overflow-y: scroll; - overflow-x: scroll; + overflow-y: scroll; + overflow-x: scroll; } .result_data{ - overflow-y: scroll; - overflow-x: scroll; - width: 50px; + overflow-y: scroll; + overflow-x: scroll; + width: 50px; } /* tablesoter */ table.tablesorter { - font-family:arial; - background-color: #CDCDCD; - font-size: 8pt; - text-align: left; + font-family:arial; + background-color: #CDCDCD; + font-size: 12pt; + text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { - background-color: #e6EEEE; - border: 0.1px solid silver; - font-size: 8pt; - padding: 4px; - padding-right: 20px; + background-color: #e6EEEE; + border: 0.1px solid silver; + font-size: 12pt; + padding: 4px; + padding-right: 20px; } table.tablesorter thead tr .header { - background-image: url(bg.gif); - background-repeat: no-repeat; - background-position: center right; - cursor: pointer; + background-image: url(bg.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; } table.tablesorter tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - border: 0.1px solid silver; - vertical-align: top; + color: #3D3D3D; + padding: 4px; + background-color: #FFF; + border: 0.1px solid silver; + vertical-align: top; } table.tablesorter tbody tr.odd td { - background-color:#F0F0F6; + 
background-color:#F0F0F6; } table.tablesorter thead tr .headerSortUp { - background-image: url(asc.gif); + background-image: url(asc.gif); } table.tablesorter thead tr .headerSortDown { - background-image: url(desc.gif); + background-image: url(desc.gif); } table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp { - background-color: #8dbdd8; + background-color: #8dbdd8; } diff --git a/ecs/jskult-webapp/src/static/function/businessLogicScript.js b/ecs/jskult-webapp/src/static/function/businessLogicScript.js index bd39eafd..8e711fbe 100644 --- a/ecs/jskult-webapp/src/static/function/businessLogicScript.js +++ b/ecs/jskult-webapp/src/static/function/businessLogicScript.js @@ -96,7 +96,7 @@ function enableDatePicker() { // 日付入力チェック // 引数:チェックするテキストボックスNo function autoModifyDate($this){ - // 日付フォーマットチェック + // 日付フォーマットチェック if($this.value === "" || (!$this.value.match(/^\d{4}\/\d{2}\/\d{2}$/) && !$this.value.match(/^\d{4}\d{2}\d{2}$/))) @@ -110,46 +110,24 @@ function autoModifyDate($this){ // yyyyMMddの場合→yyyy/MM/dd const datePatternMatches = strFormat.match(/^(\d{4})(\d{2})(\d{2})$/); if (datePatternMatches){ - strFormat = `${datePatternMatches[1]}/${datePatternMatches[2]}/${datePatternMatches[3]}`; + strFormat = `${datePatternMatches[1]}/${datePatternMatches[2]}/${datePatternMatches[3]}`; } // yyyy/00/00~yyyy/00/00の場合→yyyy/01/01~yyyy/12/31 // yyyy/MM/00~yyyy/MM/01の場合→yyyy/MM/01~yyyy/MM/末日 - // 開始日の場合 - if ($this.name.includes('from')){ + // 開始日の場合 + if ($this.name.includes('from')){ strFormat = strFormat.replace("00/00", "01/01"); strFormat = strFormat.replace("00", "01"); } // 終了日の場合 - else if ($this.name.includes('to')){ + else if ($this.name.includes('to')){ strFormat = strFormat.replace("00/00", "12/31"); const date = new Date(strFormat.slice(0, 4), strFormat.slice(5, 7), 0).getDate(); strFormat = strFormat.replace("00", date.toString()); } - $this.value = strFormat; + $this.value = strFormat; } -// 他のページで共通化しよう -// ページが読み込まれたときにsendクラスのボタンを押せないようにする -// 初期値をdisabledにしときゃいい -$(function(){ - $(".send").prop('disabled',true); -}); - -// 検索結果のところのボタンをチェックが1個でも付いたら押せるようにして、チェックがなければ押せないようにする関数 -// 条件:チェックボックスのクラス名に"selectedページ数"というのがついていること -// 条件:ボタンにクラス名 send がついていること -function resultBtDisablead(){ - var selected = ".selected" + tableCurrentPage; - var cnt1 = $(selected + ' :checked').length; - selected += " input.checkbox"; - - if(cnt1 == 0) { - $(".send").prop('disabled',true); - } - else { - $(".send").prop('disabled',false); - } -} // 前のスペースを許さない入力チェック function checkSpaceForm($this) @@ -186,19 +164,12 @@ function checkPassForm($this) $this.value=str; } -// 廃止予定 -function DisplayErrorDialog(strMesssage) { - $("#errorTxt").html(strMesssage); - $("#error").dialog("open"); -} - -/* ult.jsから移植 */ // チェックボックス全選択関数 // 条件:チェックボックスのクラス名に"selected"というのがついていること // 条件:ボタンにクラス名 send がついていること function allOn(){ - $(".selected").prop("checked", true); - $(".send").prop('disabled',false); + $(".selected").prop("checked", true); + $(".send").prop('disabled',false); } // チェックボックス全解除関数 diff --git a/ecs/jskult-webapp/src/system_var/constants.py b/ecs/jskult-webapp/src/system_var/constants.py index 200e4dcd..54e4c575 100644 --- a/ecs/jskult-webapp/src/system_var/constants.py +++ b/ecs/jskult-webapp/src/system_var/constants.py @@ -119,7 +119,6 @@ LOGOUT_REASON_BATCH_PROCESSING = 'batch_processing' LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE = 'batch_processing_ult' LOGOUT_REASON_BACKUP_PROCESSING = 'dump_processing' LOGOUT_REASON_NOT_LOGIN = 'not_login' -LOGOUT_REASON_SESSION_EXPIRED 
= 'session_expired'
 LOGOUT_REASON_DB_ERROR = 'db_error'
 LOGOUT_REASON_UNEXPECTED = 'unexpected'
@@ -130,7 +129,6 @@ LOGOUT_REASON_MESSAGE_MAP = {
     LOGOUT_REASON_BATCH_PROCESSING_FOR_MAINTE: '日次バッチ処理中のため、マスタ-メンテは使用出来ません。',
     LOGOUT_REASON_BACKUP_PROCESSING: 'バックアップ取得を開始しました。日次バッチ更新が終了するまでマスターメンテは使用できません',
     LOGOUT_REASON_NOT_LOGIN: 'Loginしてからページにアクセスしてください。',
-    LOGOUT_REASON_SESSION_EXPIRED: 'セッションが切れています。再度Loginしてください。',
     LOGOUT_REASON_DB_ERROR: 'DB接続に失敗しました。再度Loginするか、管理者にお問い合わせください。',
     LOGOUT_REASON_UNEXPECTED: '予期しないエラーが発生しました。再度Loginするか、管理者に問い合わせてください。'
 }
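The autoModifyDate rework in businessLogicScript.js above encodes three normalization rules: yyyyMMdd gains slashes, a from-date of 00 falls back to the first day (00/00 becomes 01/01), and a to-date of 00 falls back to the last day of the month via new Date(yyyy, MM, 0).getDate(); JavaScript months are zero-based, so passing the one-based month with day 0 lands on the final day of the intended month. The same rules in Python for reference (auto_modify_date is an illustrative name; also note the JS version replaces the first occurrence of "00", which would match inside a year like 2000, while this sketch targets the day field explicitly):

import calendar
import re


def auto_modify_date(value: str, is_from: bool) -> str:
    m = re.fullmatch(r'(\d{4})(\d{2})(\d{2})', value)
    if m:
        value = '/'.join(m.groups())  # yyyyMMdd -> yyyy/MM/dd
    if is_from:
        value = value.replace('00/00', '01/01')
        if value.endswith('/00'):
            value = value[:-2] + '01'
    else:
        value = value.replace('00/00', '12/31')
        if value.endswith('/00'):
            year, month = int(value[:4]), int(value[5:7])
            value = value[:8] + str(calendar.monthrange(year, month)[1])
    return value


print(auto_modify_date('20240500', is_from=False))  # -> '2024/05/31'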

diff --git a/ecs/jskult-webapp/src/templates/docInfo.html b/ecs/jskult-webapp/src/templates/docInfo.html
index 8f9beb21..71b82b75 100644
--- a/ecs/jskult-webapp/src/templates/docInfo.html
+++ b/ecs/jskult-webapp/src/templates/docInfo.html
@@ -4,228 +4,228 @@
 {% with subtitle = ultmarc.subtitle %}
 {% include '_header.html' %}
 {% endwith %}
[docInfo.html: the tag markup of this hunk is not recoverable; the legible fragments suggest a whole-template re-indentation with the visible content unchanged: the {{ultmarc.subtitle}}/医師情報 heading, the 「日次バッチ処理中のため、データが正しく表示されない可能性があります」 warning, the doctor fields (医師コード, 氏名(漢字), 氏名(カナ), 性別, 生年月日, 出身大学, 出身県, 卒年, 登録年, 開業年), the 診療科目 row, the 所属学会 and 所属学会専門医 tables (コード and 名称 columns), and the 勤務先履歴 table (ULT施設コード, 勤務先略名, 所属部科名, 役職名, 職位, 開始年月日, 終了年月日, with current rows showing 9999/99/99 as the end date).]
\ No newline at end of file
diff --git a/ecs/jskult-webapp/src/templates/docSearch.html b/ecs/jskult-webapp/src/templates/docSearch.html
index 365e2745..7f6cc551 100644
--- a/ecs/jskult-webapp/src/templates/docSearch.html
+++ b/ecs/jskult-webapp/src/templates/docSearch.html
@@ -6,185 +6,191 @@
 {% with subtitle = ultmarc.subtitle %}
 {% include '_header.html' %}
 {% endwith %}
+    // Enter押下時にsubmitさせなくする
+    $(function() {
+        $(document).on("keypress", "input:not(.allow_submit)", function(event) {
+            return event.which !== 13;
+        });
+    });
[docSearch.html: the rest of this hunk lost its tag markup; the legible fragments mirror the docInfo.html change, a re-indentation plus the {{ultmarc.subtitle}} heading and the 「日次バッチ処理中のため、データが正しく表示されない可能性があります」 warning.]