Merge pull request #115 feature-NEWDWH2021-771 into develop-6crm

This commit is contained in:
朝倉 明日香 2022-10-14 11:09:54 +09:00
commit 421310a839
19 changed files with 657 additions and 273 deletions

View File

@ -98,12 +98,12 @@
│ ├── system_var/ -- 環境変数と定数ファイル置き場
│ ├── upload_last_fetch_datetime_process.py -- [前回取得日時ファイル更新処理]モジュール
│ ├── upload_result_data_process.py -- [取得処理実施結果アップロード処理]モジュール
── util/ -- ユーティリティモジュール置き場
├── counter_object.py -- リトライ判定のためのカウントアップクラス
├── dict_checker.py -- 辞書型値オブジェクトの設定値チェック用クラス
├── execute_datetime.py -- 取得処理開始年月日時分秒の管理クラス
└── logger.py -- ログ管理クラス
── util/ -- ユーティリティモジュール置き場
├── counter_object.py -- リトライ判定のためのカウントアップクラス
├── dict_checker.py -- 辞書型値オブジェクトの設定値チェック用クラス
├── execute_datetime.py -- 取得処理開始年月日時分秒の管理クラス
└── logger.py -- ログ管理クラス
└── writer/ -- ファイル書き込みモジュール置き場
└── tests/ -- テストコード置き場
├── test_utils/ -- テストコードで共通的に使用できる関数群
├── aws/ -- AWS操作モジュールのテスト

View File

@ -1,5 +1,3 @@
import json
import boto3
from src.system_var.constants import (AWS_RESOURCE_S3, S3_CHAR_CODE,
S3_RESPONSE_BODY)
@ -24,9 +22,8 @@ class S3Resource:
body = response[S3_RESPONSE_BODY].read()
return body.decode(S3_CHAR_CODE)
def put_object(self, object_key: str, data: str) -> None:
s3_object = self.__s3_bucket.Object(object_key)
s3_object.put(Body=data.encode(S3_CHAR_CODE), ContentEncoding=S3_CHAR_CODE)
def put_object(self, object_key: str, local_file_path: str) -> None:
self.__s3_bucket.upload_file(Key=object_key, Filename=local_file_path)
return
def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None:
@ -47,12 +44,12 @@ class ConfigBucket:
def get_object_info_file(self) -> str:
return self.__s3_resource.get_object(f'{OBJECT_INFO_FOLDER}/{OBJECT_INFO_FILENAME}')
def get_last_fetch_datetime_file(self, file_path: str) -> str:
return self.__s3_resource.get_object(f'{LAST_FETCH_DATE_FOLDER}/{file_path}')
def get_last_fetch_datetime_file(self, file_key: str) -> str:
return self.__s3_resource.get_object(f'{LAST_FETCH_DATE_FOLDER}/{file_key}')
def put_last_fetch_datetime_file(self, file_path: str, data: str) -> None:
def put_last_fetch_datetime_file(self, file_key: str, local_file_path: str) -> None:
self.__s3_resource.put_object(
f'{LAST_FETCH_DATE_FOLDER}/{file_path}', data)
f'{LAST_FETCH_DATE_FOLDER}/{file_key}', local_file_path)
return
@ -65,9 +62,9 @@ class DataBucket:
def __str__(self) -> str:
return IMPORT_DATA_BUCKET
def put_csv(self, file_path: str, data: str) -> None:
object_key = f'{CRM_IMPORT_DATA_FOLDER}/{file_path}'
self.__s3_resource.put_object(object_key, data)
def put_csv(self, file_key: str, local_file_path: str) -> None:
object_key = f'{CRM_IMPORT_DATA_FOLDER}/{file_key}'
self.__s3_resource.put_object(object_key, local_file_path)
return
def put_csv_from(self, src_bucket: str, src_key: str):
@ -85,17 +82,17 @@ class BackupBucket:
def __str__(self) -> str:
return CRM_BACKUP_BUCKET
def put_response_json(self, file_path: str, data: dict) -> None:
object_key = f'{RESPONSE_JSON_BACKUP_FOLDER}/{file_path}'
self.__s3_resource.put_object(object_key, json.dumps(data, ensure_ascii=False))
def put_response_json(self, file_key: str, local_file_path: str) -> None:
object_key = f'{RESPONSE_JSON_BACKUP_FOLDER}/{file_key}'
self.__s3_resource.put_object(object_key, local_file_path)
return
def put_csv(self, file_path: str, data: str) -> None:
object_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{file_path}'
self.__s3_resource.put_object(object_key, data)
def put_csv(self, file_key: str, local_file_path: str) -> None:
object_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{file_key}'
self.__s3_resource.put_object(object_key, local_file_path)
return
def put_result_json(self, file_path: str, data: dict) -> None:
object_key = f'{PROCESS_RESULT_FOLDER}/{file_path}'
self.__s3_resource.put_object(object_key, json.dumps(data, ensure_ascii=False))
def put_result_json(self, file_key: str, local_file_path: str) -> None:
object_key = f'{PROCESS_RESULT_FOLDER}/{file_key}'
self.__s3_resource.put_object(object_key, local_file_path)
return

View File

@ -1,18 +1,22 @@
import os
import tempfile
from src.aws.s3 import BackupBucket
from src.config.objects import TargetObject
from src.error.exceptions import FileUploadException
from src.system_var.constants import CSVBK_JP_NAME
from src.system_var.constants import CSVBK_JP_NAME, TEMPORARY_FILENAME
from src.util.execute_datetime import ExecuteDateTime
from src.util.logger import logger_instance as logger
from src.writer.file_writer import CsvWriter
def backup_crm_csv_data_process(target_object: TargetObject, execute_datetime: ExecuteDateTime, csv_string: str):
def backup_crm_csv_data_process(target_object: TargetObject, execute_datetime: ExecuteDateTime, csv_data: list):
"""CSVバックアップ処理
Args:
target_object (TargetObject): 取得対象オブジェクト情報インスタンス
execute_datetime (ExecuteDateTime): 実行日時取得インスタンス
csv_string (str): csvデータ
csv_data (list): csvデータ
Raises:
FileUploadException: S3のファイルアップロード失敗
@ -21,27 +25,38 @@ def backup_crm_csv_data_process(target_object: TargetObject, execute_datetime: E
# ① CSVバックアップ処理の開始ログを出力する
target_object_name = target_object.object_name
upload_file_name = target_object.upload_file_name
upload_file_key = f'{execute_datetime.to_path()}/{upload_file_name}.csv'
logger.info(
f'I-CSVBK-01 [{target_object_name}] のCSVデータのバックアップ処理を開始します ファイル名:[{upload_file_name}.csv]')
try:
# ② CRMバックアップ保管用バケットに、変換後のCSVデータのバックアップを保管する
backup_bucket = BackupBucket()
backup_bucket.put_csv(
f'{execute_datetime.to_path()}/{upload_file_name}.csv', csv_string)
logger.debug(
f'D-CSVBK-02 [{target_object_name}] のCSVデータバックアップ 正常終了')
_backup_csv_file(target_object_name, upload_file_key, csv_data)
except Exception as e:
raise FileUploadException(
'E-CSVBK-01',
CSVBK_JP_NAME, f'[{target_object_name}] CSVデータのバックアップに失敗しました ファイル名:[{upload_file_name}.csv] エラー内容:[{e}]')
# ③ CSVバックアップ処理の終了ログを出力する
logger.info(
f'I-CSVBK-03 [{target_object_name}] のCSVデータのバックアップ処理を終了します')
# ④ 次の処理へ移行する
return
def _backup_csv_file(target_object_name: str, upload_file_key: str, csv_data: list) -> None:
# 一時ファイル書き込み用の領域を確保
with tempfile.TemporaryDirectory(prefix=f'{target_object_name}_') as tmpdir:
# アップロード用のファイルをローカルに書き出す
local_file_path = os.path.join(tmpdir, TEMPORARY_FILENAME)
writer = CsvWriter(local_file_path, csv_data)
writer.write()
# ファイルからS3に書き込み
backup_bucket = BackupBucket()
backup_bucket.put_csv(upload_file_key, local_file_path)
logger.debug(
f'D-CSVBK-02 [{upload_file_key}] のCSVデータバックアップ 正常終了')
return

View File

@ -1,9 +1,13 @@
import os
import tempfile
from src.aws.s3 import BackupBucket
from src.config.objects import TargetObject
from src.error.exceptions import FileUploadException
from src.system_var.constants import RESBK_JP_NAME
from src.system_var.constants import RESBK_JP_NAME, TEMPORARY_FILENAME
from src.util.execute_datetime import ExecuteDateTime
from src.util.logger import logger_instance as logger
from src.writer.file_writer import JsonWriter
def backup_crm_data_process(target_object: TargetObject, sf_object_dict: dict, execute_datetime: ExecuteDateTime):
@ -23,14 +27,9 @@ def backup_crm_data_process(target_object: TargetObject, sf_object_dict: dict, e
logger.info(f'I-RESBK-01 [{object_name}] のCRM電文データバックアップ処理を開始します')
try:
# ② CRMバックアップ保管用バケットに、CRMから取得したJSONの電文データのバックアップを保管する
file_name = f'{execute_datetime.to_path()}/{target_object.upload_file_name}.json'
backup_bucket = BackupBucket()
backup_bucket.put_response_json(file_name, sf_object_dict)
logger.debug(f'D-RESBK-02 [{object_name}] のJSONデータバックアップ 正常終了')
# ② CRMバックアップ保管用バケットに、CRMから取得したJSONの電文データのバックアップを保管する
_backup_json_file(object_name, file_name, sf_object_dict)
except Exception as e:
raise FileUploadException(
'E-RESBK-01',
@ -43,3 +42,20 @@ def backup_crm_data_process(target_object: TargetObject, sf_object_dict: dict, e
# ④ 次の処理へ移行する
return
def _backup_json_file(object_name: str, file_name: str, sf_object_dict: dict) -> None:
# 一時ファイル書き込み用の領域を確保
with tempfile.TemporaryDirectory(prefix=f'{object_name}_') as tmpdir:
# アップロード用のファイルをローカルに書き出す
local_file_path = os.path.join(tmpdir, TEMPORARY_FILENAME)
writer = JsonWriter(local_file_path, sf_object_dict)
writer.write()
# ファイルからS3に書き込み
backup_bucket = BackupBucket()
backup_bucket.put_response_json(file_name, local_file_path)
logger.debug(f'D-RESBK-02 [{object_name}] のJSONデータバックアップ 正常終了')
return

View File

@ -1,6 +1,3 @@
import csv
import io
from src.config.objects import TargetObject
from src.converter.convert_strategy import ConvertStrategyFactory
@ -8,42 +5,24 @@ from src.converter.convert_strategy import ConvertStrategyFactory
class CSVStringConverter:
def __init__(self, target_object: TargetObject, sf_object_jsons: dict) -> None:
self.__target_object = target_object
self.__sf_object_jsons = sf_object_jsons
# メモリ節約のため、ジェネレータ化
self.__sf_object_jsons = (i for i in sf_object_jsons)
self.__convert_strategy_factory = ConvertStrategyFactory()
def convert(self) -> str:
extracted_sf_object_jsons = self.__extract_sf_object_jsons()
csv_data = self.__convert_to_csv(extracted_sf_object_jsons)
csv_string = self.__write_csv_string(csv_data)
return csv_string
def __extract_sf_object_jsons(self) -> list:
try:
extracted_sf_object_jsons = []
for sf_object_json in self.__sf_object_jsons:
extracted_sf_object_jsons.append(
self.__extract_necessary_props_from(sf_object_json))
return extracted_sf_object_jsons
except Exception as e:
raise Exception('必要なjsonのデータ成形に失敗しました', e)
csv_data = self.__convert_to_csv(self.__sf_object_jsons)
return csv_data
def __extract_necessary_props_from(self, sf_object_json) -> dict:
clone_sf_object = {**sf_object_json}
del sf_object_json['attributes']
return {k.upper(): v for k, v in sf_object_json.items()}
del clone_sf_object['attributes']
uppercase_key_sf_object = {
k.upper(): v for k, v in clone_sf_object.items()}
return uppercase_key_sf_object
def __convert_to_csv(self, extracted_sf_object_jsons) -> list:
def __convert_to_csv(self, sf_object_jsons) -> list:
try:
columns = self.__target_object.columns
csv_data = []
for i, json_object in enumerate(extracted_sf_object_jsons, 1):
csv_data = [columns]
for i, json_object in enumerate(sf_object_jsons, 1):
json_object = self.__extract_necessary_props_from(json_object)
csv_row = []
for column in columns:
v = json_object[column.upper()]
@ -59,17 +38,3 @@ class CSVStringConverter:
except Exception as e:
raise Exception(
f'CSV変換に失敗しました カラム名:[{column}] 行番号: [{i}] エラー内容:[{e}]')
def __write_csv_string(self, csv_data) -> str:
try:
with io.StringIO(newline='') as string_stream:
writer = csv.writer(string_stream, delimiter=',', lineterminator='\r\n',
doublequote=True, quotechar='"', quoting=csv.QUOTE_ALL, strict=True)
writer.writerow(self.__target_object.columns)
writer.writerows(csv_data)
csv_value = string_stream.getvalue()
return csv_value
except Exception as e:
raise Exception('CSVデータの出力に失敗しました', e)

View File

@ -72,11 +72,21 @@ UPLD_JP_NAME = 'CSVアップロード処理'
UPD_JP_NAME = '前回取得日時ファイル更新'
END_JP_NAME = '取得処理実施結果アップロード処理'
# CSVチェック
# CSV
CSV_TRUE_VALUE = 1
CSV_FALSE_VALUE = 0
CSV_LINE_TERMINATOR = '\r\n'
CSV_DELIMITER = ','
CSV_QUOTE_CHAR = '"'
# オブジェクト変数
# システム変数
FILE_CHAR_CODE = 'utf-8'
FILE_MODE_WRITE = 'w'
TEMPORARY_FILENAME = 'temporary_file'
# CRM_取得オブジェクト情報ファイル関連
OBJECTS_KEY = 'objects'
OBJECTS_TYPE = list
OBJECT_NAME_KEY = 'object_name'
@ -94,6 +104,8 @@ UPLOAD_FILE_NAME_TYPE = str
DATETIME_COLUMN_KEY = 'datetime_column'
DATETIME_COLUMN_TYPE = str
DATETIME_COLUMN_DEFAULT_VALUE = 'SystemModstamp'
# 前回取得日時ファイル関連
LAST_FETCH_DATETIME_TO_KEY = 'last_fetch_datetime_to'
LAST_FETCH_DATETIME_TO_TYPE = str
LAST_FETCH_DATETIME_FROM_KEY = 'last_fetch_datetime_from'

View File

@ -1,10 +1,12 @@
import json
import os
import tempfile
from src.aws.s3 import ConfigBucket
from src.config.objects import LastFetchDatetime, TargetObject
from src.error.exceptions import FileUploadException
from src.system_var.constants import UPD_JP_NAME
from src.system_var.constants import TEMPORARY_FILENAME, UPD_JP_NAME
from src.util.logger import logger_instance as logger
from src.writer.file_writer import JsonWriter
def upload_last_fetch_datetime_process(target_object: TargetObject, last_fetch_datetime: LastFetchDatetime):
@ -18,15 +20,16 @@ def upload_last_fetch_datetime_process(target_object: TargetObject, last_fetch_d
FileUploadException: S3のファイルアップロード失敗
"""
object_name = target_object.object_name
# ① 前回取得日時ファイル更新処理の開始ログを出力する
logger.info(
f'I-UPD-01 [{target_object.object_name}] の前回取得日時ファイルの更新処理を開始します')
f'I-UPD-01 [{object_name}] の前回取得日時ファイルの更新処理を開始します')
try:
if target_object.is_update_last_fetch_datetime is False:
# ② オブジェクト情報.is_update_last_fetch_datetimeがfalseの場合、以降の処理をスキップする
logger.info(
f'I-UPD-02 [{target_object.object_name}] の前回取得日時ファイルの更新処理をスキップします')
f'I-UPD-02 [{object_name}] の前回取得日時ファイルの更新処理をスキップします')
return
# ③ 前回取得日時ファイル.last_fetch_datetime_fromに取得処理開始年月日時分秒を設定する
@ -35,23 +38,32 @@ def upload_last_fetch_datetime_process(target_object: TargetObject, last_fetch_d
'last_fetch_datetime_from': last_fetch_datetime.last_fetch_datetime_to,
'last_fetch_datetime_to': ''
}
config_bucket = ConfigBucket()
config_bucket.put_last_fetch_datetime_file(
target_object.last_fetch_datetime_file_name, json.dumps(last_fetch_datetime_dict))
logger.info(
f'D-UPD-03 [{target_object.object_name}] の前回取得日時ファイル更新処理 正常終了')
_upload_last_fetch_datetime(target_object, last_fetch_datetime_dict)
except Exception as e:
raise FileUploadException(
'E-UPD-01',
UPD_JP_NAME,
f'[{target_object.object_name}] 前回処理日時ファイルのアップロードに失敗しました ファイル名:[{target_object.last_fetch_datetime_file_name}] エラー内容:[{e}]')
f'[{object_name}] 前回処理日時ファイルのアップロードに失敗しました ファイル名:[{target_object.last_fetch_datetime_file_name}] エラー内容:[{e}]')
# ④ 前回取得日時ファイル更新処理の終了ログを出力する
logger.info(
f'I-UPD-04 [{target_object.object_name}] の前回取得日時ファイルの更新処理を終了します')
f'I-UPD-04 [{object_name}] の前回取得日時ファイルの更新処理を終了します')
# ⑤ 次の処理へ移行する
return
def _upload_last_fetch_datetime(target_object: TargetObject, last_fetch_datetime_dict: dict) -> None:
# 一時ファイル書き込み用の領域を確保
with tempfile.TemporaryDirectory(prefix=f'{target_object.object_name}_') as tmpdir:
# アップロード用のファイルをローカルに書き出す
local_file_path = os.path.join(tmpdir, TEMPORARY_FILENAME)
writer = JsonWriter(local_file_path, last_fetch_datetime_dict)
writer.write()
# ファイルからS3に書き込み
config_bucket = ConfigBucket()
config_bucket.put_last_fetch_datetime_file(
target_object.last_fetch_datetime_file_name, local_file_path)
logger.info(
f'D-UPD-03 [{target_object.object_name}] の前回取得日時ファイル更新処理 正常終了')

View File

@ -1,9 +1,13 @@
import os
import tempfile
from src.aws.s3 import BackupBucket
from src.error.exceptions import FileUploadException
from src.system_var.constants import END_JP_NAME
from src.system_var.constants import END_JP_NAME, TEMPORARY_FILENAME
from src.system_var.environments import PROCESS_RESULT_FILENAME
from src.util.execute_datetime import ExecuteDateTime
from src.util.logger import logger_instance as logger
from src.writer.file_writer import JsonWriter
def upload_result_data_process(process_result: dict, execute_datetime: ExecuteDateTime):
@ -23,12 +27,7 @@ def upload_result_data_process(process_result: dict, execute_datetime: ExecuteDa
try:
# ② CRMバックアップ保管用バケットに、取得処理実施結果のJSONデータを保管する
backup_bucket = BackupBucket()
backup_bucket.put_result_json(
f'{execute_datetime.to_path()}/{PROCESS_RESULT_FILENAME}', process_result)
logger.debug(f'D-END-02 取得処理実施結果アップロード 正常終了')
_upload_result(execute_datetime, process_result)
except Exception as e:
raise FileUploadException(
'E-END-01', END_JP_NAME, f'取得処理実施結果のアップロードに失敗しました ファイル名:[{PROCESS_RESULT_FILENAME}] エラー内容:[{e}]')
@ -37,3 +36,18 @@ def upload_result_data_process(process_result: dict, execute_datetime: ExecuteDa
logger.info(f'I-END-03 取得処理実施結果アップロード処理を終了します')
return
def _upload_result(execute_datetime: ExecuteDateTime, process_result: dict) -> None:
# 一時ファイル書き込み用の領域を確保
with tempfile.TemporaryDirectory(prefix=f'{PROCESS_RESULT_FILENAME}_') as tmpdir:
# アップロード用のファイルをローカルに書き出す
local_file_path = os.path.join(tmpdir, TEMPORARY_FILENAME)
writer = JsonWriter(local_file_path, process_result)
writer.write()
# ファイルからS3に書き込み
backup_bucket = BackupBucket()
backup_bucket.put_result_json(
f'{execute_datetime.to_path()}/{PROCESS_RESULT_FILENAME}', local_file_path)
logger.debug(f'D-END-02 取得処理実施結果アップロード 正常終了')

View File

View File

@ -0,0 +1,43 @@
import csv
import json
from abc import ABCMeta, abstractmethod
from src.system_var.constants import (CSV_DELIMITER, CSV_LINE_TERMINATOR,
CSV_QUOTE_CHAR, FILE_CHAR_CODE,
FILE_MODE_WRITE)
class FileWriter(metaclass=ABCMeta):
def __init__(self, file_path: str, content) -> None:
self._file_path = file_path
self._content = content
@abstractmethod
def write(self) -> str:
"""ファイルを書き出し、ファイルパスを返す
Returns:
str: 書き出し先のファイルパス
"""
pass
class JsonWriter(FileWriter):
def write(self) -> str:
with open(self._file_path, mode=FILE_MODE_WRITE, encoding=FILE_CHAR_CODE, newline='') as f:
json.dump(self._content, f, ensure_ascii=False, )
return self._file_path
class CsvWriter(FileWriter):
def write(self) -> str:
with open(self._file_path, mode=FILE_MODE_WRITE, encoding=FILE_CHAR_CODE, newline='') as f:
writer = csv.writer(f, delimiter=CSV_DELIMITER, lineterminator=CSV_LINE_TERMINATOR,
quotechar=CSV_QUOTE_CHAR, doublequote=True, quoting=csv.QUOTE_ALL,
strict=True)
writer.writerows(self._content)
return self._file_path

View File

@ -1,3 +1,5 @@
import os
import pytest
from src.aws.s3 import BackupBucket, ConfigBucket, DataBucket, S3Resource
@ -51,7 +53,7 @@ class TestS3Resource:
# Assert
sut.get_object('hogehoge/test.txt')
def test_put_object(self, s3_test, s3_client, bucket_name):
def test_put_object(self, s3_test, s3_client, bucket_name, tmpdir):
"""
Cases:
- S3にオブジェクトをPUTできるか
@ -61,9 +63,14 @@ class TestS3Resource:
- PUTされたファイルが存在する
- PUTされたファイルの内容が期待値と一致する
"""
file_path = os.path.join(tmpdir, 'test.txt')
with open(file_path, mode='w') as f:
f.write('aaaaaaaaaaaaaaa')
sut = S3Resource(bucket_name)
sut.put_object('hogehoge/test.txt', 'aaaaaaaaaaaaaaa')
sut.put_object('hogehoge/test.txt', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key='hogehoge/test.txt')
assert actual['Body'].read().decode('utf-8') == 'aaaaaaaaaaaaaaa'
@ -213,7 +220,7 @@ class TestConfigBucket:
with pytest.raises(Exception):
sut.get_last_fetch_datetime_file('Object.json')
def test_put_last_fetch_datetime_file(self, s3_test, s3_client, bucket_name, monkeypatch):
def test_put_last_fetch_datetime_file(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
"""
Cases:
- オブジェクト最終更新日時ファイルをPUTできること
@ -226,8 +233,12 @@ class TestConfigBucket:
monkeypatch.setattr('src.aws.s3.CRM_CONFIG_BUCKET', bucket_name)
monkeypatch.setattr('src.aws.s3.LAST_FETCH_DATE_FOLDER', 'crm')
file_path = os.path.join(tmpdir, 'Object.json')
with open(file_path, mode='w') as f:
f.write('aaaaaaaaaaaaaaa')
sut = ConfigBucket()
sut.put_last_fetch_datetime_file('Object.json', 'aaaaaaaaaaaaaaa')
sut.put_last_fetch_datetime_file('Object.json', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key=f'crm/Object.json')
assert actual['Body'].read().decode('utf-8') == 'aaaaaaaaaaaaaaa'
@ -267,7 +278,7 @@ class TestConfigBucket:
class TestDataBucket:
def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch):
def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
"""
Cases:
- CSVファイルをPUTできること
@ -280,8 +291,12 @@ class TestDataBucket:
monkeypatch.setattr('src.aws.s3.IMPORT_DATA_BUCKET', bucket_name)
monkeypatch.setattr('src.aws.s3.CRM_IMPORT_DATA_FOLDER', 'crm/target')
file_path = os.path.join(tmpdir, 'test.json')
with open(file_path, mode='w') as f:
f.write('test,test,test')
sut = DataBucket()
sut.put_csv('test.csv', 'test,test,test')
sut.put_csv('test.csv', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key=f'crm/target/test.csv')
assert actual['Body'].read().decode('utf-8') == 'test,test,test'
@ -363,7 +378,7 @@ class TestDataBucket:
class TestBackupBucket:
def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch):
def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
"""
Cases:
- CSVファイルをPUTできること
@ -376,8 +391,12 @@ class TestBackupBucket:
monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
monkeypatch.setattr('src.aws.s3.CRM_IMPORT_DATA_BACKUP_FOLDER', 'data_import')
file_path = os.path.join(tmpdir, 'test.json')
with open(file_path, mode='w') as f:
f.write('test,test,test')
sut = BackupBucket()
sut.put_csv('test.csv', 'test,test,test')
sut.put_csv('test.csv', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key=f'data_import/test.csv')
assert actual['Body'].read().decode('utf-8') == 'test,test,test'
@ -399,7 +418,7 @@ class TestBackupBucket:
with pytest.raises(Exception):
sut.put_csv('test.csv', 'test,test,test')
def test_put_response_json(self, s3_test, s3_client, bucket_name, monkeypatch):
def test_put_response_json(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
"""
Cases:
- JSONファイルをPUTできること
@ -412,8 +431,12 @@ class TestBackupBucket:
monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
monkeypatch.setattr('src.aws.s3.RESPONSE_JSON_BACKUP_FOLDER', 'response_json')
file_path = os.path.join(tmpdir, 'test.json')
with open(file_path, mode='w') as f:
f.write('{"test": "test"}')
sut = BackupBucket()
sut.put_response_json('test.json', {"test": "test"})
sut.put_response_json('test.json', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key=f'response_json/test.json')
assert actual['Body'].read().decode('utf-8') == '{"test": "test"}'
@ -435,7 +458,7 @@ class TestBackupBucket:
with pytest.raises(Exception):
sut.put_response_json('test.json', {"test": "test"})
def test_put_result_json(self, s3_test, s3_client, bucket_name, monkeypatch):
def test_put_result_json(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
"""
Cases:
- 結果のJSONファイルをPUTできること
@ -448,8 +471,12 @@ class TestBackupBucket:
monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
monkeypatch.setattr('src.aws.s3.PROCESS_RESULT_FOLDER', 'data_import')
file_path = os.path.join(tmpdir, 'test.json')
with open(file_path, mode='w') as f:
f.write('{"test": "test"}')
sut = BackupBucket()
sut.put_result_json('result.json', {"test": "test"})
sut.put_result_json('result.json', file_path)
actual = s3_client.get_object(Bucket=bucket_name, Key=f'data_import/result.json')
assert actual['Body'].read().decode('utf-8') == '{"test": "test"}'

View File

@ -1,4 +1,3 @@
import textwrap
from collections import OrderedDict
import pytest
@ -98,16 +97,17 @@ class TestCSVStringConverter:
actual = csv_string_converter.convert()
# Expects
expected_value = '''\
"Id","AccountId","UserOrGroupId","AccountAccessLevel","OpportunityAccessLevel","CaseAccessLevel","ContactAccessLevel","RowCause","LastModifiedDate","LastModifiedById","IsDeleted"\r\n\
"TEST001","test001","","1","2","3","4","テストのため1","2022-06-01 09:00:00","1234567.0","0"\r\n\
"TEST002","test002","","5","6","7","8","テストのため2","2022-06-03 01:30:30","2.23","1"\r\n\
"TEST003","test003","","9","10","11","12","テストのため3","2022-06-04 08:50:50","3.234567","0"\r\n\
'''
expect = [
["Id", "AccountId", "UserOrGroupId", "AccountAccessLevel", "OpportunityAccessLevel", "CaseAccessLevel",
"ContactAccessLevel", "RowCause", "LastModifiedDate", "LastModifiedById", "IsDeleted"],
["TEST001", "test001", "", 1, 2, 3, 4, "テストのため1", "2022-06-01 09:00:00", 1234567.0, 0],
["TEST002", "test002", "", 5, 6, 7, 8, "テストのため2", "2022-06-03 01:30:30", 2.23, 1],
["TEST003", "test003", "", 9, 10, 11, 12, "テストのため3", "2022-06-04 08:50:50", 3.234567, 0]
]
# expected_valueのインデントが半角スペースと認識されてしまうため、`textwrap.dedent`にて補正
assert actual == textwrap.dedent(expected_value)
assert actual == expect
@pytest.mark.skip('抽出処理を変換処理と統合したため、テスト不要')
def test_raise_convert_extract_jsons(self) -> str:
"""
Cases:
@ -291,103 +291,3 @@ class TestCSVStringConverter:
# Expects
assert 'CSV変換に失敗しました カラム名:[Id] 行番号: [1]' in str(e.value)
def test_raise_convert_write_csv_string(self, monkeypatch) -> str:
"""
Cases:
csvデータ出力のCSVデータ取得で例外が発生すること
Arranges:
- オブジェクト情報の作成
- データの作成
- 実行日時取得インスタンスの生成
- オブジェクト情報インスタンスの生成
- csvデータ出力のエラーを発生させるためのモックを準備
Expects:
例外が期待値と一致すること
"""
# Arranges
object_info = {
"object_name": "AccountShare",
"columns": [
"Id",
"AccountId",
"UserOrGroupId",
"AccountAccessLevel",
"OpportunityAccessLevel",
"CaseAccessLevel",
"ContactAccessLevel",
"RowCause",
"LastModifiedDate",
"LastModifiedById",
"IsDeleted"
],
"is_skip": False,
"is_update_last_fetch_datetime": False,
"last_fetch_datetime_file_name": "AccountShare.json",
"upload_file_name": "CRM_AccountShare_{execute_datetime}",
"datetime_column": "LastModifiedDate"
}
data = [
OrderedDict([
('attributes', OrderedDict([('type', 'AccountShare'), ('url', '/services/data/v1.0/sobjects/AccountShare/test1')])),
('Id', 'TEST001'),
('AccountId', 'test001'),
('UserOrGroupId', None),
('AccountAccessLevel', 1),
('OpportunityAccessLevel', 2),
('CaseAccessLevel', 3),
('ContactAccessLevel', 4),
('RowCause', 'テストのため1'),
('LastModifiedDate', '2022-06-01T00:00:00.000+0000'),
('LastModifiedById', 1.234567E+6),
('IsDeleted', False)
]),
OrderedDict([
('attributes', OrderedDict([('type', 'AccountShare'), ('url', '/services/data/v1.0/sobjects/AccountShare/test1')])),
('Id', 'TEST002'),
('AccountId', 'test002'),
('UserOrGroupId', None),
('AccountAccessLevel', 5),
('OpportunityAccessLevel', 6),
('CaseAccessLevel', 7),
('ContactAccessLevel', 8),
('RowCause', 'テストのため2'),
('LastModifiedDate', '2022-06-02T16:30:30.000+0000'),
('LastModifiedById', 2.234567E+6),
('IsDeleted', True)
]),
OrderedDict([
('attributes', OrderedDict([('type', 'AccountShare'), ('url', '/services/data/v1.0/sobjects/AccountShare/test1')])),
('Id', 'TEST003'),
('AccountId', 'test003'),
('UserOrGroupId', None),
('AccountAccessLevel', 9),
('OpportunityAccessLevel', 10),
('CaseAccessLevel', 11),
('ContactAccessLevel', 12),
('RowCause', 'テストのため3'),
('LastModifiedDate', '2022-06-03T23:50:50.000+0000'),
('LastModifiedById', 3.234567E+6),
('IsDeleted', False)
])
]
execute_datetime = ExecuteDateTime()
target_object = TargetObject(object_info, execute_datetime)
def dummy_method(arg):
raise Exception(e)
# データ加工のみだと事前の処理によりエラーとなるため、csv出力モジュールをモック化する
monkeypatch.setattr("csv.writer", dummy_method)
# Act
csv_string_converter = CSVStringConverter(target_object, data)
with pytest.raises(Exception) as e:
csv_string_converter.convert()
# Expects
assert 'CSVデータの出力に失敗しました' in str(e.value)

View File

@ -34,12 +34,12 @@ class TestBackupCrmCsvDataProcess:
- CSVバックアップ処理の仕様に沿った正常系ログが出力されること(デバッグログは除く)
"""
# Arrange
csv_string = textwrap.dedent("""\
"Id","AccountNumber","LastModifiedDate","LastModifiedById","SystemModstamp","IsDeleted"
"TEST001","test001","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","1"
"TEST002","test002","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","0"
"TEST003","test003","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","0"
""")
csv_data = [
["Id", "AccountNumber", "LastModifiedDate", "LastModifiedById", "SystemModstamp", "IsDeleted"],
["TEST001", "test001", "2022-06-01 09:00:00", "1234567", "2022-06-01 09:00:00", 1],
["TEST002", "test002", "2022-06-01 09:00:00", "1234567", "2022-06-01 09:00:00", 0],
["TEST003", "test003", "2022-06-01 09:00:00", "1234567", "2022-06-01 09:00:00", 0]
]
target_object_dict = {
'object_name': 'Account',
'columns': [
@ -60,16 +60,23 @@ class TestBackupCrmCsvDataProcess:
monkeypatch.setattr('src.aws.s3.CRM_IMPORT_DATA_BACKUP_FOLDER', 'data_import')
# Act
backup_crm_csv_data_process(target_object, execute_datetime, csv_string)
backup_crm_csv_data_process(target_object, execute_datetime, csv_data)
# Assert
expect_csv_value = """\
"Id","AccountNumber","LastModifiedDate","LastModifiedById","SystemModstamp","IsDeleted"\r\n\
"TEST001","test001","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","1"\r\n\
"TEST002","test002","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","0"\r\n\
"TEST003","test003","2022-06-01 09:00:00","1234567","2022-06-01 09:00:00","0"\r\n\
"""
expect_file_key = f'data_import/{execute_datetime.to_path()}/CRM_Account_{execute_datetime.format_date()}.csv'
# ファイル確認
actual = s3_client.get_object(
Bucket=bucket_name,
Key=f'data_import/{execute_datetime.to_path()}/CRM_Account_{execute_datetime.format_date()}.csv')
Key=expect_file_key)
assert actual['Body'].read().decode('utf-8') == csv_string
assert actual['Body'].read().decode('utf-8') == textwrap.dedent(expect_csv_value)
# ログの確認
assert generate_log_message_tuple(
@ -104,15 +111,20 @@ class TestBackupCrmCsvDataProcess:
execute_datetime = ExecuteDateTime()
target_object = TargetObject(target_object_dict, execute_datetime)
with patch('src.backup_crm_csv_data_process.BackupBucket') as mock_backup_bucket:
with patch('src.backup_crm_csv_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.backup_crm_csv_data_process.CsvWriter') as mock_writer:
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_csv.return_value = ''
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
# Act
backup_crm_csv_data_process(target_object, execute_datetime, '')
# Assert
assert mock_backup_bucket_inst.put_csv.called is True
assert mock_writer_inst.write.called is True
def test_raise_put_csv(self, bucket_name, monkeypatch, caplog):
"""
@ -141,17 +153,69 @@ class TestBackupCrmCsvDataProcess:
execute_datetime = ExecuteDateTime()
target_object = TargetObject(target_object_dict, execute_datetime)
with patch('src.backup_crm_csv_data_process.BackupBucket') as mock_backup_bucket:
with patch('src.backup_crm_csv_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.backup_crm_csv_data_process.CsvWriter') as mock_writer:
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_csv.side_effect = Exception('登録エラー')
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
# Act
with pytest.raises(FileUploadException) as e:
backup_crm_csv_data_process(target_object, execute_datetime, '')
# Assert
assert mock_writer_inst.write.called is True
assert mock_backup_bucket_inst.put_csv.called is True
assert e.value.error_id == 'E-CSVBK-01'
assert e.value.func_name == CSVBK_JP_NAME
assert e.value.args[0] == \
f'[Account] CSVデータのバックアップに失敗しました ファイル名:[CRM_Account_{execute_datetime.format_date()}.csv] エラー内容:[登録エラー]'
def test_raise_put_csv_write_local_file(self, bucket_name, monkeypatch, caplog):
"""
Cases:
CSVデータをバックアップするための一時ファイルを書き込めない場合エラーが発生すること
Arranges:
- オブジェクト情報ファイル取得処理で例外が発生するようにする
Expects:
- 例外が発生する
- ファイルが書き込めないエラーが返却される
"""
# Arrange
target_object_dict = {
'object_name': 'Account',
'columns': [
'Id',
'AccountNumber',
'LastModifiedDate',
'LastModifiedById',
'SystemModstamp',
'IsDeleted'
]
}
execute_datetime = ExecuteDateTime()
target_object = TargetObject(target_object_dict, execute_datetime)
with patch('src.backup_crm_csv_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.backup_crm_csv_data_process.CsvWriter') as mock_writer:
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_csv.return_value = ''
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.side_effect = Exception('書き込みエラー')
# Act
with pytest.raises(FileUploadException) as e:
backup_crm_csv_data_process(target_object, execute_datetime, '')
# Assert
assert mock_writer_inst.write.called is True
assert mock_backup_bucket_inst.put_response_json.called is False
assert e.value.error_id == 'E-CSVBK-01'
assert e.value.func_name == CSVBK_JP_NAME
assert e.value.args[0] == \
f'[Account] CSVデータのバックアップに失敗しました ファイル名:[CRM_Account_{execute_datetime.format_date()}.csv] エラー内容:[書き込みエラー]'

View File

@ -133,14 +133,19 @@ class TestBackupCrmDataProcess:
execute_datetime = ExecuteDateTime()
target_object = TargetObject(target_object_dict, execute_datetime)
with patch('src.backup_crm_data_process.BackupBucket') as mock_backup_bucket:
with patch('src.backup_crm_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.backup_crm_data_process.JsonWriter') as mock_writer:
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_response_json.return_value = ''
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
# Act
backup_crm_data_process(target_object, {}, execute_datetime)
# Assert
assert mock_writer_inst.write.called is True
assert mock_backup_bucket_inst.put_response_json.called is True
def test_raise_put_response_json(self, bucket_name, monkeypatch, caplog):
@ -170,17 +175,69 @@ class TestBackupCrmDataProcess:
execute_datetime = ExecuteDateTime()
target_object = TargetObject(target_object_dict, execute_datetime)
with patch('src.backup_crm_data_process.BackupBucket') as mock_backup_bucket:
with patch('src.backup_crm_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.backup_crm_data_process.JsonWriter') as mock_writer:
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_response_json.side_effect = Exception('登録エラー')
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
# Act
with pytest.raises(FileUploadException) as e:
backup_crm_data_process(target_object, {}, execute_datetime)
# Assert
assert mock_writer_inst.write.called is True
assert mock_backup_bucket_inst.put_response_json.called is True
assert e.value.error_id == 'E-RESBK-01'
assert e.value.func_name == RESBK_JP_NAME
assert e.value.args[0] == \
f'[Account] 電文データのバックアップに失敗しました ファイル名:[CRM_Account_{execute_datetime.format_date()}.json] エラー内容:[登録エラー]'
def test_raise_put_response_json_write_local_file(self, bucket_name, monkeypatch, caplog):
    """Verify that an error is raised when the local temporary file used to
    place CRM response data on S3 cannot be written.

    Arranges:
        - Make JsonWriter.write raise an exception.
    Expects:
        - A FileUploadException is raised.
        - The exception carries the file-write error message, and the S3
          upload (put_response_json) is never attempted.
    """
    # Arrange
    target_object_dict = {
        'object_name': 'Account',
        'columns': [
            'Id',
            'AccountNumber',
            'LastModifiedDate',
            'LastModifiedById',
            'SystemModstamp',
            'IsDeleted'
        ]
    }
    execute_datetime = ExecuteDateTime()
    target_object = TargetObject(target_object_dict, execute_datetime)
    with patch('src.backup_crm_data_process.BackupBucket') as mock_backup_bucket, \
            patch('src.backup_crm_data_process.JsonWriter') as mock_writer:
        mock_backup_bucket_inst = mock_backup_bucket.return_value
        mock_backup_bucket_inst.put_response_json.return_value = ''
        mock_writer_inst = mock_writer.return_value
        # Local file write fails before any S3 interaction can happen.
        mock_writer_inst.write.side_effect = Exception('書き込みエラー')
        # Act
        with pytest.raises(FileUploadException) as e:
            backup_crm_data_process(target_object, {}, execute_datetime)
        # Assert
        assert mock_writer_inst.write.called is True
        assert mock_backup_bucket_inst.put_response_json.called is False
        assert e.value.error_id == 'E-RESBK-01'
        assert e.value.func_name == RESBK_JP_NAME
        assert e.value.args[0] == \
            f'[Account] 電文データのバックアップに失敗しました ファイル名:[CRM_Account_{execute_datetime.format_date()}.json] エラー内容:[書き込みエラー]'

View File

@ -1,4 +1,3 @@
import textwrap
from collections import OrderedDict
from unittest.mock import patch
@ -105,19 +104,22 @@ class TestConvertCrmCsvDataProcess:
target_object = TargetObject(target_object_dict, execute_datetime)
# Act
actual_csv_string = convert_crm_csv_data_process(target_object, response_json)
actual_csv_data = convert_crm_csv_data_process(target_object, response_json)
# Assert
expect_csv_string = """\
"Id","AccountNumber","LastModifiedDate","LastModifiedById","SystemModstamp","IsDeleted","PersonMailingAddress"\r\n\
"TEST001","test001","2022-06-01 09:00:00","1234567.0","2022-06-01 09:00:00","1","{""PersonMailingStreet"": ""Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua."", ""PersonMailingCity"": ""New york city"", ""PersonMailingState"": ""Ohaio"", ""PersonMailingPostalCode"": ""999-9999"", ""PersonMailingCountry"": ""US"", ""PersonMailingLatitude"": 50.1234567, ""PersonMailingLongitude"": 103.1234567, ""PersonMailingGeocodeAccuracy"": ""Address""}"\r\n\
"TEST002","test002","2022-06-01 09:00:00","1.23","2022-06-01 09:00:00","0","{""PersonMailingStreet"": ""Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua."", ""PersonMailingCity"": ""New york city"", ""PersonMailingState"": ""Ohaio"", ""PersonMailingPostalCode"": ""999-9999"", ""PersonMailingCountry"": ""US"", ""PersonMailingLatitude"": 50.1234567, ""PersonMailingLongitude"": 103.1234567, ""PersonMailingGeocodeAccuracy"": ""Address""}"\r\n\
"TEST003","test003","2022-06-01 09:00:00","1.234567","2022-06-01 09:00:00","0","{""PersonMailingStreet"": ""Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua."", ""PersonMailingCity"": ""New york city"", ""PersonMailingState"": ""Ohaio"", ""PersonMailingPostalCode"": ""999-9999"", ""PersonMailingCountry"": ""US"", ""PersonMailingLatitude"": 50.1234567, ""PersonMailingLongitude"": 103.1234567, ""PersonMailingGeocodeAccuracy"": ""Address""}"\r\n\
"""
expect_csv_data = [
["Id", "AccountNumber", "LastModifiedDate", "LastModifiedById", "SystemModstamp", "IsDeleted", "PersonMailingAddress"],
["TEST001", "test001", "2022-06-01 09:00:00", 1234567.0, "2022-06-01 09:00:00", 1,
"{\"PersonMailingStreet\": \"Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\", \"PersonMailingCity\": \"New york city\", \"PersonMailingState\": \"Ohaio\", \"PersonMailingPostalCode\": \"999-9999\", \"PersonMailingCountry\": \"US\", \"PersonMailingLatitude\": 50.1234567, \"PersonMailingLongitude\": 103.1234567, \"PersonMailingGeocodeAccuracy\": \"Address\"}"],
["TEST002", "test002", "2022-06-01 09:00:00", 1.23, "2022-06-01 09:00:00", 0,
"{\"PersonMailingStreet\": \"Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\", \"PersonMailingCity\": \"New york city\", \"PersonMailingState\": \"Ohaio\", \"PersonMailingPostalCode\": \"999-9999\", \"PersonMailingCountry\": \"US\", \"PersonMailingLatitude\": 50.1234567, \"PersonMailingLongitude\": 103.1234567, \"PersonMailingGeocodeAccuracy\": \"Address\"}"],
["TEST003", "test003", "2022-06-01 09:00:00", 1.234567, "2022-06-01 09:00:00", 0,
"{\"PersonMailingStreet\": \"Lorem ipsum dolor sit amet, \\nconsectetur adipiscing elit, \\nsed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\", \"PersonMailingCity\": \"New york city\", \"PersonMailingState\": \"Ohaio\", \"PersonMailingPostalCode\": \"999-9999\", \"PersonMailingCountry\": \"US\", \"PersonMailingLatitude\": 50.1234567, \"PersonMailingLongitude\": 103.1234567, \"PersonMailingGeocodeAccuracy\": \"Address\"}"],
]
# 返り値の期待値チェック
assert isinstance(actual_csv_string, str), 'CSV文字列が返却される'
assert actual_csv_string == textwrap.dedent(expect_csv_string)
assert isinstance(actual_csv_data, list), 'CSVのリストが返却される'
assert actual_csv_data == expect_csv_data
# ログの確認
assert generate_log_message_tuple(log_message='I-CONV-01 [Account] のCSV変換処理を開始します') in caplog.record_tuples

View File

@ -118,11 +118,19 @@ class TestUploadLastFetchDatetimeProcess:
last_fetch_datetime = LastFetchDatetime(last_fetch_datetime_dict, execute_datetime)
# Act
with patch('src.aws.s3.ConfigBucket.put_last_fetch_datetime_file', mock_config_bucket):
with patch('src.upload_last_fetch_datetime_process.ConfigBucket') as mock_config_bucket, \
patch('src.upload_last_fetch_datetime_process.JsonWriter') as mock_writer:
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
mock_config_bucket_inst = mock_config_bucket.return_value
mock_config_bucket_inst.put_last_fetch_datetime_file.return_value = ''
upload_last_fetch_datetime_process(target_object, last_fetch_datetime)
# Assert
# 処理実行確認
assert mock_config_bucket_inst.put_last_fetch_datetime_file.called is False
assert mock_config_bucket.called is False
# ログの確認
@ -143,7 +151,6 @@ class TestUploadLastFetchDatetimeProcess:
"""
# Arrange
mock_config_bucket = MagicMock(return_value=None)
target_objects_dict = {
'object_name': 'Account',
@ -165,11 +172,19 @@ class TestUploadLastFetchDatetimeProcess:
last_fetch_datetime = LastFetchDatetime(last_fetch_datetime_dict, execute_datetime)
# Act
with patch('src.aws.s3.ConfigBucket.put_last_fetch_datetime_file', mock_config_bucket):
with patch('src.upload_last_fetch_datetime_process.ConfigBucket') as mock_config_bucket, \
patch('src.upload_last_fetch_datetime_process.JsonWriter') as mock_writer:
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
mock_config_bucket_inst = mock_config_bucket.return_value
mock_config_bucket_inst.put_last_fetch_datetime_file.return_value = ''
upload_last_fetch_datetime_process(target_object, last_fetch_datetime)
# Assert
assert mock_config_bucket.called is True
assert mock_config_bucket_inst.put_last_fetch_datetime_file.called is True
assert mock_writer_inst.write.called is True
def test_raise_put_last_fetch_datetime_file(self, monkeypatch):
"""
@ -185,7 +200,6 @@ class TestUploadLastFetchDatetimeProcess:
"""
# Arrange
mock_config_bucket = MagicMock(side_effect=Exception('ファイルアップロードエラー'))
target_objects_dict = {
'object_name': 'Account',
@ -207,12 +221,69 @@ class TestUploadLastFetchDatetimeProcess:
last_fetch_datetime = LastFetchDatetime(last_fetch_datetime_dict, execute_datetime)
# Act
with patch('src.aws.s3.ConfigBucket.put_last_fetch_datetime_file', mock_config_bucket):
with patch('src.upload_last_fetch_datetime_process.ConfigBucket') as mock_config_bucket, \
patch('src.upload_last_fetch_datetime_process.JsonWriter') as mock_writer:
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
mock_config_bucket_inst = mock_config_bucket.return_value
mock_config_bucket_inst.put_last_fetch_datetime_file.side_effect = Exception('ファイルアップロードエラー')
with pytest.raises(FileUploadException) as e:
upload_last_fetch_datetime_process(target_object, last_fetch_datetime)
# Assert
assert mock_config_bucket.called is True
assert mock_writer_inst.write.called is True
assert mock_config_bucket_inst.put_last_fetch_datetime_file.called is True
assert e.value.error_id == 'E-UPD-01'
assert e.value.func_name == UPD_JP_NAME
assert e.value.args[0] == f'[Account] 前回処理日時ファイルのアップロードに失敗しました ファイル名:[Account.json] エラー内容:[ファイルアップロードエラー]'
def test_raise_put_last_fetch_datetime_file_write_local_file(self, monkeypatch):
    """Verify that an error is raised when writing the local file used to
    upload the last-fetch-datetime data fails.

    Arranges:
        - Make JsonWriter.write raise an exception.
        - Prepare the target object and last-fetch-datetime data.
        - Create an execution-datetime instance.
    Expects:
        - A FileUploadException is raised.
        - The exception carries the file-write error message, and the S3
          upload (put_last_fetch_datetime_file) is never attempted.
    """
    # Arrange
    target_objects_dict = {
        'object_name': 'Account',
        'columns': [
            'Id',
            'Name'
        ],
        'is_update_last_fetch_datetime': True
    }
    last_fetch_datetime_dict = {
        "last_fetch_datetime_from": "1999-01-01T00:00:00.000Z",
        "last_fetch_datetime_to": "2100-12-31T23:59:59.000Z",
    }
    execute_datetime = ExecuteDateTime()
    target_object = TargetObject(target_objects_dict, execute_datetime)
    last_fetch_datetime = LastFetchDatetime(last_fetch_datetime_dict, execute_datetime)
    # Act
    with patch('src.upload_last_fetch_datetime_process.ConfigBucket') as mock_config_bucket, \
            patch('src.upload_last_fetch_datetime_process.JsonWriter') as mock_writer:
        mock_writer_inst = mock_writer.return_value
        # Local file write fails before any S3 interaction can happen.
        mock_writer_inst.write.side_effect = Exception('ファイル書き込みエラー')
        mock_config_bucket_inst = mock_config_bucket.return_value
        mock_config_bucket_inst.put_last_fetch_datetime_file.return_value = ''
        with pytest.raises(FileUploadException) as e:
            upload_last_fetch_datetime_process(target_object, last_fetch_datetime)
        # Assert
        assert mock_writer_inst.write.called is True
        assert mock_config_bucket_inst.put_last_fetch_datetime_file.called is False
        assert e.value.error_id == 'E-UPD-01'
        assert e.value.func_name == UPD_JP_NAME
        assert e.value.args[0] == f'[Account] 前回処理日時ファイルのアップロードに失敗しました ファイル名:[Account.json] エラー内容:[ファイル書き込みエラー]'

View File

@ -1,4 +1,4 @@
from unittest.mock import MagicMock, patch
from unittest.mock import patch
import pytest
from src.error.exceptions import FileUploadException
@ -72,7 +72,6 @@ class TestUploadResultDataProcess:
"""
# Arrange
mock_backup_bucket = MagicMock(return_value=None)
process_result = {
"Account": "success",
@ -82,11 +81,18 @@ class TestUploadResultDataProcess:
execute_datetime = ExecuteDateTime()
# Act
with patch('src.aws.s3.BackupBucket.put_result_json', mock_backup_bucket):
with patch('src.upload_result_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.upload_result_data_process.JsonWriter') as mock_writer:
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_result_json.return_value = ''
upload_result_data_process(process_result, execute_datetime)
# Assert
assert mock_backup_bucket.called is True
assert mock_writer_inst.write.called is True
assert mock_backup_bucket_inst.put_result_json.called is True
def test_raise_put_result_json(self, monkeypatch):
"""
@ -102,7 +108,6 @@ class TestUploadResultDataProcess:
"""
# Arrange
mock_backup_bucket = MagicMock(side_effect=Exception('ファイルアップロードエラー'))
process_result = {
"Account": "success",
@ -112,13 +117,59 @@ class TestUploadResultDataProcess:
execute_datetime = ExecuteDateTime()
# Act
with patch('src.aws.s3.BackupBucket.put_result_json', mock_backup_bucket):
with patch('src.upload_result_data_process.BackupBucket') as mock_backup_bucket, \
patch('src.upload_result_data_process.JsonWriter') as mock_writer:
mock_writer_inst = mock_writer.return_value
mock_writer_inst.write.return_value = ''
mock_backup_bucket_inst = mock_backup_bucket.return_value
mock_backup_bucket_inst.put_result_json.side_effect = Exception('ファイルアップロードエラー')
with pytest.raises(FileUploadException) as e:
upload_result_data_process(process_result, execute_datetime)
# Assert
assert mock_writer_inst.write.called is True
assert mock_backup_bucket.called is True
assert e.value.error_id == 'E-END-01'
assert e.value.func_name == END_JP_NAME
assert e.value.args[0] == f'取得処理実施結果のアップロードに失敗しました ファイル名:[process_result.json] エラー内容:[ファイルアップロードエラー]'
def test_raise_put_result_json_write_local_file(self, monkeypatch):
    """Verify that an error is raised when writing the local file used to
    upload the process-result data fails.

    Arranges:
        - Make JsonWriter.write raise an exception.
        - Prepare the process-result data.
        - Create an execution-datetime instance.
    Expects:
        - A FileUploadException is raised.
        - The exception carries the file-write error message, and the
          BackupBucket is never constructed.
    """
    # Arrange
    process_result = {
        "Account": "success",
        "Contact": "fail"
    }
    execute_datetime = ExecuteDateTime()
    # Act
    with patch('src.upload_result_data_process.BackupBucket') as mock_backup_bucket, \
            patch('src.upload_result_data_process.JsonWriter') as mock_writer:
        mock_writer_inst = mock_writer.return_value
        # Local file write fails before any S3 interaction can happen.
        mock_writer_inst.write.side_effect = Exception('ファイル書き込みエラー')
        mock_backup_bucket_inst = mock_backup_bucket.return_value
        mock_backup_bucket_inst.put_result_json.return_value = ''
        with pytest.raises(FileUploadException) as e:
            upload_result_data_process(process_result, execute_datetime)
        # Assert
        assert mock_writer_inst.write.called is True
        # The class itself must not be instantiated when the write failed.
        assert mock_backup_bucket.called is False
        assert e.value.error_id == 'E-END-01'
        assert e.value.func_name == END_JP_NAME
        assert e.value.args[0] == f'取得処理実施結果のアップロードに失敗しました ファイル名:[process_result.json] エラー内容:[ファイル書き込みエラー]'

View File

@ -0,0 +1,138 @@
import os
import textwrap
import pytest
from src.writer.file_writer import CsvWriter, FileWriter, JsonWriter
class ConcreteFileWriter(FileWriter):
    """Minimal concrete FileWriter subclass used to exercise the abstract base in tests."""

    def write(self) -> str:
        # Delegate to the base implementation so its body is covered.
        return super().write()
class ConcreteFileWriterAbstractMethodNotImplemented(FileWriter):
    """Subclass that deliberately omits write() to verify the ABC blocks instantiation."""
    pass
class TestFileWriter:
    """Tests for the FileWriter abstract base class."""

    def test_concrete_file_writer_instance_create(self):
        """A subclass that implements write() can be instantiated."""
        writer = ConcreteFileWriter('', '')
        assert isinstance(writer, FileWriter)

    def test_concrete_file_writer_call_write_method(self):
        """write() on a concrete subclass is callable without error."""
        ConcreteFileWriter('', '').write()

    def test_raise_abstract_class_do_not_create_instance(self):
        """Instantiating the abstract base class itself raises TypeError."""
        with pytest.raises(TypeError):
            FileWriter('', '')

    def test_raise_concrete_file_writer_not_implemented_abstract_method(self):
        """A subclass missing the abstract write() cannot be instantiated."""
        with pytest.raises(TypeError):
            ConcreteFileWriterAbstractMethodNotImplemented('', '')
class TestJsonFileWriter:
    """Tests for JsonWriter."""

    def test_write(self, tmpdir):
        """The given dict is serialized and written to the target JSON file."""
        # Arrange
        path = os.path.join(tmpdir, 'test.json')
        payload = {'test': 'テスト'}
        writer = JsonWriter(path, payload)
        # Act
        writer.write()
        # Assert
        with open(path) as f:
            written = f.read()
        assert written == '{"test": "テスト"}'

    def test_raise_write_cause_file_path_not_exists(self):
        """Writing raises an exception when the target directory does not exist."""
        # Arrange
        path = os.path.join('invalid', 'test.json')
        writer = JsonWriter(path, {'test': 'テスト'})
        # Act / Assert
        with pytest.raises(Exception):
            writer.write()
class TestCsvFileWriter:
    """Tests for CsvWriter."""

    def test_write(self, tmpdir):
        """Rows are written as an all-quoted CSV with CRLF line endings.

        Expects:
            The file content matches the quoted, CRLF-terminated CSV exactly.
        """
        # Arrange
        file_name = 'test.csv'
        file_path = os.path.join(tmpdir, file_name)
        content = [
            ["Id", "AccountId", "UserOrGroupId", "AccountAccessLevel", "OpportunityAccessLevel", "CaseAccessLevel",
             "ContactAccessLevel", "RowCause", "LastModifiedDate", "LastModifiedById", "IsDeleted"],
            ["TEST001", "test001", "", 1, 2, 3, 4, "テストのため1", "2022-06-01 09:00:00", 1234567.0, 0],
            ["TEST002", "test002", "", 5, 6, 7, 8, "テストのため2", "2022-06-03 01:30:30", 2.23, 1],
            ["TEST003", "test003", "", 9, 10, 11, 12, "テストのため3", "2022-06-04 08:50:50", 3.234567, 0]
        ]
        # Act
        sut = CsvWriter(file_path, content)
        sut.write()
        # Assert
        # newline='' so the CRLF terminators written by the csv module are
        # read back verbatim instead of being translated.
        with open(file_path, newline='') as f:
            actual = f.read()
        expect = """\
"Id","AccountId","UserOrGroupId","AccountAccessLevel","OpportunityAccessLevel","CaseAccessLevel","ContactAccessLevel","RowCause","LastModifiedDate","LastModifiedById","IsDeleted"\r\n\
"TEST001","test001","","1","2","3","4","テストのため1","2022-06-01 09:00:00","1234567.0","0"\r\n\
"TEST002","test002","","5","6","7","8","テストのため2","2022-06-03 01:30:30","2.23","1"\r\n\
"TEST003","test003","","9","10","11","12","テストのため3","2022-06-04 08:50:50","3.234567","0"\r\n\
"""
        assert actual == textwrap.dedent(expect)

    def test_raise_write_cause_file_path_not_exists(self):
        """Writing raises an exception when the target directory does not exist.

        Expects:
            An exception is raised by write().
        """
        # Arrange
        file_name = 'test.csv'
        file_path = os.path.join('invalid', file_name)
        # Use valid row data (a list of rows) so the only possible failure is
        # the missing directory — not a TypeError from passing a dict (which
        # was copy-pasted from the JSON test and could make this test pass
        # for the wrong reason).
        content = [['test', 'テスト']]
        # Act
        sut = CsvWriter(file_path, content)
        with pytest.raises(Exception):
            sut.write()