feat: change S3 PUT to upload from a local file

shimoda.m@nds-tyo.co.jp 2022-10-12 15:36:28 +09:00
parent 5cfcd0e3e5
commit a74e0ee8f3
2 changed files with 57 additions and 33 deletions
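
Why the change: boto3's Object.put requires the whole payload in memory as bytes, while Bucket.upload_file streams an existing local file from disk and uses managed (multipart-capable) transfer for large objects. A minimal sketch of the two call styles (the bucket and key names here are illustrative, not from this repo):

import boto3

bucket = boto3.resource('s3').Bucket('example-bucket')  # illustrative name

# Before this commit: the whole payload must already be in memory.
bucket.Object('crm/data.csv').put(Body='a,b,c'.encode('utf-8'))

# After this commit: boto3 streams the file from disk and switches to
# multipart upload automatically for large files.
bucket.upload_file(Filename='/tmp/data.csv', Key='crm/data.csv')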

View File: src/aws/s3.py

@@ -1,5 +1,3 @@
-import json
 import boto3
 from src.system_var.constants import (AWS_RESOURCE_S3, S3_CHAR_CODE,
                                       S3_RESPONSE_BODY)
@@ -24,9 +22,8 @@ class S3Resource:
         body = response[S3_RESPONSE_BODY].read()
         return body.decode(S3_CHAR_CODE)
 
-    def put_object(self, object_key: str, data: str) -> None:
-        s3_object = self.__s3_bucket.Object(object_key)
-        s3_object.put(Body=data.encode(S3_CHAR_CODE), ContentEncoding=S3_CHAR_CODE)
+    def put_object(self, object_key: str, local_file_path: str) -> None:
+        self.__s3_bucket.upload_file(Key=object_key, Filename=local_file_path)
         return
 
     def copy(self, src_bucket: str, src_key: str, dest_bucket: str, dest_key: str) -> None:
@@ -47,12 +44,12 @@ class ConfigBucket:
     def get_object_info_file(self) -> str:
         return self.__s3_resource.get_object(f'{OBJECT_INFO_FOLDER}/{OBJECT_INFO_FILENAME}')
 
-    def get_last_fetch_datetime_file(self, file_path: str) -> str:
-        return self.__s3_resource.get_object(f'{LAST_FETCH_DATE_FOLDER}/{file_path}')
+    def get_last_fetch_datetime_file(self, file_key: str) -> str:
+        return self.__s3_resource.get_object(f'{LAST_FETCH_DATE_FOLDER}/{file_key}')
 
-    def put_last_fetch_datetime_file(self, file_path: str, data: str) -> None:
+    def put_last_fetch_datetime_file(self, file_key: str, local_file_path: str) -> None:
         self.__s3_resource.put_object(
-            f'{LAST_FETCH_DATE_FOLDER}/{file_path}', data)
+            f'{LAST_FETCH_DATE_FOLDER}/{file_key}', local_file_path)
         return
@@ -65,9 +62,9 @@ class DataBucket:
     def __str__(self) -> str:
         return IMPORT_DATA_BUCKET
 
-    def put_csv(self, file_path: str, data: str) -> None:
-        object_key = f'{CRM_IMPORT_DATA_FOLDER}/{file_path}'
-        self.__s3_resource.put_object(object_key, data)
+    def put_csv(self, file_key: str, local_file_path: str) -> None:
+        object_key = f'{CRM_IMPORT_DATA_FOLDER}/{file_key}'
+        self.__s3_resource.put_object(object_key, local_file_path)
         return
 
     def put_csv_from(self, src_bucket: str, src_key: str):
@@ -85,17 +82,17 @@ class BackupBucket:
     def __str__(self) -> str:
         return CRM_BACKUP_BUCKET
 
-    def put_response_json(self, file_path: str, data: dict) -> None:
-        object_key = f'{RESPONSE_JSON_BACKUP_FOLDER}/{file_path}'
-        self.__s3_resource.put_object(object_key, json.dumps(data, ensure_ascii=False))
+    def put_response_json(self, file_key: str, local_file_path: str) -> None:
+        object_key = f'{RESPONSE_JSON_BACKUP_FOLDER}/{file_key}'
+        self.__s3_resource.put_object(object_key, local_file_path)
         return
 
-    def put_csv(self, file_path: str, data: str) -> None:
-        object_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{file_path}'
-        self.__s3_resource.put_object(object_key, data)
+    def put_csv(self, file_key: str, local_file_path: str) -> None:
+        object_key = f'{CRM_IMPORT_DATA_BACKUP_FOLDER}/{file_key}'
+        self.__s3_resource.put_object(object_key, local_file_path)
         return
 
-    def put_result_json(self, file_path: str, data: dict) -> None:
-        object_key = f'{PROCESS_RESULT_FOLDER}/{file_path}'
-        self.__s3_resource.put_object(object_key, json.dumps(data, ensure_ascii=False))
+    def put_result_json(self, file_key: str, local_file_path: str) -> None:
+        object_key = f'{PROCESS_RESULT_FOLDER}/{file_key}'
+        self.__s3_resource.put_object(object_key, local_file_path)
         return
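
Note for callers: put_csv, put_response_json, and put_result_json now take a local file path instead of the data itself, so call sites that used to pass strings or dicts must write a file first (JSON serialization, previously done here via json.dumps, moves to the caller). A hedged sketch of what such a call site could look like; the tempfile handling below is an assumption for illustration, not part of this commit:

import json
import tempfile

from src.aws.s3 import BackupBucket

# Hypothetical call site: serialize the response to a temporary file,
# then hand its path to put_response_json (which now wraps upload_file).
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
    json.dump({'records': 1}, f, ensure_ascii=False)
    local_path = f.name

BackupBucket().put_response_json('response.json', local_path)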

View File: tests for src/aws/s3.py

@@ -1,3 +1,5 @@
+import os
+
 import pytest
 from src.aws.s3 import BackupBucket, ConfigBucket, DataBucket, S3Resource
@@ -51,7 +53,7 @@ class TestS3Resource:
         # Assert
         sut.get_object('hogehoge/test.txt')
 
-    def test_put_object(self, s3_test, s3_client, bucket_name):
+    def test_put_object(self, s3_test, s3_client, bucket_name, tmpdir):
         """
         Cases:
         - Whether an object can be PUT to S3
@@ -61,9 +63,14 @@ class TestS3Resource:
         - The PUT file exists
         - The content of the PUT file matches the expected value
         """
+        file_path = os.path.join(tmpdir, 'test.txt')
+        with open(file_path, mode='w') as f:
+            f.write('aaaaaaaaaaaaaaa')
+
         sut = S3Resource(bucket_name)
-        sut.put_object('hogehoge/test.txt', 'aaaaaaaaaaaaaaa')
+        sut.put_object('hogehoge/test.txt', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='hogehoge/test.txt')
         assert actual['Body'].read().decode('utf-8') == 'aaaaaaaaaaaaaaa'
@@ -213,7 +220,7 @@ class TestConfigBucket:
         with pytest.raises(Exception):
             sut.get_last_fetch_datetime_file('Object.json')
 
-    def test_put_last_fetch_datetime_file(self, s3_test, s3_client, bucket_name, monkeypatch):
+    def test_put_last_fetch_datetime_file(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
         """
         Cases:
         - The object last-fetch datetime file can be PUT
@@ -226,8 +233,12 @@ class TestConfigBucket:
         monkeypatch.setattr('src.aws.s3.CRM_CONFIG_BUCKET', bucket_name)
         monkeypatch.setattr('src.aws.s3.LAST_FETCH_DATE_FOLDER', 'crm')
 
+        file_path = os.path.join(tmpdir, 'Object.json')
+        with open(file_path, mode='w') as f:
+            f.write('aaaaaaaaaaaaaaa')
+
         sut = ConfigBucket()
-        sut.put_last_fetch_datetime_file('Object.json', 'aaaaaaaaaaaaaaa')
+        sut.put_last_fetch_datetime_file('Object.json', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='crm/Object.json')
         assert actual['Body'].read().decode('utf-8') == 'aaaaaaaaaaaaaaa'
@@ -267,7 +278,7 @@ class TestConfigBucket:
 
 class TestDataBucket:
-    def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch):
+    def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
         """
         Cases:
         - A CSV file can be PUT
@@ -280,8 +291,12 @@ class TestDataBucket:
         monkeypatch.setattr('src.aws.s3.IMPORT_DATA_BUCKET', bucket_name)
         monkeypatch.setattr('src.aws.s3.CRM_IMPORT_DATA_FOLDER', 'crm/target')
 
+        file_path = os.path.join(tmpdir, 'test.csv')
+        with open(file_path, mode='w') as f:
+            f.write('test,test,test')
+
         sut = DataBucket()
-        sut.put_csv('test.csv', 'test,test,test')
+        sut.put_csv('test.csv', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='crm/target/test.csv')
         assert actual['Body'].read().decode('utf-8') == 'test,test,test'
@@ -363,7 +378,7 @@ class TestDataBucket:
 
 class TestBackupBucket:
-    def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch):
+    def test_put_csv(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
         """
         Cases:
         - A CSV file can be PUT
@@ -376,8 +391,12 @@ class TestBackupBucket:
         monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
         monkeypatch.setattr('src.aws.s3.CRM_IMPORT_DATA_BACKUP_FOLDER', 'data_import')
 
+        file_path = os.path.join(tmpdir, 'test.csv')
+        with open(file_path, mode='w') as f:
+            f.write('test,test,test')
+
         sut = BackupBucket()
-        sut.put_csv('test.csv', 'test,test,test')
+        sut.put_csv('test.csv', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='data_import/test.csv')
         assert actual['Body'].read().decode('utf-8') == 'test,test,test'
@@ -399,7 +418,7 @@ class TestBackupBucket:
         with pytest.raises(Exception):
             sut.put_csv('test.csv', 'test,test,test')
 
-    def test_put_response_json(self, s3_test, s3_client, bucket_name, monkeypatch):
+    def test_put_response_json(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
         """
         Cases:
         - A JSON file can be PUT
@@ -412,8 +431,12 @@ class TestBackupBucket:
         monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
         monkeypatch.setattr('src.aws.s3.RESPONSE_JSON_BACKUP_FOLDER', 'response_json')
 
+        file_path = os.path.join(tmpdir, 'test.json')
+        with open(file_path, mode='w') as f:
+            f.write('{"test": "test"}')
+
         sut = BackupBucket()
-        sut.put_response_json('test.json', {"test": "test"})
+        sut.put_response_json('test.json', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='response_json/test.json')
         assert actual['Body'].read().decode('utf-8') == '{"test": "test"}'
@@ -435,7 +458,7 @@ class TestBackupBucket:
         with pytest.raises(Exception):
             sut.put_response_json('test.json', {"test": "test"})
 
-    def test_put_result_json(self, s3_test, s3_client, bucket_name, monkeypatch):
+    def test_put_result_json(self, s3_test, s3_client, bucket_name, monkeypatch, tmpdir):
         """
         Cases:
         - The result JSON file can be PUT
@@ -448,8 +471,12 @@ class TestBackupBucket:
         monkeypatch.setattr('src.aws.s3.CRM_BACKUP_BUCKET', bucket_name)
         monkeypatch.setattr('src.aws.s3.PROCESS_RESULT_FOLDER', 'data_import')
 
+        file_path = os.path.join(tmpdir, 'test.json')
+        with open(file_path, mode='w') as f:
+            f.write('{"test": "test"}')
+
         sut = BackupBucket()
-        sut.put_result_json('result.json', {"test": "test"})
+        sut.put_result_json('result.json', file_path)
 
         actual = s3_client.get_object(Bucket=bucket_name, Key='data_import/result.json')
         assert actual['Body'].read().decode('utf-8') == '{"test": "test"}'
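
The s3_test, s3_client, and bucket_name fixtures are defined outside this diff. A conftest.py along the following lines would satisfy them, assuming the tests run against moto's in-memory S3; the fixture bodies here are a sketch, not the project's actual conftest:

# conftest.py (sketch; assumes moto, which this diff does not show)
import boto3
import pytest
from moto import mock_s3


@pytest.fixture
def bucket_name():
    return 'test-bucket'


@pytest.fixture
def s3_test(bucket_name):
    # Start the in-memory S3 mock and create the bucket under test.
    with mock_s3():
        boto3.client('s3').create_bucket(Bucket=bucket_name)
        yield


@pytest.fixture
def s3_client(s3_test):
    # Plain client used by the tests to assert on uploaded objects.
    return boto3.client('s3')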