feat: dump取得時、出力先のパスをログ出力。

This commit is contained in:
shimoda.m@nds-tyo.co.jp 2023-07-13 10:29:01 +09:00
parent 5ebd71b03d
commit b3759b4ae9
2 changed files with 6 additions and 5 deletions

View File

@@ -1,4 +1,4 @@
FROM python:3.9-bullseye
FROM python:3.9
ENV TZ="Asia/Tokyo"

View File

@@ -64,8 +64,8 @@ def exec():
dt_now = datetime.datetime.now()
converted_value = dt_now.strftime('%Y%m%d%H%M%S')
file_name = f'backup_rds_src05_{converted_value}.gz'
s3_file_name = f's3://{environment.JSKULT_BACKUP_BUCKET}/{environment.DUMP_BACKUP_FOLDER}/{dt_now.year}/{dt_now.strftime("%m")}/{dt_now.strftime("%d")}/{file_name}'
dump_file_name = f'backup_rds_src05_{converted_value}.gz'
s3_file_path = f's3://{environment.JSKULT_BACKUP_BUCKET}/{environment.DUMP_BACKUP_FOLDER}/{dt_now.year}/{dt_now.strftime("%m")}/{dt_now.strftime("%d")}/{dump_file_name}'
# mysqldumpコマンドを実行し、dumpを取得する
command = [
@@ -79,12 +79,12 @@ def exec():
'--set-gtid-purged=OFF',
environment.DB_SCHEMA
]
mysqldump_process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# gzipコマンドを実行してdump結果を圧縮する
gzip_process = subprocess.Popen(['gzip', '-c'], stdin=mysqldump_process.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# aws s3 cpコマンドを実行してアップロードする
s3_cp_process = subprocess.Popen(['aws', 's3', 'cp', '-', s3_file_name], stdin=gzip_process.stdout, stderr=subprocess.PIPE)
s3_cp_process = subprocess.Popen(['aws', 's3', 'cp', '-', s3_file_path], stdin=gzip_process.stdout, stderr=subprocess.PIPE)
# mysqldumpの標準出力をgzipに接続したため、標準出力をクローズする
mysqldump_process.stdout.close()
# gzipの標準出力をaws s3 cpに接続したため、標準出力をクローズする
@@ -114,6 +114,7 @@ def exec():
return constants.BATCH_EXIT_CODE_SUCCESS
logger.info('日次バッチ処理前DBダンプ取得終了正常終了')
logger.info(f'出力ファイルパス: {s3_file_path}')
return constants.BATCH_EXIT_CODE_SUCCESS
except Exception as e: