feat: ページング取得に伴い、API機能だけになったのでコントローラーを移植

This commit is contained in:
shimoda.m@nds-tyo.co.jp 2023-10-05 09:42:27 +09:00
parent 6d2ca2167b
commit a9a968312c
2 changed files with 66 additions and 102 deletions

View File

@@ -1,22 +1,13 @@
import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from starlette import status
from src.depends.services import get_service
from src.logging.get_logger import get_logger
from src.model.db.bio_sales_lot import BioSalesLotDBModel
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.view.bio_view_model import BioViewModel
from src.router.session_router import AuthenticatedRoute
from src.services.batch_status_service import BatchStatusService
from src.services.bio_view_service import BioViewService
from src.services.session_service import set_session
from src.system_var import constants, environment
from src.system_var import constants
from src.templates import templates
router = APIRouter()
@@ -53,90 +44,3 @@ def bio_view(
headers={'session_key': session_key}
)
return templates_response
@router.post('/BioSearchList')
def search_bio(
    request: Request,
    bio_form: Optional[BioModel] = Depends(BioModel.as_form),
    bio_service: BioViewService = Depends(get_service(BioViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Render the bio search-result list page for the submitted search form.

    Searches bio-derived data with the posted form, builds the page view
    model, and returns the rendered template with the session key echoed
    in a response header.

    Raises:
        HTTPException: 403 while a batch job is running — the feature is
            locked out during batch processing.
    """
    session: UserSession = request.session
    # Lock the feature out entirely while batch processing is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=constants.LOGOUT_REASON_BATCH_PROCESSING
        )
    # Run the search first, then prepare the view model (search criteria
    # lists etc.) and attach both the results and the submitted form.
    search_results = bio_service.search_bio_data(bio_form)
    view_model: BioViewModel = bio_service.prepare_bio_view(session)
    view_model.bio_data = search_results
    view_model.form_data = bio_form
    # Respond with the rendered list page; the session key travels back
    # in a response header.
    return templates.TemplateResponse(
        'bioSearchList.html',
        {'request': request, 'bio': view_model},
        headers={'session_key': session.session_key}
    )
@router.post('/BioSearchListAjax')
def search_bio_ajax(
    request: Request,
    bio_form: Optional[BioModel] = Depends(BioModel.as_form),
    bio_service: BioViewService = Depends(get_service(BioViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService))
):
    """Return bio search results and their total count as JSON for Ajax paging.

    On success the session's access/expiration timestamps are refreshed,
    persisted, and the session cookie is re-issued with a matching lifetime.

    Raises:
        HTTPException: 403 while a batch job is running — the feature is
            locked out during batch processing.
    """
    session: UserSession = request.session
    # Lock the feature out entirely while batch processing is running.
    if batch_status_service.is_batch_processing():
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=constants.LOGOUT_REASON_BATCH_PROCESSING)
    # Fetch the bio-derived rows and the total hit count for paging.
    bio_sales_lot_data = bio_service.search_bio_data(bio_form)
    bio_sales_lot_count = bio_service.count_bio_data(bio_form, session)
    # Extend the session's lifetime and persist the updated record.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)

    def custom_encode(obj):
        # Format date/datetime fields explicitly so they JSON-encode in the
        # layout the client expects (plain "Y-m-d[ H:M:S]", not ISO-8601).
        # Check datetime before date: datetime is a subclass of date.
        encoded_obj = obj.model_dump()
        for key, value in encoded_obj.items():
            if isinstance(value, datetime.datetime):
                encoded_obj[key] = value.strftime("%Y-%m-%d %H:%M:%S")
            elif isinstance(value, datetime.date):
                encoded_obj[key] = value.strftime("%Y-%m-%d")
        return encoded_obj

    data = jsonable_encoder(
        bio_sales_lot_data,
        custom_encoder={
            BioSalesLotDBModel: custom_encode
        }
    )
    json_response = JSONResponse(content={
        'data': data,
        'count': bio_sales_lot_count
    })
    # Re-issue the session cookie so its lifetime matches the refreshed
    # session record (max_age is in seconds, hence the * 60).
    json_response.set_cookie(
        key='session',
        value=session.session_key,
        max_age=environment.SESSION_EXPIRE_MINUTE * 60,
        secure=True,
        httponly=True
    )
    return json_response

View File

@@ -1,9 +1,10 @@
"""生物由来ファイルダウンロード APIRoute"""
from datetime import datetime
from typing import Union
import datetime
from typing import Optional, Union
import pandas as pd
from fastapi import APIRouter, Depends, HTTPException
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from starlette import status
@@ -13,6 +14,7 @@ from src.logging.get_logger import get_logger
from src.model.internal.session import UserSession
from src.model.request.bio import BioModel
from src.model.request.bio_download import BioDownloadModel
from src.model.view.bio_disp_model import BioDisplayModel
from src.services.batch_status_service import BatchStatusService
from src.services.bio_view_service import BioViewService
from src.services.session_service import set_session
@@ -27,6 +29,64 @@ router = APIRouter()
#########################
@router.post('/search')
def search_bio_data(
    bio_form: Optional[BioModel] = Depends(BioModel.as_form),
    bio_service: BioViewService = Depends(get_service(BioViewService)),
    batch_status_service: BatchStatusService = Depends(get_service(BatchStatusService)),
    session: Union[UserSession, None] = Depends(verify_session)
):
    """Return bio search results and their total count as a JSON payload.

    Unlike the HTML endpoints, failures are reported as JSON status
    payloads: 401 ``session_expired`` when the session could not be
    verified, 403 ``batch_processing`` while a batch job is running.
    On success the session's access/expiration timestamps are refreshed,
    persisted, and the session cookie is re-issued with a matching lifetime.
    """
    # Reject unauthenticated callers with an explicit status payload.
    if session is None:
        return JSONResponse(content={'status': 'session_expired'}, status_code=status.HTTP_401_UNAUTHORIZED)
    # Lock the feature out entirely while batch processing is running.
    if batch_status_service.is_batch_processing():
        return JSONResponse(content={'status': 'batch_processing'}, status_code=status.HTTP_403_FORBIDDEN)
    # Fetch the bio-derived rows and the total hit count for paging.
    bio_sales_lot_data = bio_service.search_bio_data(bio_form)
    bio_sales_lot_count = bio_service.count_bio_data(bio_form, session)

    def custom_encode(obj):
        # Work around JSON encoding errors for date-typed fields by
        # formatting them explicitly ("Y-m-d[ H:M:S]", not ISO-8601).
        # Check datetime before date: datetime is a subclass of date.
        encoded_obj = obj.model_dump()
        for key, value in encoded_obj.items():
            if isinstance(value, datetime.datetime):
                encoded_obj[key] = value.strftime("%Y-%m-%d %H:%M:%S")
            elif isinstance(value, datetime.date):
                encoded_obj[key] = value.strftime("%Y-%m-%d")
        return encoded_obj

    data = jsonable_encoder(
        bio_sales_lot_data,
        custom_encoder={
            BioDisplayModel: custom_encode
        }
    )
    # Extend the session's lifetime and persist the updated record.
    session.update(
        actions=[
            UserSession.last_access_time.set(UserSession.new_last_access_time()),
            UserSession.record_expiration_time.set(UserSession.new_record_expiration_time()),
        ]
    )
    set_session(session)
    json_response = JSONResponse(content={
        'data': data,
        'count': bio_sales_lot_count
    })
    # Re-issue the session cookie so its lifetime matches the refreshed
    # session record (max_age is in seconds, hence the * 60).
    json_response.set_cookie(
        key='session',
        value=session.session_key,
        max_age=environment.SESSION_EXPIRE_MINUTE * 60,
        secure=True,
        httponly=True
    )
    return json_response
@router.post('/download')
async def download_bio_data(
search_param: BioModel = Depends(BioModel.as_body),
@@ -40,7 +100,7 @@ async def download_bio_data(
logger.info(f'ユーザーID: {download_param.user_id}')
logger.info(f'拡張子: {download_param.ext}')
# ファイル名に使用するタイムスタンプを初期化しておく
current_timestamp = datetime.now()
current_timestamp = datetime.datetime.now()
# 出力ファイル名
download_file_name = f'Result_{download_param.user_id}_{current_timestamp:%Y%m%d%H%M%S%f}.{download_param.ext}'
if session is None:
@@ -50,7 +110,7 @@ async def download_bio_data(
return {'status': 'batch_processing'}
# 生物由来データを検索
# 検索に使用したクエリも取得
search_result_df, query = _search_bio_data(bio_service, search_param, download_param)
search_result_df, query = _search_download_bio_data(bio_service, search_param, download_param)
# アクセスログを記録
bio_service.write_access_log(query, search_param, download_param.user_id, current_timestamp, download_file_name)
@@ -100,7 +160,7 @@ async def download_bio_data(
return json_response
def _search_bio_data(
def _search_download_bio_data(
bio_service: BioViewService,
search_param: BioModel,
download_param: BioDownloadModel