From 84266768c1d29aedc1a5b9a96ed70f95b7120a42 Mon Sep 17 00:00:00 2001
From: SaboniAmine
Date: Mon, 10 Jun 2024 11:07:38 +0200
Subject: [PATCH] wip

---
 backend/bloom/config.py                       |  51 ++--
 backend/bloom/container.py                    |   2 +-
 .../repositories/repository_excursion.py      |  45 +--
 .../infra/repositories/repository_port.py     |  14 +-
 backend/bloom/main.py                         |  65 ++++
 backend/bloom/routers/excursions.py           |  71 +++++
 backend/bloom/routers/ports.py                |  45 +++
 backend/bloom/routers/vessels.py              |  91 ++++++
 backend/bloom/routers/zones.py                | 119 ++++++++
 backend/bloom/services/api.py                 | 280 ------------------
 backend/bloom/services/geo.py                 |   4 +-
 backend/bloom/tasks/clean_positions.py        |   4 +-
 .../tasks/compute_port_geometry_buffer.py     |   4 +-
 ...convert_spire_vessels_to_spire_ais_data.py |   4 +-
 backend/bloom/tasks/create_new_excursion.py   |  47 +--
 .../create_update_excursions_segments.py      |   8 +-
 .../tasks/create_update_rel_segments_zones.py |   4 +-
 backend/bloom/tasks/load_dim_port_from_csv.py |   4 +-
 .../bloom/tasks/load_dim_vessel_from_csv.py   |   4 +-
 .../bloom/tasks/load_dim_zone_amp_from_csv.py |   4 +-
 .../tasks/load_fct_excursions_from_csv.py     |   4 +-
 .../bloom/tasks/load_spire_data_from_api.py   |   4 +-
 .../bloom/tasks/load_spire_data_from_csv.py   |   4 +-
 .../bloom/tasks/load_spire_data_from_json.py  |   4 +-
 .../bloom/tasks/update_vessel_data_voyage.py  |   4 +-
 backend/bloom/usecase/Excursions.py           |  30 ++
 backend/bloom/usecase/Ports.py                |  20 ++
 backend/tests/test_alert.py                   |   4 +-
 28 files changed, 543 insertions(+), 401 deletions(-)
 create mode 100644 backend/bloom/main.py
 create mode 100644 backend/bloom/routers/excursions.py
 create mode 100644 backend/bloom/routers/ports.py
 create mode 100644 backend/bloom/routers/vessels.py
 create mode 100644 backend/bloom/routers/zones.py
 delete mode 100644 backend/bloom/services/api.py
 create mode 100644 backend/bloom/usecase/Excursions.py
 create mode 100644 backend/bloom/usecase/Ports.py

diff --git a/backend/bloom/config.py b/backend/bloom/config.py
index 4149de44..5894add8 100644
--- a/backend/bloom/config.py
+++ b/backend/bloom/config.py
@@ -16,6 +16,7 @@
     model_validator
 )
 
+
 class Settings(BaseSettings):
     model_config = SettingsConfigDict(
         # validate_assignment=True allows to update db_url value as soon as one of
@@ -27,44 +28,44 @@ class Settings(BaseSettings):
         env_ignore_empty=True,
         env_nested_delimiter='__',
         env_file='.env',
-        env_file_encoding = 'utf-8',
+        env_file_encoding='utf-8',
         extra='ignore'
-    )
-    
+    )
+
     # Declaration of the attributes/parameters available within the Settings class
-    postgres_user:str = Field(default='')
-    postgres_password:str = Field(default='')
-    postgres_hostname:str = Field(min_length=1,
-                                  default='localhost')
-    postgres_port:int = Field(gt=1024,
-                              default=5432)
+    postgres_user: str = Field(default='')
+    postgres_password: str = Field(default='')
+    postgres_hostname: str = Field(min_length=1,
+                                   default='localhost')
+    postgres_port: int = Field(gt=1024,
+                               default=5432)
 
-    postgres_db:str = Field(min_length=1,max_length=32,pattern=r'^(?:[a-zA-Z]|_)[\w\d_]*$')
+    postgres_db: str = Field(min_length=1, max_length=32, pattern=r'^(?:[a-zA-Z]|_)[\w\d_]*$')
     srid: int = Field(default=4326)
-    spire_token:str = Field(default='')
-    data_folder:str=Field(default=str(Path(__file__).parent.parent.parent.joinpath('./data')))
-    db_url:str=Field(default='')
+    spire_token: str = Field(default='')
+    data_folder: str = Field(default=str(Path(__file__).parent.parent.parent.joinpath('./data')))
+    db_url: str = Field(default='')
     redis_host: str = Field(default='localhost')
     redis_port: int = Field(default=6379)
     redis_cache_expiration: int = Field(default=3600)
-
-    logging_level:str=Field(
-        default="INFO",
-        pattern=r'NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL'
-    )
+
+    logging_level: str = Field(
+        default="INFO",
+        pattern=r'NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL'
+    )
 
     @model_validator(mode='after')
-    def update_db_url(self)->dict:
-        new_url= f"postgresql://{self.postgres_user}:"\
-                f"{self.postgres_password}@{self.postgres_hostname}:"\
-                f"{self.postgres_port}/{self.postgres_db}"
+    def update_db_url(self) -> dict:
+        new_url = f"postgresql://{self.postgres_user}:" \
+                  f"{self.postgres_password}@{self.postgres_hostname}:" \
+                  f"{self.postgres_port}/{self.postgres_db}"
         if self.db_url != new_url:
-                self.db_url = new_url
+            self.db_url = new_url
         return self
 
 settings = Settings(_env_file=os.getenv('BLOOM_CONFIG',
-                                        Path(__file__).parent.parent.parent.joinpath('.env')),
+                                        Path(__file__).parent.parent.parent.joinpath('.env')),
                     _secrets_dir=os.getenv('BLOOM_SECRETS_DIR',
-                                        Path(__file__).parent.parent.parent.joinpath('./secrets')))
+                                           Path(__file__).parent.parent.parent.joinpath('./secrets')))
diff --git a/backend/bloom/container.py b/backend/bloom/container.py
index c8c693cd..338701d9 100644
--- a/backend/bloom/container.py
+++ b/backend/bloom/container.py
@@ -14,7 +14,7 @@
 from dependency_injector import containers, providers
 
 
-class UseCases(containers.DeclarativeContainer):
+class UseCasesContainer(containers.DeclarativeContainer):
     config = providers.Configuration()
     db_url = settings.db_url
     db = providers.Singleton(
diff --git a/backend/bloom/infra/repositories/repository_excursion.py b/backend/bloom/infra/repositories/repository_excursion.py
index 5f869585..e01eb668 100644
--- a/backend/bloom/infra/repositories/repository_excursion.py
+++ b/backend/bloom/infra/repositories/repository_excursion.py
@@ -34,18 +34,21 @@ def get_param_from_last_excursion(self, session: Session, vessel_id: int) -> Uni
             return None
         return {"arrival_port_id": result.arrival_port_id, "arrival_position": result.arrival_position}
 
-    def get_excursions_by_vessel_id(self, session: Session, vessel_id: int) -> List[Excursion]:
-        """Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée"""
-        stmt = select(sql_model.Excursion).where(sql_model.Excursion.vessel_id == vessel_id)
-        result = session.execute(stmt).scalars()
-        if not result:
-            return []
-        return [ExcursionRepository.map_to_domain(r) for r in result]
+    def get_excursions_by_vessel_id(self, vessel_id: int) -> List[Excursion]:
+        """Return all excursions of a vessel."""
+        with self.session_factory() as session:
+            stmt = select(sql_model.Excursion).where(sql_model.Excursion.vessel_id == vessel_id)
+            result = session.execute(stmt).scalars()
+            if not result:
+                return []
+            # Map to domain objects while the session is still open: the
+            # scalars() result is lazy and cannot be consumed after close.
+            return [ExcursionRepository.map_to_domain(r) for r in result]
 
     def get_vessel_excursion_by_id(self, session: Session, vessel_id: int, excursion_id: int) -> Union[Excursion, None]:
-        """Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée"""
-        stmt = select(sql_model.Excursion).where((sql_model.Excursion.vessel_id == vessel_id)
-                                                 & (sql_model.Excursion.id == excursion_id))
+        """Look up one of a vessel's excursions by its id."""
+        stmt = select(sql_model.Excursion).where(
+            (sql_model.Excursion.vessel_id == vessel_id)
+            & (sql_model.Excursion.id == excursion_id)
+        )
         result = session.execute(stmt).scalar()
         if not result:
             return None
@@ -164,29 +167,3 @@ def map_to_domain(excursion: sql_model.Excursion) -> Excursion:
             created_at=excursion.created_at,
             updated_at=excursion.updated_at
         )
-
-    @staticmethod
-    def map_to_orm(excursion: Excursion) -> sql_model.Excursion:
-        return sql_model.Excursion(
-            id=excursion.id,
-            vessel_id=excursion.vessel_id,
-            departure_port_id=excursion.departure_port_id,
-            departure_at=excursion.departure_at,
-            departure_position=from_shape(
-                excursion.departure_position) if excursion.departure_position is not None else None,
-            arrival_port_id=excursion.arrival_port_id,
-            arrival_at=excursion.arrival_at,
-            arrival_position=from_shape(excursion.arrival_position) if excursion.arrival_position is not None else None,
-            excursion_duration=excursion.excursion_duration,
-            total_time_at_sea=excursion.total_time_at_sea,
-            total_time_in_amp=excursion.total_time_in_amp,
-            total_time_in_territorial_waters=excursion.total_time_fishing_in_territorial_waters,
-            total_time_in_costal_waters=excursion.total_time_fishing_in_costal_waters,
-            total_time_fishing=excursion.total_time_fishing,
-            total_time_fishing_in_amp=excursion.total_time_fishing_in_amp,
-            total_time_fishing_in_territorial_waters=excursion.total_time_fishing_in_territorial_waters,
-            total_time_fishing_in_costal_waters=excursion.total_time_fishing_in_costal_waters,
-            total_time_extincting_amp=excursion.total_time_extincting_amp,
-            created_at=excursion.created_at,
-            updated_at=excursion.updated_at
-        )
diff --git a/backend/bloom/infra/repositories/repository_port.py b/backend/bloom/infra/repositories/repository_port.py
index 264dc6e1..4783f62c 100644
--- a/backend/bloom/infra/repositories/repository_port.py
+++ b/backend/bloom/infra/repositories/repository_port.py
@@ -17,17 +17,19 @@ class PortRepository:
     def __init__(self, session_factory: Callable) -> None:
         self.session_factory = session_factory
 
-    def get_port_by_id(self, session: Session, port_id: int) -> Union[Port, None]:
-        entity = session.get(sql_model.Port, port_id)
-        if entity is not None:
-            return PortRepository.map_to_domain(entity)
-        else:
-            return None
+    def get_port_by_id(self, port_id: int) -> Union[Port, None]:
+        with self.session_factory() as session:
+            entity = session.get(sql_model.Port, port_id)
+            if entity is not None:
+                return PortRepository.map_to_domain(entity)
+            else:
+                return None
 
-    def get_all_ports(self, session: Session) -> List[Port]:
-        q = session.query(sql_model.Port)
-        if not q:
-            return []
-        return [PortRepository.map_to_domain(entity) for entity in q]
+    def get_all_ports(self) -> List[Port]:
+        with self.session_factory() as session:
+            q = session.query(sql_model.Port)
+            if not q:
+                return []
+            # Consume the query while the session is still open
+            return [PortRepository.map_to_domain(entity) for entity in q]
 
     def get_empty_geometry_buffer_ports(self, session: Session) -> list[Port]:
diff --git a/backend/bloom/main.py b/backend/bloom/main.py
new file mode 100644
index 00000000..7b2a2c08
--- /dev/null
+++ b/backend/bloom/main.py
@@ -0,0 +1,65 @@
+from fastapi import FastAPI
+from starlette.requests import Request
+
+from bloom.container import UseCasesContainer
+from bloom.routers import excursions, zones, vessels, ports
+from bloom.routers.vessels import rd
+
+
+def init_db(container):
+    db = container.db()
+    db.create_database()
+
+
+def create_app() -> FastAPI:
+    container = init_container()
+
+    init_db(container)
+    server = init_server(container)
+    # server.add_exception_handler(DBException, db_exception_handler)
+    # server.add_exception_handler(ValidationError, validation_exception_handler)
+    # server.add_exception_handler(Exception, generic_exception_handler)
+
+    return server
+
+
+def init_container():
+    container = UseCasesContainer()
+    container.wire(
+        modules=[
+            zones,
+            vessels,
+            excursions,
+            ports
+        ]
+    )
+    return container
+
+
+def init_server(container):
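+    # Assemble the FastAPI application and mount one router per resource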
+    server = FastAPI(dependencies=[])
+    server.container = container
+    server.include_router(excursions.router)
+    server.include_router(ports.router)
+    server.include_router(vessels.router)
+    server.include_router(zones.router)
+    return server
+
+
+app = create_app()
+
+
+@app.get("/")
+async def root(request: Request):
+    return {
+        "maptiles": f"{request.url_for('list_maptiles')}",
+        "ports": f"{request.url_for('list_ports')}",
+        "vessels": f"{request.url_for('list_vessels')}",
+        "zones": f"{request.url_for('list_zones')}",
+    }
+
+
+# Registered on the app itself: a route added to a router that has already
+# been included would never be exposed. rd is a synchronous client, so its
+# calls must not be awaited.
+@app.get("/cache/all/flush")
+async def cache_all_flush(request: Request):
+    rd.flushall()
+    return {"code": 0}
diff --git a/backend/bloom/routers/excursions.py b/backend/bloom/routers/excursions.py
new file mode 100644
index 00000000..8008c51b
--- /dev/null
+++ b/backend/bloom/routers/excursions.py
@@ -0,0 +1,71 @@
+import json
+import time
+
+import redis
+from dependency_injector.wiring import inject, Provide
+from fastapi import APIRouter, Depends
+
+from bloom.config import settings
+from bloom.container import UseCasesContainer
+from bloom.logger import logger
+from bloom.usecase.Excursions import ExcursionUseCase
+
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+router = APIRouter()
+
+
+@router.get("/vessels/{vessel_id}/excursions")
+@inject
+async def list_vessel_excursions(
+    vessel_id: int,
+    nocache: bool = False,
+    excursion_usecase: ExcursionUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    endpoint = f"/vessels/{vessel_id}/excursions"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        return excursion_usecase.list_vessel_excursions(vessel_id)
+
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}")
+@inject
+async def get_vessel_excursion(
+    vessel_id: int,
+    excursions_id: int,
+    excursion_usecase: ExcursionUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    return await excursion_usecase.get_excursion_by_id(vessel_id, excursions_id)
+
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments")
+@inject
+async def list_vessel_excursion_segments(
+    vessel_id: int,
+    excursions_id: int,
+    excursion_usecase: ExcursionUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    return await excursion_usecase.get_excursions_segments(vessel_id, excursions_id)
+
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}")
+@inject
+async def get_vessel_excursion_segment(
+    vessel_id: int,
+    excursions_id: int,
+    segment_id: int,
+    excursion_usecase: ExcursionUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    return await excursion_usecase.get_segment_by_id(vessel_id, excursions_id, segment_id)
diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py
new file mode 100644
index 00000000..2c5df458
--- /dev/null
+++ b/backend/bloom/routers/ports.py
@@ -0,0 +1,45 @@
+import json
+import time
+
+from redis import Redis
+from dependency_injector.wiring import inject, Provide
+from fastapi import APIRouter, Depends
+from bloom.config import settings
+from bloom.container import UseCasesContainer
+from bloom.logger import logger
+from bloom.usecase.Ports import PortUseCase
+
+router = APIRouter()
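+# Module-level synchronous Redis client used to cache endpoint responses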
+redis_client = Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+
+@router.get("/ports")
+@inject
+async def list_ports(
+    nocache: bool = False,
+    ports_usecase: PortUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    endpoint = "/ports"
+    cache = redis_client.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        return await ports_usecase.list_ports()
+
+
+@router.get("/ports/{port_id}")
+@inject
+async def get_port(
+    port_id: int,
+    ports_usecase: PortUseCase = Depends(
+        Provide[UseCasesContainer.emission_service]
+    )
+):
+    return await ports_usecase.get_port_by_id(port_id)
diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py
new file mode 100644
index 00000000..d60e8481
--- /dev/null
+++ b/backend/bloom/routers/vessels.py
@@ -0,0 +1,91 @@
+from fastapi import APIRouter
+
+from redis import Redis
+import json
+import time
+from bloom.config import settings
+from bloom.container import UseCasesContainer
+from bloom.logger import logger
+
+rd = Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+
+router = APIRouter()
+
+
+@router.get("/vessels")
+async def list_vessels(nocache: bool = False):
+    endpoint = "/vessels"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        vessel_repository = use_cases.vessel_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(v.model_dump_json() if v else "{}")
+                         for v in vessel_repository.get_vessels_list(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            return json_data
+
+
+@router.get("/vessels/{vessel_id}")
+async def get_vessel(vessel_id: int):
+    use_cases = UseCasesContainer()
+    vessel_repository = use_cases.vessel_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return vessel_repository.get_vessel_by_id(session, vessel_id)
+
+
+@router.get("/vessels/all/positions/last")
+async def list_all_vessel_last_position(nocache: bool = False):
+    endpoint = "/vessels/all/positions/last"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        segment_repository = use_cases.segment_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(p.model_dump_json() if p else "{}")
+                         for p in segment_repository.get_all_vessels_last_position(session)]
+            # rd is a synchronous client, so its calls must not be awaited
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
+
+
+@router.get("/vessels/{vessel_id}/positions/last")
+async def get_vessel_last_position(vessel_id: int, nocache: bool = False):
+    endpoint = f"/vessels/{vessel_id}/positions/last"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
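+        # Cache hit: return the previously stored JSON payload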
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        segment_repository = use_cases.segment_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            result = segment_repository.get_vessel_last_position(session, vessel_id)
+            json_data = json.loads(result.model_dump_json() if result else "{}")
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py
new file mode 100644
index 00000000..2c1b5978
--- /dev/null
+++ b/backend/bloom/routers/zones.py
@@ -0,0 +1,119 @@
+import json
+import time
+
+import redis
+from fastapi import APIRouter
+from starlette.requests import Request
+
+from bloom.config import settings
+from bloom.container import UseCasesContainer
+from bloom.logger import logger
+
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+router = APIRouter()
+
+
+@router.get("/zones")
+async def list_zones(request: Request, nocache: bool = False):
+    endpoint = "/zones"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zones(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
+
+
+@router.get("/zones/all/categories")
+async def list_zone_categories(request: Request, nocache: bool = False):
+    endpoint = "/zones/all/categories"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zone_categories(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
+
+
+@router.get("/zones/by-category/{category}/by-sub-category/{sub}")
+async def get_zone_all_by_category_and_sub(category: str = "all", sub: str = None, nocache: bool = False):
+    endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
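+            # Cache miss: query the matching zones, then cache the serialized result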
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in
+                         zone_repository.get_all_zones_by_category(session, category if category != 'all' else None,
+                                                                   sub)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
+
+
+@router.get("/zones/by-category/{category}")
+async def get_zone_all_by_category(category: str = "all", nocache: bool = False):
+    endpoint = f"/zones/by-category/{category}"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+        return payload
+    else:
+        use_cases = UseCasesContainer()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in
+                         zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time() - start}")
+            return json_data
+
+
+@router.get("/zones/{zones_id}")
+async def get_zone(zones_id: int):
+    use_cases = UseCasesContainer()
+    zone_repository = use_cases.zone_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return zone_repository.get_zone_by_id(session, zones_id)
diff --git a/backend/bloom/services/api.py b/backend/bloom/services/api.py
deleted file mode 100644
index 71bd474e..00000000
--- a/backend/bloom/services/api.py
+++ /dev/null
@@ -1,280 +0,0 @@
-from fastapi import FastAPI, APIRouter
-from fastapi import Request
-
-import redis
-import json
-from bloom.config import settings
-from bloom.container import UseCases
-from bloom.domain.vessel import Vessel
-from bloom.logger import logger
-
-rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
-
-from datetime import datetime
-import time
-
-
-app = FastAPI()
-
-@app.get("/cache/all/flush")
-async def cache_all_flush(request:Request):
-    rd.flushall()
-    return {"code":0}
-
-@app.get("/vessels")
-async def list_vessels(nocache:bool=False):
-    endpoint=f"/vessels"
-    cache= rd.get(endpoint)
-    start = time.time()
-    if cache and not nocache:
-        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
-        payload=json.loads(cache)
-        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-        return payload
-    else:
-        use_cases = UseCases()
-        vessel_repository = use_cases.vessel_repository()
-        db = use_cases.db()
-        with db.session() as session:
-
-            json_data = [json.loads(v.model_dump_json() if v else "{}")
-                    for v in vessel_repository.get_vessels_list(session)]
-            rd.set(endpoint, json.dumps(json_data))
-            rd.expire(endpoint,settings.redis_cache_expiration)
-            return json_data
-
-@app.get("/vessels/{vessel_id}")
-async def get_vessel(vessel_id: int):
-    use_cases = UseCases()
-    vessel_repository = use_cases.vessel_repository()
-    db = use_cases.db()
-    with db.session() as session:
-        return vessel_repository.get_vessel_by_id(session,vessel_id)
-
-@app.get("/vessels/all/positions/last")
-async def list_all_vessel_last_position(nocache:bool=False):
-    endpoint=f"/vessels/all/positions/last"
-    cache= rd.get(endpoint)
-    start = time.time()
-    if cache and not nocache:
-        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
-        payload=json.loads(cache)
-        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-        return payload
-    else:
-        use_cases = UseCases()
-        segment_repository = use_cases.segment_repository()
-        db = use_cases.db()
-        with db.session() as session:
-            json_data = [json.loads(p.model_dump_json() if p else "{}")
-                    for p in segment_repository.get_all_vessels_last_position(session)]
-            rd.set(endpoint, json.dumps(json_data))
-            rd.expire(endpoint,settings.redis_cache_expiration)
-            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-            return json_data
-
-@app.get("/vessels/{vessel_id}/positions/last")
-async def get_vessel_last_position(vessel_id: int, nocache:bool=False):
-    endpoint=f"/vessels/{vessel_id}/positions/last"
-    cache= rd.get(endpoint)
-    start = time.time()
-    if cache and not nocache:
-        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
-        payload=json.loads(cache)
-        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-        return payload
-    else:
-        use_cases = UseCases()
-        segment_repository = use_cases.segment_repository()
-        db = use_cases.db()
-        with db.session() as session:
-            result=segment_repository.get_vessel_last_position(session,vessel_id)
-            json_data = json.loads(result.model_dump_json() if result else "{}")
-            rd.set(endpoint, json.dumps(json_data))
-            rd.expire(endpoint,settings.redis_cache_expiration)
-            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-            return json_data
-
-@app.get("/vessels/{vessel_id}/excursions")
-async def list_vessel_excursions(vessel_id: int, nocache:bool=False):
-    endpoint=f"/vessels/{vessel_id}/excursions"
-    cache= rd.get(endpoint)
-    start = time.time()
-    if cache and not nocache:
-        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
-        payload=json.loads(cache)
-        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-        return payload
-    else:
-        use_cases = UseCases()
-        excursion_repository = use_cases.excursion_repository()
-        db = use_cases.db()
-        with db.session() as session:
-            json_data = [json.loads(p.model_dump_json() if p else "{}")
-                    for p in excursion_repository.get_excursions_by_vessel_id(session,vessel_id)]
-            rd.set(endpoint, json.dumps(json_data))
-            rd.expire(endpoint,settings.redis_cache_expiration)
-            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
-            return json_data
-
-
-@app.get("/vessels/{vessel_id}/excursions/{excursions_id}")
-async def get_vessel_excursion(vessel_id: int,excursions_id: int):
-    use_cases = UseCases()
-    excursion_repository = use_cases.excursion_repository()
-    db = use_cases.db()
-    with db.session() as session:
-        return excursion_repository.get_vessel_excursion_by_id(session,vessel_id,excursions_id)
-
-
-@app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments")
-async def list_vessel_excursion_segments(vessel_id: int,excursions_id: int):
-    use_cases = UseCases()
-    segment_repository = use_cases.segment_repository()
-    db = use_cases.db()
-    with db.session() as session:
-        return segment_repository.list_vessel_excursion_segments(session,vessel_id,excursions_id)
-
-@app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}")
-async def get_vessel_excursion_segment(vessel_id: int,excursions_id: int, segment_id:int):
-    use_cases = UseCases()
-    segment_repository = use_cases.segment_repository()
-    db = use_cases.db()
-    with db.session() as session:
-        return segment_repository.get_vessel_excursion_segment_by_id(session,vessel_id,excursions_id,segment_id)
-
-@app.get("/ports") -async def list_ports(request:Request,nocache:bool=False): - endpoint=f"/ports" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - port_repository = use_cases.port_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in port_repository.get_all_ports(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - - -@app.get("/ports/{port_id}") -async def get_port(port_id:int): - use_cases = UseCases() - port_repository = use_cases.port_repository() - db = use_cases.db() - with db.session() as session: - return port_repository.get_port_by_id(session,port_id) - -@app.get("/zones") -async def list_zones(request:Request,nocache:bool=False): - endpoint=f"/zones" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/all/categories") -async def list_zone_categories(request:Request,nocache:bool=False): - endpoint=f"/zones/all/categories" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zone_categories(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/by-category/{category}/by-sub-category/{sub}") -async def get_zone_all_by_category(category:str="all",sub:str=None,nocache:bool=False): - endpoint=f"/zones/by-category/{category}/by-sub-category/{sub}" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None,sub)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - 
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/by-category/{category}") -async def get_zone_all_by_category(category:str="all",nocache:bool=False): - endpoint=f"/zones/by-category/{category}" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/{zones_id}") -async def get_zone(zones_id:int): - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - return zone_repository.get_zone_by_id(session,zones_id) - -@app.get("/") -async def root(request:Request): - return { - "maptiles": f"{request.url_for('list_maptiles')}", - "ports": f"{request.url_for('list_ports')}", - "vessels": f"{request.url_for('list_vessels')}", - "zones": f"{request.url_for('list_zones')}", - } \ No newline at end of file diff --git a/backend/bloom/services/geo.py b/backend/bloom/services/geo.py index 81f3d674..588aa69b 100644 --- a/backend/bloom/services/geo.py +++ b/backend/bloom/services/geo.py @@ -2,7 +2,7 @@ import pandas as pd import geopandas as gpd -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.config import settings def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: @@ -28,7 +28,7 @@ def find_positions_in_port_buffer(vessel_positions: List[tuple]) -> List[tuple]: ) # Get all ports from DataBase - use_cases = UseCases() + use_cases = UseCasesContainer() port_repository = use_cases.port_repository() db = use_cases.db() with db.session() as session: diff --git a/backend/bloom/tasks/clean_positions.py b/backend/bloom/tasks/clean_positions.py index fc6e26cc..c9d1448e 100644 --- a/backend/bloom/tasks/clean_positions.py +++ b/backend/bloom/tasks/clean_positions.py @@ -8,7 +8,7 @@ from geopy import distance from shapely.geometry import Point -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.domain.vessel_position import VesselPosition from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository from bloom.logger import logger @@ -51,7 +51,7 @@ def to_coords(row: pd.Series) -> pd.Series: def run(batch_time): - use_cases = UseCases() + use_cases = UseCasesContainer() db = use_cases.db() spire_repository = use_cases.spire_ais_data_repository() excursion_repository = use_cases.excursion_repository() diff --git a/backend/bloom/tasks/compute_port_geometry_buffer.py b/backend/bloom/tasks/compute_port_geometry_buffer.py index 186caa86..fb1cd858 100644 --- a/backend/bloom/tasks/compute_port_geometry_buffer.py +++ b/backend/bloom/tasks/compute_port_geometry_buffer.py @@ -5,7 +5,7 @@ import pyproj import shapely from bloom.config import settings -from bloom.container import UseCases +from bloom.container import UseCasesContainer from bloom.logger import logger from 
 from scipy.spatial import Voronoi
 from shapely.geometry import LineString, Polygon
 
@@ -93,7 +93,7 @@ def assign_voronoi_buffer(ports: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
 
 
 def run() -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     port_repository = use_cases.port_repository()
     db = use_cases.db()
     items = []
diff --git a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py
index df120b9e..c1bcaea0 100644
--- a/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py
+++ b/backend/bloom/tasks/convert_spire_vessels_to_spire_ais_data.py
@@ -1,7 +1,7 @@
 from time import perf_counter
 from typing import Generator
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.spire_ais_data import SpireAisData
 from bloom.infra.database.sql_model import VesselPositionSpire
 from bloom.logger import logger
@@ -9,7 +9,7 @@
 from shapely import Point
 from sqlalchemy.orm.session import Session
 
-use_cases = UseCases()
+use_cases = UseCasesContainer()
 vessel_repo = use_cases.vessel_repository()
 spire_ais_data_repo = use_cases.spire_ais_data_repository()
 db = use_cases.db()
diff --git a/backend/bloom/tasks/create_new_excursion.py b/backend/bloom/tasks/create_new_excursion.py
index 2c637159..bd4f6f53 100644
--- a/backend/bloom/tasks/create_new_excursion.py
+++ b/backend/bloom/tasks/create_new_excursion.py
@@ -1,17 +1,18 @@
 from bloom.domain.excursion import Excursion
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from sqlalchemy.orm import Session
 from shapely.geometry import Point
-from datetime import datetime,timedelta
+from datetime import datetime, timedelta
 from typing import Optional
 import pandas as pd
 from geoalchemy2.shape import to_shape
 
+
 def add_excursion(vessel_id: int, departure_at: datetime, departure_position: Optional[Point] = None) -> int:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     excursion_repository = use_cases.excursion_repository()
-    
+
     with db.session() as session:
         result = excursion_repository.get_param_from_last_excursion(session, vessel_id)
 
@@ -48,19 +49,19 @@ def add_excursion(vessel_id: int, departure_at: datetime, departure_position: Op
         session.refresh(new_excursion_sql)
         return new_excursion_sql.id
 
+
 def close_excursion(id: int, port_id: int, latitude: float, longitude: float, arrived_at: datetime) -> None:
-
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     excursion_repository = use_cases.excursion_repository()
 
     with db.session() as session:
         excursion = excursion_repository.get_excursion_by_id(session, id)
-        
+
         if excursion:
             excursion.arrival_port_id = port_id
             excursion.arrival_at = arrived_at
-            excursion.arrival_position = Point(longitude,latitude)
+            excursion.arrival_position = Point(longitude, latitude)
 
             close_excursion_sql = excursion_repository.map_to_sql(excursion)
             session.merge(close_excursion_sql)  # Use merge so the entity is updated within the session
@@ -68,41 +69,41 @@ def close_excursion(id: int, port_id: int, latitude: float, longitude: float, ar
         else:
             raise ValueError(f"No excursion found with ID {id}")
 
-def update_excursion(id :int) -> None :
-    
-    use_cases = UseCases()
+
+def update_excursion(id: int) -> None:
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     excursion_repository = use_cases.excursion_repository()
     segment_repository = use_cases.segment_repository()
 
     with db.session() as session:
-
+
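+        # Aggregate per-segment durations to refresh the excursion totals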
         total_segments = segment_repository.get_segments_by_excursions(session, id)
-
+
         total_segments['segment_duration'] = pd.to_timedelta(total_segments['segment_duration'])
 
-        excursion_duration=total_segments['segment_duration'].sum()
+        excursion_duration = total_segments['segment_duration'].sum()
 
-        in_amp=total_segments[total_segments.loc[:, 'in_amp_zone'] == 1]
-        amp_duration=in_amp['segment_duration'].sum()
+        in_amp = total_segments[total_segments.loc[:, 'in_amp_zone'] == 1]
+        amp_duration = in_amp['segment_duration'].sum()
 
-        in_territorial_waters=total_segments[total_segments.loc[:, 'in_territorial_waters'] == 1]
-        territorial_duration=in_territorial_waters['segment_duration'].sum()
+        in_territorial_waters = total_segments[total_segments.loc[:, 'in_territorial_waters'] == 1]
+        territorial_duration = in_territorial_waters['segment_duration'].sum()
 
-        in_costal_waters=total_segments[total_segments.loc[:, 'in_costal_waters'] == 1]
-        costal_duration=in_costal_waters['segment_duration'].sum()
+        in_costal_waters = total_segments[total_segments.loc[:, 'in_costal_waters'] == 1]
+        costal_duration = in_costal_waters['segment_duration'].sum()
 
         excursion = excursion_repository.get_excursion_by_id(session, id)
-
+
         if excursion:
             excursion.excursion_duration = excursion_duration
             excursion.total_time_in_amp = amp_duration
             excursion.total_time_in_territorial_waters = territorial_duration
             excursion.total_time_in_costal_waters = costal_duration
             excursion.total_time_at_sea = excursion_duration - territorial_duration - costal_duration
-
+
             excursion_update_sql = excursion_repository.map_to_sql(excursion)
             session.merge(excursion_update_sql)  # Use merge so the entity is updated within the session
             session.commit()
             session.close()
         else:
-            raise ValueError(f"No excursion found with ID {id}")
\ No newline at end of file
+            raise ValueError(f"No excursion found with ID {id}")
diff --git a/backend/bloom/tasks/create_update_excursions_segments.py b/backend/bloom/tasks/create_update_excursions_segments.py
index 80f00bba..c4bed986 100644
--- a/backend/bloom/tasks/create_update_excursions_segments.py
+++ b/backend/bloom/tasks/create_update_excursions_segments.py
@@ -10,7 +10,7 @@
 from shapely.geometry import Point
 from sqlalchemy.orm import Session
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.excursion import Excursion
 from bloom.domain.segment import Segment
 from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository
@@ -36,7 +36,7 @@ def to_coords(row: pd.Series) -> pd.Series:
 
 def add_excursion(session: Session, vessel_id: int, departure_at: datetime,
                   departure_position: Optional[Point] = None) -> int:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     excursion_repository = use_cases.excursion_repository()
 
     result = excursion_repository.get_param_from_last_excursion(session, vessel_id)
@@ -72,7 +72,7 @@ def add_excursion(session: Session, vessel_id: int, departure_at: datetime,
 
 def close_excursion(session: Session, excursion_id: int, port_id: int, latitude: float, longitude: float,
                     arrived_at: datetime) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     excursion_repository = use_cases.excursion_repository()
 
     excursion = excursion_repository.get_excursion_by_id(session, excursion_id)
@@ -85,7 +85,7 @@ def close_excursion(session: Session, excursion_id: int, port_id: int, latitude:
 
 
 def run():
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     segment_repository = use_cases.segment_repository()
     vessel_position_repository = use_cases.vessel_position_repository()
diff --git a/backend/bloom/tasks/create_update_rel_segments_zones.py b/backend/bloom/tasks/create_update_rel_segments_zones.py
index 1e4d65a3..0def822a 100644
--- a/backend/bloom/tasks/create_update_rel_segments_zones.py
+++ b/backend/bloom/tasks/create_update_rel_segments_zones.py
@@ -1,6 +1,6 @@
 from time import perf_counter
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.rel_segment_zone import RelSegmentZone
 from bloom.infra.repositories.repository_rel_segment_zone import RelSegmentZoneRepository
 from bloom.infra.repositories.repository_task_execution import TaskExecutionRepository
@@ -8,7 +8,7 @@
 
 
 def run():
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     segment_repository = use_cases.segment_repository()
     excursion_repository = use_cases.excursion_repository()
diff --git a/backend/bloom/tasks/load_dim_port_from_csv.py b/backend/bloom/tasks/load_dim_port_from_csv.py
index 00a2eef8..ad275304 100644
--- a/backend/bloom/tasks/load_dim_port_from_csv.py
+++ b/backend/bloom/tasks/load_dim_port_from_csv.py
@@ -5,7 +5,7 @@
 import pandas as pd
 import pycountry
 from bloom.config import settings
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.port import Port
 from bloom.infra.database.errors import DBException
 from bloom.logger import logger
@@ -29,7 +29,7 @@ def map_to_domain(row) -> Port:
 
 
 def run(csv_file_name: str) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     port_repository = use_cases.port_repository()
     db = use_cases.db()
 
diff --git a/backend/bloom/tasks/load_dim_vessel_from_csv.py b/backend/bloom/tasks/load_dim_vessel_from_csv.py
index 1d0a1e96..0dde3873 100644
--- a/backend/bloom/tasks/load_dim_vessel_from_csv.py
+++ b/backend/bloom/tasks/load_dim_vessel_from_csv.py
@@ -3,7 +3,7 @@
 
 import pandas as pd
 from bloom.config import settings
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.vessel import Vessel
 from bloom.infra.database.errors import DBException
 from bloom.logger import logger
@@ -33,7 +33,7 @@ def map_to_domain(row: pd.Series) -> Vessel:
 
 
 def run(csv_file_name: str) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     vessel_repository = use_cases.vessel_repository()
     db = use_cases.db()
 
diff --git a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
index fe3365b9..ff3536c3 100644
--- a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
+++ b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
@@ -3,7 +3,7 @@
 
 import pandas as pd
 from bloom.config import settings
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.zone import Zone
 from bloom.infra.database.errors import DBException
 from bloom.logger import logger
@@ -26,7 +26,7 @@ def map_to_domain(row: pd.Series) -> Zone:
 
 
 def run(csv_file_name: str):
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     zone_repository = use_cases.zone_repository()
 
diff --git a/backend/bloom/tasks/load_fct_excursions_from_csv.py b/backend/bloom/tasks/load_fct_excursions_from_csv.py
index 208deb13..c96e1d5b 100644
--- a/backend/bloom/tasks/load_fct_excursions_from_csv.py
+++ b/backend/bloom/tasks/load_fct_excursions_from_csv.py
@@ -6,7 +6,7 @@
 from datetime import datetime
 from shapely.geometry import Point
 from bloom.config import settings
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.infra.database.errors import DBException
 from bloom.logger import logger
 from bloom.domain.spire_ais_data import SpireAisData
@@ -119,7 +119,7 @@ def get_point(end_position: str) -> Point:
         return Point(end_position[1], end_position[0])
 
 def run(excursion_csv_filename: str, segment_csv_filename: str, spire_csv_filename: str) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     excursion_repository = use_cases.excursion_repository()
     # vessel_position_repository = use_cases.vessel_position_repository()
     segment_repository = use_cases.segment_repository()
diff --git a/backend/bloom/tasks/load_spire_data_from_api.py b/backend/bloom/tasks/load_spire_data_from_api.py
index 29d9b1e5..214b55a0 100644
--- a/backend/bloom/tasks/load_spire_data_from_api.py
+++ b/backend/bloom/tasks/load_spire_data_from_api.py
@@ -4,7 +4,7 @@
 from pathlib import Path
 from time import perf_counter
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.vessel import Vessel
 from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain
 from bloom.logger import logger
@@ -12,7 +12,7 @@
 
 
 def run(dump_path: str) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     spire_ais_data_repository = use_cases.spire_ais_data_repository()
     spire_traffic_usecase = use_cases.get_spire_data_usecase()
     vessel_repository = use_cases.vessel_repository()
diff --git a/backend/bloom/tasks/load_spire_data_from_csv.py b/backend/bloom/tasks/load_spire_data_from_csv.py
index 3968b1f1..e02bcd70 100644
--- a/backend/bloom/tasks/load_spire_data_from_csv.py
+++ b/backend/bloom/tasks/load_spire_data_from_csv.py
@@ -3,7 +3,7 @@
 
 import pandas as pd
 from bloom.config import settings
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.spire_ais_data import SpireAisData
 from bloom.infra.database.errors import DBException
 from bloom.logger import logger
@@ -51,7 +51,7 @@ def map_to_domain(row: pd.Series) -> SpireAisData:
 
 
 def run(csv_file_name: str):
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     db = use_cases.db()
     spire_ais_data_repository = use_cases.spire_ais_data_repository()
 
diff --git a/backend/bloom/tasks/load_spire_data_from_json.py b/backend/bloom/tasks/load_spire_data_from_json.py
index 04fbf907..36dc4512 100644
--- a/backend/bloom/tasks/load_spire_data_from_json.py
+++ b/backend/bloom/tasks/load_spire_data_from_json.py
@@ -3,14 +3,14 @@
 from pathlib import Path
 from time import perf_counter
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.infra.http.spire_api_utils import map_raw_vessels_to_domain
 from bloom.logger import logger
 from pydantic import ValidationError
 
 
 def run(file_name: str) -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     spire_ais_data_repository = use_cases.spire_ais_data_repository()
     db = use_cases.db()
 
diff --git a/backend/bloom/tasks/update_vessel_data_voyage.py b/backend/bloom/tasks/update_vessel_data_voyage.py
index ef4886ff..daff9964 100644
--- a/backend/bloom/tasks/update_vessel_data_voyage.py
+++ b/backend/bloom/tasks/update_vessel_data_voyage.py
@@ -1,7 +1,7 @@
 from datetime import datetime, timezone
 from time import perf_counter
 
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.spire_ais_data import SpireAisData
 from bloom.domain.vessel import Vessel
 from bloom.domain.vessel_data import VesselData
@@ -47,7 +47,7 @@ def map_ais_data_to_vessel_voyage(ais_data: SpireAisData, vessel: Vessel) -> Uni
 
 
 def run() -> None:
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
    spire_ais_data_repository = use_cases.spire_ais_data_repository()
     vessel_repository = use_cases.vessel_repository()
     db = use_cases.db()
diff --git a/backend/bloom/usecase/Excursions.py b/backend/bloom/usecase/Excursions.py
new file mode 100644
index 00000000..716ee3cf
--- /dev/null
+++ b/backend/bloom/usecase/Excursions.py
@@ -0,0 +1,30 @@
+import json
+
+from bloom.config import settings
+
+
+class ExcursionUseCase:
+    def __init__(self, excursions_repository, redis_client):
+        self.excursions_repository = excursions_repository
+        self.redis_client = redis_client
+        self.endpoint = "/vessels/excursions"
+
+    def list_vessel_excursions(self, vessel_id):
+        return self.excursions_repository.get_excursions_by_vessel_id(vessel_id)
+
+    async def get_excursions_by_vessel_id(self, vessel_id):
+        excursions = self.excursions_repository.get_excursions_by_vessel_id(vessel_id)
+
+        # Domain objects are pydantic models: serialize them before caching.
+        # The injected redis_client is expected to be an asynchronous client.
+        payload = json.dumps([json.loads(e.model_dump_json()) for e in excursions])
+        await self.redis_client.set(self.endpoint, payload)
+        await self.redis_client.expire(self.endpoint, settings.redis_cache_expiration)
+        return excursions
+
+    async def get_excursion_by_id(self, vessel_id, excursions_id):
+        return self.excursions_repository.get_excursion_by_id(vessel_id, excursions_id)
+
+    async def get_excursions_segments(self, vessel_id, excursions_id):
+        return self.excursions_repository.get(vessel_id, excursions_id)
+
+    async def get_segment_by_id(self, vessel_id, excursions_id, segment_id):
+        return self.excursions_repository.get_segment_by_id(vessel_id, excursions_id, segment_id)
diff --git a/backend/bloom/usecase/Ports.py b/backend/bloom/usecase/Ports.py
new file mode 100644
index 00000000..83d69b11
--- /dev/null
+++ b/backend/bloom/usecase/Ports.py
@@ -0,0 +1,20 @@
+import json
+
+from bloom.config import settings
+from bloom.infra.repositories.repository_port import PortRepository
+
+
+class PortUseCase:
+    def __init__(self, ports_repository: PortRepository, redis_client):
+        self.ports_repository = ports_repository
+        self.redis_client = redis_client
+        self.caching_key = 'ports:caching'
+
+    async def list_ports(self):
+        ports = self.ports_repository.get_all_ports()
+        # Domain objects are pydantic models: serialize them before caching.
+        # The injected redis_client is expected to be an asynchronous client.
+        payload = json.dumps([json.loads(p.model_dump_json()) for p in ports])
+        await self.redis_client.set(self.caching_key, payload)
+        await self.redis_client.expire(self.caching_key, settings.redis_cache_expiration)
+        return ports
+
+    async def get_port_by_id(self, port_id):
+        return self.ports_repository.get_port_by_id(port_id)
diff --git a/backend/tests/test_alert.py b/backend/tests/test_alert.py
index e8a15a56..b1fe7929 100644
--- a/backend/tests/test_alert.py
+++ b/backend/tests/test_alert.py
@@ -1,4 +1,4 @@
-from bloom.container import UseCases
+from bloom.container import UseCasesContainer
 from bloom.domain.alert import Alert
 from datetime import datetime, timezone
 
@@ -13,7 +13,7 @@
 
 
 def test_launch_alert():
-    use_cases = UseCases()
+    use_cases = UseCasesContainer()
     alert_usecase = use_cases.generate_alert_usecase()
     status_code = alert_usecase.send_slack_alert(
         test_alert,