diff --git a/.env.template b/.env.template
index 32778649..01a50525 100644
--- a/.env.template
+++ b/.env.template
@@ -1,4 +1,5 @@
 # these values are used in the local docker env. You can use "localhost" hostname if you run the application without docker
+APP_ENV=dev
 POSTGRES_HOSTNAME=postgres_bloom
 POSTGRES_USER=bloom_user
 POSTGRES_PASSWORD=bloom
diff --git a/.gitignore b/.gitignore
index 339b6bcf..eaff0134 100644
--- a/.gitignore
+++ b/.gitignore
@@ -126,7 +126,8 @@ celerybeat.pid
 
 # Environments
 .env
-.env.test
+.env.*
+!.env.template
 .venv
 env/
 venv/
diff --git a/alembic/env.py b/alembic/env.py
index 1d2331a6..8701599f 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -22,26 +22,9 @@
 # can be acquired:
 # ... etc.
 
-postgres_user = os.environ.get("POSTGRES_USER")
-postgres_password = os.environ.get("POSTGRES_PASSWORD")
-postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
-postgres_db = os.environ.get("POSTGRES_DB")
-postgres_port = os.environ.get("POSTGRES_PORT")
-
-db_url = (
-    "postgresql://"
-    + postgres_user
-    + ":"
-    + postgres_password
-    + "@"
-    + postgres_hostname
-    + ":"
-    + postgres_port
-    + "/"
-    + postgres_db
-)
-
-config.set_main_option("sqlalchemy.url", db_url)
+from bloom.config import settings
+
+config.set_main_option("sqlalchemy.url", settings.db_url)
 
 
 def run_migrations_offline() -> None:
diff --git a/alembic/init_script/load_amp_data.py b/alembic/init_script/load_amp_data.py
index 86326de1..aee54a41 100644
--- a/alembic/init_script/load_amp_data.py
+++ b/alembic/init_script/load_amp_data.py
@@ -10,28 +10,10 @@
 logging.basicConfig()
 logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
 
-postgres_user = os.environ.get("POSTGRES_USER")
-postgres_password = os.environ.get("POSTGRES_PASSWORD")
-postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
-postgres_db = os.environ.get("POSTGRES_DB")
-postgres_port = os.environ.get("POSTGRES_PORT")
-
-
-# The db url is configured with the db connexion variables declared in the db.yaml file.
-db_url = (
-    "postgresql://"
-    + postgres_user
-    + ":"
-    + postgres_password
-    + "@"
-    + postgres_hostname
-    + ":"
-    + postgres_port
-    + "/"
-    + postgres_db
-)
-engine = create_engine(db_url, echo=False)
+from bloom.config import settings
+
+engine = create_engine(settings.db_url, echo=False)
 
 df = pd.read_csv(
     Path(os.path.dirname(__file__)).joinpath("../../data/zones_subset_02022024.csv"),
diff --git a/alembic/init_script/load_positions_data.py b/alembic/init_script/load_positions_data.py
index 6db0e4e3..4e2d24c7 100644
--- a/alembic/init_script/load_positions_data.py
+++ b/alembic/init_script/load_positions_data.py
@@ -8,27 +8,9 @@
 logging.basicConfig()
 logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
 
-postgres_user = os.environ.get("POSTGRES_USER")
-postgres_password = os.environ.get("POSTGRES_PASSWORD")
-postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
-postgres_db = os.environ.get("POSTGRES_DB")
-postgres_port = os.environ.get("POSTGRES_PORT")
+from bloom.config import settings
 
-# The db url is configured with the db connexion variables declared in the db.yaml file.
-db_url = (
-    "postgresql://"
-    + postgres_user
-    + ":"
-    + postgres_password
-    + "@"
-    + postgres_hostname
-    + ":"
-    + postgres_port
-    + "/"
-    + postgres_db
-)
-
-engine = create_engine(db_url)
+engine = create_engine(settings.db_url)
 
 df = pd.read_csv(
     Path(os.path.dirname(__file__)).joinpath("../../data/spire_positions_subset_02022024.csv"),
diff --git a/alembic/init_script/load_vessels_data.py b/alembic/init_script/load_vessels_data.py
index 03190585..26caaf36 100644
--- a/alembic/init_script/load_vessels_data.py
+++ b/alembic/init_script/load_vessels_data.py
@@ -8,26 +8,9 @@
 logging.basicConfig()
 logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
 
-postgres_user = os.environ.get("POSTGRES_USER")
-postgres_password = os.environ.get("POSTGRES_PASSWORD")
-postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
-postgres_db = os.environ.get("POSTGRES_DB")
-postgres_port = os.environ.get("POSTGRES_PORT")
+from bloom.config import settings
 
-# The db url is configured with the db connexion variables declared in the db.yaml file.
-db_url = (
-    "postgresql://"
-    + postgres_user
-    + ":"
-    + postgres_password
-    + "@"
-    + postgres_hostname
-    + ":"
-    + postgres_port
-    + "/"
-    + postgres_db
-)
-engine = create_engine(db_url)
+engine = create_engine(settings.db_url)
 
 df = pd.read_csv(
     Path(os.path.dirname(__file__)).joinpath("../../data/chalutiers_pelagiques.csv"),
     sep=";",
diff --git a/bloom/config.py b/bloom/config.py
index c0f0e1ea..72237ba3 100644
--- a/bloom/config.py
+++ b/bloom/config.py
@@ -1,28 +1,78 @@
 import os
+from pathlib import Path
 
 from pydantic import BaseSettings
 
 
+def extract_values(filename: str, config: dict, allow_extend: bool = True) -> dict:
+    """Extract key=value pairs from a file.
+
+    Parameters:
+    - filename: file name/path from which to extract key/value pairs
+    - config: dict to extend/update with the extracted key/value pairs
+    - allow_extend: if True, new keys may be added to config; if False,
+      only keys already present in config are updated
+
+    Returns the updated dict.
+    """
+    filepath = Path(os.path.dirname(__file__)).joinpath(filename)
+    with open(filepath) as lines:
+        for line in lines:
+            # Split at the first occurrence of '=' only, so that values may
+            # themselves contain the '=' character.
+            split = line.strip().split('=', 1)
+            # Keep only lines that yield exactly two items (a key and a value).
+            if len(split) == 2:
+                key = split[0].lower()
+                # Add/update the pair if the key already exists in config,
+                # or if adding new keys is allowed.
+                if key in config or allow_extend:
+                    config[key] = split[1]
+    return config
+
+
 class Settings(BaseSettings):
-    postgres_user = os.environ.get("POSTGRES_USER")
-    postgres_password = os.environ.get("POSTGRES_PASSWORD")
-    postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
-    postgres_port = os.environ.get("POSTGRES_PORT")
-    postgres_db = os.environ.get("POSTGRES_DB")
-
-    print("db_url: ", "postgresql://"+postgres_user+":"+postgres_password+"@"+postgres_hostname+":"+postgres_port+"/"+postgres_db)
-
-    db_url = (
-        "postgresql://"
-        + postgres_user
-        + ":"
-        + postgres_password
-        + "@"
-        + postgres_hostname
-        + ":"
-        + postgres_port
-        + "/"
-        + postgres_db
-    )
+    app_env: str = None
+    db_url: str = None
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        # app_env defaults to 'dev'; it can be overridden through the APP_ENV
+        # environment variable declared in .env.template.
+        self.app_env = os.environ.get("APP_ENV", "dev")
+
+        # dict holding the merged/overridden config parameters
+        config = {}
+
+        # Extract .env.template first as default values. The keys present in
+        # .env.template restrict the keys extracted from the following files,
+        # so every parameter MUST HAVE a default value declared in
+        # .env.template to be loaded.
+        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env.template")
+        if os.path.isfile(file_to_process):
+            extract_values(file_to_process, config, allow_extend=True)
+
+        # Extract .env.local and override existing values, restricted to the
+        # keys already present from .env.template.
+        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env.local")
+        if os.path.isfile(file_to_process):
+            extract_values(file_to_process, config, allow_extend=False)
+
+        # Extract .env.${app_env} and override existing values, with the same
+        # key restriction.
+        file_to_process = Path(os.path.dirname(__file__)).joinpath(f"../.env.{self.app_env}")
+        if os.path.isfile(file_to_process):
+            extract_values(file_to_process, config, allow_extend=False)
+
+        # Extract .env.${app_env}.local and override existing values, with the
+        # same key restriction.
+        file_to_process = Path(os.path.dirname(__file__)).joinpath(f"../.env.{self.app_env}.local")
+        if os.path.isfile(file_to_process):
+            extract_values(file_to_process, config, allow_extend=False)
+
+        # Now that all .env.* files have been merged, write the cumulated
+        # result to .env for compliance with the docker/docker-compose
+        # standard.
+        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env")
+        with open(file_to_process, "w") as f:
+            f.write("# This file was generated automatically by bloom.config\n"
+                    "# Don't modify values directly here\n"
+                    "# Use the .env.* files instead, then restart the application\n")
+            for k, v in config.items():
+                f.write(f"{k}={v}\n")
+
+        # Extract the key/value pairs from the freshly written .env and attach
+        # them to this instance as attributes.
+        if os.path.isfile(file_to_process):
+            extract_values(file_to_process, self.__dict__)
+
+        # Build the db_url attribute holding the database connection string.
+        self.db_url = (
+            f"postgresql://{self.postgres_user}:{self.postgres_password}"
+            f"@{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}"
+        )
 
     srid: int = 4326
diff --git a/bloom/usecase/GenerateAlerts.py b/bloom/usecase/GenerateAlerts.py
index 77e2eb42..a37a55bb 100644
--- a/bloom/usecase/GenerateAlerts.py
+++ b/bloom/usecase/GenerateAlerts.py
@@ -9,6 +9,7 @@
 from bloom.infra.repositories.repository_raster import RepositoryRaster
 from bloom.logger import logger
+from bloom.config import settings
 
 
 class GenerateAlerts:
     def __init__(
@@ -36,7 +37,7 @@ def send_slack_alert(
         alert: Alert,
         type_name: str = "Vessel in a Protected Area",
     ) -> int:
-        slack_url = os.environ.get("SLACK_URL")
+        slack_url = settings.slack_url
         webhook = WebhookClient(slack_url)
         blocks = (
             """[
diff --git a/bloom/usecase/GetVesselsFromSpire.py b/bloom/usecase/GetVesselsFromSpire.py
index 8f512d1d..9284322f 100644
--- a/bloom/usecase/GetVesselsFromSpire.py
+++ b/bloom/usecase/GetVesselsFromSpire.py
@@ -11,6 +11,8 @@
 from bloom.infra.repositories.repository_vessel import RepositoryVessel
 from bloom.logger import logger
 
+from bloom.config import settings
+
 
 class GetVesselsFromSpire:
     def __init__(
@@ -19,7 +21,7 @@ def __init__(
     ) -> None:
         self.vessel_repository: RepositoryVessel = vessel_repository
 
-        spire_token = os.environ.get("SPIRE_TOKEN")
+        spire_token = settings.spire_token
 
         self.transport = RequestsHTTPTransport(
             url="https://api.spire.com/graphql",
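
Reviewer note: with this change every entry point reads its configuration from the shared `settings` object instead of querying `os.environ` directly. A minimal usage sketch of the new flow, mirroring the updated init scripts (it assumes the usual Postgres keys are declared in `.env.template`, as the loader requires):

```python
# Importing settings triggers the .env.* merge performed in
# Settings.__init__ and exposes every key declared in .env.template
# as a lowercase attribute on the settings object.
from sqlalchemy import create_engine

from bloom.config import settings

# db_url is assembled from the merged postgres_* values.
engine = create_engine(settings.db_url)

# Any other key declared in .env.template is reachable the same way,
# e.g. settings.slack_url and settings.spire_token in the usecases above.
```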
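
The override precedence implemented in `Settings.__init__` can also be exercised directly through `extract_values`. Here is a hedged sketch of that behaviour; the temporary files and their contents are invented for illustration only:

```python
# Illustration of the layered override order: the first file seeds the
# allowed keys (allow_extend=True); later files may only override them.
import tempfile
from pathlib import Path

from bloom.config import extract_values

with tempfile.TemporaryDirectory() as tmp:
    template = Path(tmp) / ".env.template"  # hypothetical file
    local = Path(tmp) / ".env.local"        # hypothetical file
    template.write_text("POSTGRES_PORT=5432\n")
    local.write_text("POSTGRES_PORT=5433\nUNKNOWN_KEY=x\n")

    config = {}
    extract_values(template, config, allow_extend=True)
    extract_values(local, config, allow_extend=False)

    # Keys are stored lowercased; UNKNOWN_KEY was dropped because it has
    # no default in the template.
    assert config == {"postgres_port": "5433"}
```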