feat: proposal to manage .env* parameter files the Symfony way
The proposal is to manage .env files following the Symfony convention:
- .env.template is loaded first and provides the default values; it declares APP_ENV=dev
- it is then overridden by the content of .env.local, if it exists; this file is not tracked by git, so it can for example override APP_ENV=test
- then by the content of .env.${APP_ENV}, if it exists; this file holds defaults specific to the APP_ENV environment (dev/test/prod), on top of the common defaults in .env.template
- then by the content of .env.${APP_ENV}.local, if it exists; this file is not tracked by git either
For compliance with the Docker convention, .env is reserved for the merged result of all variables.
So once all the files have been parsed, the result is written to .env and reloaded from .env by bloom.config (see the sketch below).
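For illustration, here is a minimal sketch of that override order, using made-up values rather than the project's actual defaults:
```python
# Minimal sketch of the override order described above (hypothetical values).
template     = {"APP_ENV": "dev", "POSTGRES_PORT": "5432"}   # .env.template (committed defaults)
local        = {"APP_ENV": "test"}                           # .env.local (not tracked by git)
env_specific = {"POSTGRES_PORT": "5433"}                     # .env.test
env_local    = {}                                            # .env.test.local (absent here)

# Later files win: the dicts are merged left to right.
merged = {**template, **local, **env_specific, **env_local}
# merged == {"APP_ENV": "test", "POSTGRES_PORT": "5433"}
# bloom.config writes this merged result to .env (with lower-cased keys) and reloads it from there.
```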

All keys are converted to lower case:
ATTR=VaLue gives settings.attr = "VaLue"

To ensure a minimal documentation of the parameters, as with the Symfony .env.dist convention,
the keys extracted from all these files MUST be present in .env.template.
So to add a new parameter, you MUST declare it in .env.template with a default value;
otherwise the new parameter won't be available in the Python scripts.
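As an illustration (NEW_FEATURE_FLAG is a made-up key, not one of the project's parameters), a key set only in .env.local never reaches the settings object, because the override files are parsed with allow_extend=False:
```python
# Hypothetical scenario, for illustration only:
#   .env.template : NEW_FEATURE_FLAG is NOT declared
#   .env.local    : NEW_FEATURE_FLAG=1
# The key is dropped during the merge and never written to .env.
from bloom.config import settings

print(hasattr(settings, "new_feature_flag"))  # False
```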

To access the settings:
```python
from bloom.config import settings
print(settings.db_url)
print(settings.postgres_user)
```
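Because bloom.config also assembles settings.db_url from the postgres_* values, the alembic scripts and use cases below can drop their manual os.environ lookups and connection-string concatenation.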
rv2931 committed Feb 26, 2024
1 parent 0aa29f0 commit 5b6764d
Showing 9 changed files with 88 additions and 103 deletions.
1 change: 1 addition & 0 deletions .env.template
@@ -1,4 +1,5 @@
# these values are used in the local docker env. You can use "localhost" hostname if you run the application without docker
APP_ENV=dev
POSTGRES_HOSTNAME=postgres_bloom
POSTGRES_USER=bloom_user
POSTGRES_PASSWORD=bloom
3 changes: 2 additions & 1 deletion .gitignore
@@ -126,7 +126,8 @@ celerybeat.pid

# Environments
.env
.env.test
.env.*
!.env.template
.venv
env/
venv/
23 changes: 3 additions & 20 deletions alembic/env.py
@@ -22,26 +22,9 @@
# can be acquired:
# ... etc.

postgres_user = os.environ.get("POSTGRES_USER")
postgres_password = os.environ.get("POSTGRES_PASSWORD")
postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
postgres_db = os.environ.get("POSTGRES_DB")
postgres_port = os.environ.get("POSTGRES_PORT")

db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
)

config.set_main_option("sqlalchemy.url", db_url)
from bloom.config import settings

config.set_main_option("sqlalchemy.url", settings.db_url)


def run_migrations_offline() -> None:
24 changes: 3 additions & 21 deletions alembic/init_script/load_amp_data.py
@@ -10,28 +10,10 @@
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

postgres_user = os.environ.get("POSTGRES_USER")
postgres_password = os.environ.get("POSTGRES_PASSWORD")
postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
postgres_db = os.environ.get("POSTGRES_DB")
postgres_port = os.environ.get("POSTGRES_PORT")


# The db url is configured with the db connexion variables declared in the db.yaml file.
db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
)

engine = create_engine(db_url, echo=False)
from bloom.config import settings

engine = create_engine(settings.db_url, echo=False)

df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/zones_subset_02022024.csv"),
22 changes: 2 additions & 20 deletions alembic/init_script/load_positions_data.py
@@ -8,27 +8,9 @@
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

postgres_user = os.environ.get("POSTGRES_USER")
postgres_password = os.environ.get("POSTGRES_PASSWORD")
postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
postgres_db = os.environ.get("POSTGRES_DB")
postgres_port = os.environ.get("POSTGRES_PORT")
from bloom.config import settings

# The db url is configured with the db connexion variables declared in the db.yaml file.
db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
)

engine = create_engine(db_url)
engine = create_engine(settings.db_url)

df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/spire_positions_subset_02022024.csv"),
21 changes: 2 additions & 19 deletions alembic/init_script/load_vessels_data.py
@@ -8,26 +8,9 @@
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

postgres_user = os.environ.get("POSTGRES_USER")
postgres_password = os.environ.get("POSTGRES_PASSWORD")
postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
postgres_db = os.environ.get("POSTGRES_DB")
postgres_port = os.environ.get("POSTGRES_PORT")
from bloom.config import settings

# The db url is configured with the db connexion variables declared in the db.yaml file.
db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
)
engine = create_engine(db_url)
engine = create_engine(settings.db_url)
df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/chalutiers_pelagiques.csv"),
sep=";",
90 changes: 70 additions & 20 deletions bloom/config.py
@@ -1,28 +1,78 @@
import os
from pathlib import Path

from pydantic import BaseSettings

def extract_values(filename: str, config: dict, allow_extend: bool = True):
    """Extract key=value pairs from a file.
    Parameters:
    - filename: file name/path from which to extract the key/value pairs
    - config: dict to extend/update with the extracted key/value pairs
    - allow_extend: if True, new keys may be added to config; if False, only keys already present in config are updated
    Returns the dict containing the key/value pairs.
    """
    filepath = Path(os.path.dirname(__file__)).joinpath(filename)
    with open(filepath) as env_file:
        for line in env_file:
            # Split the line at the first occurrence of '=', so values may themselves contain the '=' character
            split = line.strip().split('=', 1)
            # Only keep lines that yield exactly two items (a key and a value)
            if len(split) == 2:
                # Add/update the key/value pair if the key already exists in config (keys are stored lower-cased)
                # OR if adding new keys to config is allowed
                if split[0].lower() in config or allow_extend:
                    config[split[0].lower()] = split[1]
    return config

class Settings(BaseSettings):
    postgres_user = os.environ.get("POSTGRES_USER")
    postgres_password = os.environ.get("POSTGRES_PASSWORD")
    postgres_hostname = os.environ.get("POSTGRES_HOSTNAME")
    postgres_port = os.environ.get("POSTGRES_PORT")
    postgres_db = os.environ.get("POSTGRES_DB")

    print("db_url: ", "postgresql://"+postgres_user+":"+postgres_password+"@"+postgres_hostname+":"+postgres_port+"/"+postgres_db)

    db_url = (
        "postgresql://"
        + postgres_user
        + ":"
        + postgres_password
        + "@"
        + postgres_hostname
        + ":"
        + postgres_port
        + "/"
        + postgres_db
    )
    app_env: str = None
    db_url: str = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Default app_env is 'dev'
        self.app_env = 'dev'

        # dict used to store the temporary/overridden config parameters
        config = {}
        # Extract .env.template as default values.
        # The keys present in .env.template restrict the keys extracted from the following files,
        # so every parameter MUST HAVE a default value declared in .env.template to be loaded
        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env.template")
        if os.path.isfile(file_to_process):
            extract_values(file_to_process, config, allow_extend=True)

        # Extract .env.local and override the existing values.
        # Extracted keys are restricted to the keys already present in .env.template
        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env.local")
        if os.path.isfile(file_to_process):
            extract_values(file_to_process, config, allow_extend=False)

        # APP_ENV may have been overridden by .env.local; use the up-to-date value
        # to select the environment-specific files below
        self.app_env = config.get('app_env', self.app_env)

        # Extract .env.${app_env} and override the existing values.
        # Extracted keys are restricted to the keys already present in .env.template
        file_to_process = Path(os.path.dirname(__file__)).joinpath(f"../.env.{self.app_env}")
        if os.path.isfile(file_to_process):
            extract_values(file_to_process, config, allow_extend=False)

        # Extract .env.${app_env}.local and override the existing values.
        # Extracted keys are restricted to the keys already present in .env.template
        file_to_process = Path(os.path.dirname(__file__)).joinpath(f"../.env.{self.app_env}.local")
        if os.path.isfile(file_to_process):
            extract_values(file_to_process, config, allow_extend=False)

        # Now that all .env.* files have been merged, write the cumulated result to .env.
        # .env is kept for compliance with the docker/docker-compose standard
        file_to_process = Path(os.path.dirname(__file__)).joinpath("../.env")
        with open(file_to_process, "w") as f:
            f.write("# This file was generated automatically by bloom.config\n"
                    "# Don't modify values directly here\n"
                    "# Use the .env.* files instead, then restart the application\n")
            for k, v in config.items():
                f.write(f"{k}={v}\n")
        # Extract the key/value pairs from the freshly written .env and add them to this instance as attributes
        if os.path.isfile(file_to_process):
            extract_values(file_to_process, self.__dict__)

        # Set the db_url attribute containing the connection string to the database
        self.db_url = ("postgresql://"
                       f"{self.postgres_user}:{self.postgres_password}"
                       f"@{self.postgres_hostname}:{self.postgres_port}/{self.postgres_db}")


    srid: int = 4326

3 changes: 2 additions & 1 deletion bloom/usecase/GenerateAlerts.py
@@ -9,6 +9,7 @@
from bloom.infra.repositories.repository_raster import RepositoryRaster
from bloom.logger import logger

from bloom.config import settings

class GenerateAlerts:
def __init__(
@@ -36,7 +37,7 @@ def send_slack_alert(
alert: Alert,
type_name: str = "Vessel in a Protected Area",
) -> int:
slack_url = os.environ.get("SLACK_URL")
slack_url = settings.slack_url
webhook = WebhookClient(slack_url)
blocks = (
"""[
4 changes: 3 additions & 1 deletion bloom/usecase/GetVesselsFromSpire.py
@@ -11,6 +11,8 @@
from bloom.infra.repositories.repository_vessel import RepositoryVessel
from bloom.logger import logger

from bloom.config import settings


class GetVesselsFromSpire:
def __init__(
@@ -19,7 +21,7 @@ def __init__(
) -> None:
self.vessel_repository: RepositoryVessel = vessel_repository

spire_token = os.environ.get("SPIRE_TOKEN")
spire_token = settings.spire_token

self.transport = RequestsHTTPTransport(
url="https://api.spire.com/graphql",
