Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Temporary way of passing *some* environment variables to remote nodes #1564

Merged
merged 1 commit into from
Oct 4, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions metaflow/plugins/airflow/airflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@
SERVICE_HEADERS,
SERVICE_INTERNAL_URL,
)

from metaflow.metaflow_config_funcs import config_values

from metaflow.parameters import (
DelayedEvaluationParameter,
JSONTypeClass,
Expand Down Expand Up @@ -335,6 +338,16 @@ def _to_job(self, node):
metaflow_version["production_token"] = self.production_token
env["METAFLOW_VERSION"] = json.dumps(metaflow_version)

# Temporary passing of *some* environment variables. Do not rely on this
# mechanism as it will be removed in the near future
env.update(
{
k: v
for k, v in config_values()
if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_")
}
)

# Extract the k8s decorators for constructing the arguments of the K8s Pod Operator on Airflow.
k8s_deco = [deco for deco in node.decorators if deco.name == "kubernetes"][0]
user_code_retries, _ = self._get_retries(node)
Expand Down
15 changes: 15 additions & 0 deletions metaflow/plugins/argo/argo_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@
UI_URL,
ARGO_WORKFLOWS_UI_URL,
)

from metaflow.metaflow_config_funcs import config_values

from metaflow.mflog import BASH_SAVE_LOGS, bash_capture_logs, export_mflog_env_vars
from metaflow.parameters import deploy_time_eval
from metaflow.plugins.kubernetes.kubernetes import (
Expand Down Expand Up @@ -1164,6 +1167,18 @@ def _container_templates(self):
0
].attributes["vars"]
)

# Temporary passing of *some* environment variables. Do not rely on this
# mechanism as it will be removed in the near future
env.update(
    {
        k: v
        for k, v in config_values()
        if k.startswith("METAFLOW_CONDA_")
        or k.startswith("METAFLOW_DEBUG_")
    }
)

env.update(
{
**{
Expand Down
10 changes: 10 additions & 0 deletions metaflow/plugins/aws/batch/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@
AWS_SECRETS_MANAGER_DEFAULT_REGION,
S3_SERVER_SIDE_ENCRYPTION,
)

from metaflow.metaflow_config_funcs import config_values

from metaflow.mflog import (
export_mflog_env_vars,
bash_capture_logs,
Expand Down Expand Up @@ -249,6 +252,13 @@ def create_job(
.environment_variable("METAFLOW_CARD_S3ROOT", CARD_S3ROOT)
.environment_variable("METAFLOW_RUNTIME_ENVIRONMENT", "aws-batch")
)

# Temporary passing of *some* environment variables. Do not rely on this
# mechanism as it will be removed in the near future
for k, v in config_values():
    if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_"):
        job.environment_variable(k, v)

if DEFAULT_SECRETS_BACKEND_TYPE is not None:
job.environment_variable(
"METAFLOW_DEFAULT_SECRETS_BACKEND_TYPE", DEFAULT_SECRETS_BACKEND_TYPE
Expand Down
8 changes: 8 additions & 0 deletions metaflow/plugins/kubernetes/kubernetes.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@
SERVICE_INTERNAL_URL,
S3_SERVER_SIDE_ENCRYPTION,
)
from metaflow.metaflow_config_funcs import config_values

from metaflow.mflog import (
BASH_SAVE_LOGS,
bash_capture_logs,
Expand Down Expand Up @@ -259,6 +261,12 @@ def create_job(
# see get_datastore_root_from_config in datastore/local.py).
)

# Temporary passing of *some* environment variables. Do not rely on this
# mechanism as it will be removed in the near future
for k, v in config_values():
    if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_"):
        job.environment_variable(k, v)

if S3_SERVER_SIDE_ENCRYPTION is not None:
job.environment_variable(
"METAFLOW_S3_SERVER_SIDE_ENCRYPTION", S3_SERVER_SIDE_ENCRYPTION
Expand Down
Loading