Merge pull request #150 from AllenNeuralDynamics/build-transfer-service
Release v1.4.0
jtyoung84 authored Sep 13, 2024
2 parents ab3efa2 + 427616c commit a68030f
Showing 5 changed files with 64 additions and 9 deletions.
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -18,9 +18,10 @@ dynamic = ["version"]

dependencies = [
'boto3',
-'pydantic>=2.0',
+'pydantic>=2.7,<2.9',
'pydantic-settings>=2.0',
-'aind-data-transfer-models==0.6.2'
+'aind-data-schema>=1.0.0',
+'aind-data-transfer-models==0.8.3'
]

[project.optional-dependencies]
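The dependency changes above pin pydantic to the 2.7–2.8 series, add aind-data-schema, and bump aind-data-transfer-models to 0.8.3. Below is a minimal sketch, not part of this repository, for sanity-checking a local environment against the new pins; it assumes the packaging library is available in addition to the standard library.

from importlib.metadata import version

from packaging.specifiers import SpecifierSet  # assumption: `packaging` is installed

# Specifier strings copied from the updated pyproject.toml above.
PINS = {
    "pydantic": SpecifierSet(">=2.7,<2.9"),
    "aind-data-schema": SpecifierSet(">=1.0.0"),
    "aind-data-transfer-models": SpecifierSet("==0.8.3"),
}

for name, spec in PINS.items():
    installed = version(name)
    status = "ok" if installed in spec else "mismatch"
    print(f"{name}=={installed}: {status} against {spec}")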
2 changes: 1 addition & 1 deletion src/aind_data_transfer_service/__init__.py
@@ -1,7 +1,7 @@
"""Init package"""
import os

-__version__ = "1.3.0"
+__version__ = "1.4.0"

# Global constants
OPEN_DATA_BUCKET_NAME = os.getenv("OPEN_DATA_BUCKET_NAME", "open")
8 changes: 4 additions & 4 deletions src/aind_data_transfer_service/configs/job_configs.py
@@ -33,7 +33,7 @@ class ModalityConfigs(BaseSettings):
# added to the Modality class
_MODALITY_MAP: ClassVar = {
m().abbreviation.upper().replace("-", "_"): m().abbreviation
-for m in Modality._ALL
+for m in Modality.ALL
}

# Optional number id to assign to modality config
@@ -116,10 +116,10 @@ class BasicUploadJobConfigs(BaseSettings):
extra="allow",
)

-# Need some way to extract abbreviations. Maybe a public method can be
-# added to the Platform class
+# Legacy way required users to input platform in screaming snake case
_PLATFORM_MAP: ClassVar = {
-p().abbreviation.upper(): p().abbreviation for p in Platform._ALL
+a.upper().replace("-", "_"): a
+for a in Platform.abbreviation_map.keys()
}
_MODALITY_ENTRY_PATTERN: ClassVar = re.compile(r"^modality(\d*)$")
_DATETIME_PATTERN1: ClassVar = re.compile(
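The two hunks above move off the private Modality._ALL and Platform._ALL attributes and onto the public Modality.ALL and Platform.abbreviation_map provided by newer aind-data-schema releases. A minimal sketch of the legacy screaming-snake-case lookup after this change, assuming abbreviation_map is a dict keyed by abbreviation strings and that the import path below is correct:

from aind_data_schema_models.platforms import Platform  # assumed import path

# Mirrors the updated _PLATFORM_MAP in job_configs.py: maps legacy
# screaming-snake-case user input (hyphens swapped for underscores, upper case)
# back to the canonical abbreviation string.
_PLATFORM_MAP = {
    a.upper().replace("-", "_"): a
    for a in Platform.abbreviation_map.keys()
}

# "ecephys" is assumed to be one of the abbreviations (it appears in the tests
# below), so a legacy value resolves like this:
print(_PLATFORM_MAP.get("ECEPHYS"))  # expected: "ecephys"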
4 changes: 2 additions & 2 deletions src/aind_data_transfer_service/configs/job_upload_template.py
@@ -84,13 +84,13 @@ def validators(self) -> List[Dict[str, Any]]:
{
"name": "platform",
"type": "list",
-"options": [p().abbreviation for p in Platform._ALL],
+"options": list(Platform.abbreviation_map.keys()),
"column_indexes": [self.HEADERS.index("platform")],
},
{
"name": "modality",
"type": "list",
-"options": [m().abbreviation for m in Modality._ALL],
+"options": list(Modality.abbreviation_map.keys()),
"column_indexes": [
self.HEADERS.index("modality0"),
self.HEADERS.index("modality1"),
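The Excel template validators above now build their dropdown options directly from abbreviation_map rather than by instantiating every class in _ALL. A hedged sketch of how those option lists could be used to check a spreadsheet row; the row contents and the import paths are assumptions:

from aind_data_schema_models.modalities import Modality  # assumed import paths
from aind_data_schema_models.platforms import Platform

platform_options = list(Platform.abbreviation_map.keys())
modality_options = list(Modality.abbreviation_map.keys())

# Hypothetical spreadsheet row; "ecephys" is assumed to be a valid abbreviation.
row = {"platform": "ecephys", "modality0": "ecephys"}

assert row["platform"] in platform_options
assert row["modality0"] in modality_options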
54 changes: 54 additions & 0 deletions tests/test_server.py
@@ -18,6 +18,7 @@
SubmitJobRequest,
V0036JobProperties,
)
+from aind_data_transfer_models.trigger import TriggerConfigModel, ValidJobType
from fastapi.responses import StreamingResponse
from fastapi.testclient import TestClient
from pydantic import SecretStr
@@ -1604,6 +1605,59 @@ def test_submit_v1_jobs_200_session_settings_config_file(
)
self.assertEqual(200, submit_job_response.status_code)

@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
@patch("requests.post")
def test_submit_v1_jobs_200_trigger_capsule_configs(
self,
mock_post: MagicMock,
):
"""Tests submission when user adds trigger_capsule_configs"""

mock_response = Response()
mock_response.status_code = 200
mock_response._content = json.dumps({"message": "sent"}).encode(
"utf-8"
)
mock_post.return_value = mock_response
ephys_source_dir = PurePosixPath("shared_drive/ephys_data/690165")

s3_bucket = "private"
subject_id = "690165"
acq_datetime = datetime(2024, 2, 19, 11, 25, 17)
platform = Platform.ECEPHYS

trigger_capsule_settings = TriggerConfigModel(
job_type=ValidJobType.RUN_GENERIC_PIPELINE, capsule_id="abc-123"
)
ephys_config = ModalityConfigs(
modality=Modality.ECEPHYS,
source=ephys_source_dir,
)
project_name = "Ephys Platform"

upload_job_configs = BasicUploadJobConfigs(
project_name=project_name,
s3_bucket=s3_bucket,
platform=platform,
subject_id=subject_id,
acq_datetime=acq_datetime,
modalities=[ephys_config],
trigger_capsule_configs=trigger_capsule_settings,
)

upload_jobs = [upload_job_configs]
submit_request = SubmitJobRequest(upload_jobs=upload_jobs)

post_request_content = json.loads(
submit_request.model_dump_json(round_trip=True)
)

with TestClient(app) as client:
submit_job_response = client.post(
url="/api/v1/submit_jobs", json=post_request_content
)
self.assertEqual(200, submit_job_response.status_code)


if __name__ == "__main__":
unittest.main()
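The new test above exercises trigger_capsule_configs end to end against the FastAPI test client. The sketch below mirrors it from the point of view of an external client posting to a running service; the host name is a placeholder and the import paths for the request models are assumptions (the test imports them from aind_data_transfer_models, but the exact modules are not visible in this diff).

import json
from datetime import datetime
from pathlib import PurePosixPath

import requests
from aind_data_schema_models.modalities import Modality  # assumed import path
from aind_data_schema_models.platforms import Platform  # assumed import path
from aind_data_transfer_models.core import (  # assumed import path
    BasicUploadJobConfigs,
    ModalityConfigs,
    SubmitJobRequest,
)
from aind_data_transfer_models.trigger import TriggerConfigModel, ValidJobType

trigger_capsule_settings = TriggerConfigModel(
    job_type=ValidJobType.RUN_GENERIC_PIPELINE,
    capsule_id="abc-123",  # placeholder capsule id, as in the test
)
upload_job = BasicUploadJobConfigs(
    project_name="Ephys Platform",
    s3_bucket="private",
    platform=Platform.ECEPHYS,
    subject_id="690165",
    acq_datetime=datetime(2024, 2, 19, 11, 25, 17),
    modalities=[
        ModalityConfigs(
            modality=Modality.ECEPHYS,
            source=PurePosixPath("shared_drive/ephys_data/690165"),
        )
    ],
    trigger_capsule_configs=trigger_capsule_settings,
)
payload = json.loads(
    SubmitJobRequest(upload_jobs=[upload_job]).model_dump_json(round_trip=True)
)
# Placeholder URL; point this at a deployed aind-data-transfer-service instance.
response = requests.post("http://localhost:8000/api/v1/submit_jobs", json=payload)
print(response.status_code)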
