@@ -47,8 +47,8 @@ def create_fhir_datastore(
        identity_provider_configuration: dict[str, any] = None,
    ) -> dict[str, str]:
        """
-        Creates a new HealthLake datastore.
-        When creating a SMART on FHIR datastore, the following parameters are required:
+        Creates a new HealthLake data store.
+        When creating a SMART on FHIR data store, the following parameters are required:
        - sse_configuration: The server-side encryption configuration for a SMART on FHIR-enabled data store.
        - identity_provider_configuration: The identity provider configuration for a SMART on FHIR-enabled data store.
@@ -73,7 +73,7 @@ def create_fhir_datastore(
            return response
        except ClientError as err:
            logger.exception(
-                "Couldn't create datastore %s. Here's why %s",
+                "Couldn't create data store %s. Here's why %s",
                datastore_name,
                err.response["Error"]["Message"],
            )
@@ -84,9 +84,9 @@ def create_fhir_datastore(
    # snippet-start:[python.example_code.healthlake.DescribeFHIRDatastore]
    def describe_fhir_datastore(self, datastore_id: str) -> dict[str, any]:
        """
-        Describes a HealthLake datastore.
-        :param datastore_id: The datastore ID.
-        :return: The datastore description.
+        Describes a HealthLake data store.
+        :param datastore_id: The data store ID.
+        :return: The data store description.
        """
        try:
            response = self.health_lake_client.describe_fhir_datastore(
@@ -95,7 +95,7 @@ def describe_fhir_datastore(self, datastore_id: str) -> dict[str, any]:
            return response["DatastoreProperties"]
        except ClientError as err:
            logger.exception(
-                "Couldn't describe datastore with ID %s. Here's why %s",
+                "Couldn't describe data store with ID %s. Here's why %s",
                datastore_id,
                err.response["Error"]["Message"],
            )
@@ -106,8 +106,8 @@ def describe_fhir_datastore(self, datastore_id: str) -> dict[str, any]:
    # snippet-start:[python.example_code.healthlake.ListFHIRDatastores]
    def list_fhir_datastores(self) -> list[dict[str, any]]:
        """
-        Lists all HealthLake datastores.
-        :return: A list of datastore descriptions.
+        Lists all HealthLake data stores.
+        :return: A list of data store descriptions.
        """
        try:
            next_token = None
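
The loop that follows next_token in this method pages through results; a standalone sketch of the same pagination pattern against the boto3 client (client setup and region are assumptions) might look like:

import boto3

# Accumulate every page; response keys follow the boto3 list_fhir_datastores API.
client = boto3.client("healthlake", region_name="us-east-1")
datastores = []
next_token = None
while True:
    params = {"NextToken": next_token} if next_token else {}
    response = client.list_fhir_datastores(**params)
    datastores.extend(response["DatastorePropertiesList"])
    next_token = response.get("NextToken")
    if not next_token:
        break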
@@ -128,22 +128,22 @@ def list_fhir_datastores(self) -> list[dict[str, any]]:
            return datastores
        except ClientError as err:
            logger.exception(
-                "Couldn't list datastores. Here's why %s", err.response["Error"]["Message"]
+                "Couldn't list data stores. Here's why %s", err.response["Error"]["Message"]
            )
            raise
    # snippet-end:[python.example_code.healthlake.ListFHIRDatastores]

    # snippet-start:[python.example_code.healthlake.DeleteFHIRDatastore]
    def delete_fhir_datastore(self, datastore_id: str) -> None:
        """
-        Deletes a HealthLake datastore.
-        :param datastore_id: The datastore ID.
+        Deletes a HealthLake data store.
+        :param datastore_id: The data store ID.
        """
        try:
            self.health_lake_client.delete_fhir_datastore(DatastoreId=datastore_id)
        except ClientError as err:
            logger.exception(
-                "Couldn't delete datastore with ID %s. Here's why %s",
+                "Couldn't delete data store with ID %s. Here's why %s",
                datastore_id,
                err.response["Error"]["Message"],
            )
@@ -164,7 +164,7 @@ def start_fhir_import_job(
        """
        Starts a HealthLake import job.
        :param job_name: The import job name.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param input_s3_uri: The input S3 URI.
        :param job_output_s3_uri: The job output S3 URI.
        :param kms_key_id: The KMS key ID associated with the output S3 bucket.
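
A hedged sketch of the boto3 start_fhir_import_job request these parameters presumably feed into; the bucket, key, role, and data store ID values are placeholders.

import boto3

# Placeholder values; the request shape follows the boto3 HealthLake API.
client = boto3.client("healthlake", region_name="us-east-1")
import_job = client.start_fhir_import_job(
    JobName="my_import_job",
    InputDataConfig={"S3Uri": "s3://amzn-s3-demo-bucket/import/examples/"},
    JobOutputDataConfig={
        "S3Configuration": {
            "S3Uri": "s3://amzn-s3-demo-bucket/import/output/",
            "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/EXAMPLE-KEY-ID",
        }
    },
    DatastoreId="0123456789abcdef0123456789abcdef",
    DataAccessRoleArn="arn:aws:iam::123456789012:role/healthlake_access",
)
print(import_job["JobId"], import_job["JobStatus"])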
@@ -200,7 +200,7 @@ def describe_fhir_import_job(
    ) -> dict[str, any]:
        """
        Describes a HealthLake import job.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_id: The import job ID.
        :return: The import job description.
        """
@@ -230,7 +230,7 @@ def list_fhir_import_jobs(
    ) -> list[dict[str, any]]:
        """
        Lists HealthLake import jobs satisfying the conditions.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_name: The import job name.
        :param job_status: The import job status.
        :param submitted_before: The import job submitted before the specified date.
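
For reference, filtering import jobs through the boto3 client might be sketched as follows; all filter values and the data store ID are placeholders.

import boto3
from datetime import datetime, timezone

# Placeholder filters; parameter names follow the boto3 list_fhir_import_jobs API.
client = boto3.client("healthlake", region_name="us-east-1")
response = client.list_fhir_import_jobs(
    DatastoreId="0123456789abcdef0123456789abcdef",
    JobName="my_import_job",
    JobStatus="COMPLETED",
    SubmittedBefore=datetime(2025, 1, 1, tzinfo=timezone.utc),
    MaxResults=10,
)
for job in response["ImportJobPropertiesList"]:
    print(job["JobId"], job["JobStatus"])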
@@ -281,7 +281,7 @@ def start_fhir_export_job(
        """
        Starts a HealthLake export job.
        :param job_name: The export job name.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param output_s3_uri: The output S3 URI.
        :param kms_key_id: The KMS key ID associated with the output S3 bucket.
        :param data_access_role_arn: The data access role ARN.
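
Similarly, a hedged sketch of the boto3 start_fhir_export_job call for these parameters; bucket, key, role, and data store ID values are placeholders.

import boto3

# Placeholder values; the request shape follows the boto3 HealthLake API.
client = boto3.client("healthlake", region_name="us-east-1")
export_job = client.start_fhir_export_job(
    JobName="my_export_job",
    OutputDataConfig={
        "S3Configuration": {
            "S3Uri": "s3://amzn-s3-demo-bucket/export/output/",
            "KmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/EXAMPLE-KEY-ID",
        }
    },
    DatastoreId="0123456789abcdef0123456789abcdef",
    DataAccessRoleArn="arn:aws:iam::123456789012:role/healthlake_access",
)
print(export_job["JobId"], export_job["JobStatus"])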
@@ -313,7 +313,7 @@ def describe_fhir_export_job(
    ) -> dict[str, any]:
        """
        Describes a HealthLake export job.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_id: The export job ID.
        :return: The export job description.
        """
@@ -343,7 +343,7 @@ def list_fhir_export_jobs(
    ) -> list[dict[str, any]]:
        """
        Lists HealthLake export jobs satisfying the conditions.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_name: The export job name.
        :param job_status: The export job status.
        :param submitted_before: The export job submitted before the specified date.
@@ -448,11 +448,11 @@ def untag_resource(self, resource_arn: str, tag_keys: list[str]) -> None:

    def wait_datastore_active(self, datastore_id: str) -> None:
        """
-        Waits for a HealthLake datastore to become active.
-        :param datastore_id: The datastore ID.
+        Waits for a HealthLake data store to become active.
+        :param datastore_id: The data store ID.
        """
        counter = 0
-        max_count_minutes = 40  # It can take a while to create a datastore, so we'll wait up to 40 minutes.
+        max_count_minutes = 40  # It can take a while to create a data store, so we'll wait up to 40 minutes.
        status = "CREATING"
        while counter < max_count_minutes:
            datastore = self.health_lake_client.describe_fhir_datastore(
@@ -468,25 +468,25 @@ def wait_datastore_active(self, datastore_id: str) -> None:

        if status == "ACTIVE":
            print(
-                f"Datastore with ID {datastore_id} is active after {counter} minutes."
+                f"Data store with ID {datastore_id} is active after {counter} minutes."
            )
        elif status == "CREATE_FAILED":
            raise ClientError(
-                "Create datastore with ID %s failed after %d minutes.",
+                "Create data store with ID %s failed after %d minutes.",
                datastore_id,
                counter,
            )
        else:
            raise ClientError(
-                "Datastore with ID %s is not active after %d minutes.",
+                "Data store with ID %s is not active after %d minutes.",
                datastore_id,
                counter,
            )

    def wait_import_job_complete(self, datastore_id: str, job_id: str) -> None:
        """
        Waits for a HealthLake import job to complete.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_id: The import job ID.
        """
        counter = 0
@@ -520,7 +520,7 @@ def wait_import_job_complete(self, datastore_id: str, job_id: str) -> None:
    def wait_export_job_complete(self, datastore_id: str, job_id: str) -> None:
        """
        Waits for a HealthLake export job to complete.
-        :param datastore_id: The datastore ID.
+        :param datastore_id: The data store ID.
        :param job_id: The export job ID.
        """
        counter = 0
@@ -549,9 +549,22 @@ def wait_export_job_complete(self, datastore_id: str, job_id: str) -> None:
            )

    def health_lake_demo(self) -> None:
-        use_smart_on_fhir_data_store = True
+        use_smart_on_fhir_data_store = False
+
+        # Change the following variables to match your environment.
+        datastore_name = "health_imaging_datastore"
+        import_job_name = "my_import_job"
+        input_s3_uri = (
+            "s3://amzn-s3-demo-bucket/import/examples/patient_example.json"
+        )
+        kms_key_id = "arn:aws:kms:us-east-1:123456789012:key/b7f645cb-e564-4981-8672-9e012d1ff1a0"
+        data_access_role_arn = (
+            "arn:aws:iam::123456789012:role/healthlake_access"
+        )
+        export_job_name = "my_export_job"
+        output_s3_uri = "s3://amzn-s3-demo-bucket/export/output/"
+

-        datastore_name = "health_imaging_datastore2"
        if use_smart_on_fhir_data_store:
            # snippet-start:[python.example_code.healthlake.CreateFHIRDatastore.smart]
            sse_configuration = {
@@ -605,7 +618,7 @@ def health_lake_demo(self) -> None:
        for data_store in data_stores:
            if data_store["DatastoreId"] == data_store_id:
                logger.info(
-                    "Datastore with ID %s is %s.",
+                    "Data store with ID %s is %s.",
                    data_store_id,
                    data_store["DatastoreStatus"],
                )
@@ -625,17 +638,8 @@ def health_lake_demo(self) -> None:

        self.untag_resource(data_store_arn, keys)

-        job_name = "my_import_job"
-        input_s3_uri = (
-            "s3://health-lake-test-827365/import/examples/patient_example_chalmers.json"
-        )
-        output_s3_uri = "s3://health-lake-test-827365/import/output/"
-        kms_key_id = "arn:aws:kms:us-east-1:123502194722:key/b7f645cb-e564-4981-8672-9e012d1ff1a0"
-        data_access_role_arn = (
-            "arn:aws:iam::123502194722:role/healthlaketest37-ahl-full-access"
-        )
        import_job = self.start_fhir_import_job(
-            job_name,
+            import_job_name,
            data_store_id,
            input_s3_uri,
            output_s3_uri,
@@ -657,10 +661,8 @@ def health_lake_demo(self) -> None:
            f"Job id: {import_job['JobId']}, status: {import_job['JobStatus']}, submit time: {import_job['SubmitTime']}"
        )

-        job_name = "my_export_job"
-        output_s3_uri = "s3://health-lake-test-827365/export/output/"
        export_job = self.start_fhir_export_job(
-            job_name, data_store_id, output_s3_uri, kms_key_id, data_access_role_arn
+            export_job_name, data_store_id, output_s3_uri, kms_key_id, data_access_role_arn
        )

        export_job_id = export_job["JobId"]