 
 dataset_id = "test_dataset_{}".format(uuid.uuid4())
 fhir_store_id = "test_fhir_store-{}".format(uuid.uuid4())
+test_fhir_store_id = "test_fhir_store-{}".format(uuid.uuid4())
 
 gcs_uri = os.environ["CLOUD_STORAGE_BUCKET"]
 RESOURCES = os.path.join(os.path.dirname(__file__), "resources")
@@ -53,6 +54,39 @@ def test_dataset():
     datasets.delete_dataset(service_account_json, project_id, cloud_region, dataset_id)
 
 
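+# Module-scoped fixture: the FHIR store is created once for this test module,
+# shared by every test that requests it, and deleted after the last test in
+# the module runs (the code after `yield` is pytest's teardown phase).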
+@pytest.fixture(scope="module")
+def test_fhir_store():
+    resp = fhir_stores.create_fhir_store(
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
+    )
+
+    yield resp
+
+    fhir_stores.delete_fhir_store(
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
+    )
+
+
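+# Module-scoped fixture: uploads the local FHIR resource file to the GCS
+# bucket once, yields the blob to the tests, and deletes it at teardown,
+# tolerating a 404 if the blob is already gone.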
+@pytest.fixture(scope="module")
+def test_blob():
+    storage_client = storage.Client()
+    bucket = storage_client.get_bucket(gcs_uri)
+    blob = bucket.blob(source_file_name)
+
+    blob.upload_from_filename(resource_file)
+
+    yield blob
+
+    # Clean up
+    try:
+        blob.delete()
+    # If blob not found, then it's already been deleted, so no need to clean up.
+    except exceptions.NotFound as e:
+        print(f'Ignoring 404: {str(e)}')
+
+
 def test_CRUD_fhir_store(test_dataset, capsys):
     fhir_stores.create_fhir_store(
         service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
@@ -79,110 +113,67 @@ def test_CRUD_fhir_store(test_dataset, capsys):
     assert "Deleted FHIR store" in out
 
 
-def test_patch_fhir_store(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_patch_fhir_store(test_dataset, test_fhir_store, capsys):
     fhir_stores.patch_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
     )
 
     out, _ = capsys.readouterr()
 
     assert "Patched FHIR store" in out
 
 
-def test_import_fhir_store_gcs(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
-    storage_client = storage.Client()
-    bucket = storage_client.get_bucket(gcs_uri)
-    blob = bucket.blob(source_file_name)
-
-    blob.upload_from_filename(resource_file)
+def test_import_fhir_store_gcs(
+        test_dataset, test_fhir_store, test_blob, capsys):
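+    # Store creation/deletion and the GCS source blob are handled by the
+    # test_fhir_store and test_blob fixtures above.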
 
     time.sleep(10)  # Give new blob time to propagate
+
     fhir_stores.import_fhir_resources(
         service_account_json,
         project_id,
         cloud_region,
         dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
         import_object,
     )
 
-    # Clean up
-    try:
-        blob.delete()
-    # If blob not found, then it's already been deleted, so no need to clean up.
-    except exceptions.NotFound:
-        pass
-
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
     out, _ = capsys.readouterr()
 
     assert "Imported FHIR resources" in out
 
 
-def test_export_fhir_store_gcs(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_export_fhir_store_gcs(test_dataset, test_fhir_store, capsys):
     fhir_stores.export_fhir_store_gcs(
         service_account_json,
         project_id,
         cloud_region,
         dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
         gcs_uri,
     )
 
-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
     out, _ = capsys.readouterr()
 
     assert "Exported FHIR resources to bucket" in out
 
 
-def test_get_set_fhir_store_iam_policy(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_get_set_fhir_store_iam_policy(test_dataset, test_fhir_store, capsys):
     get_response = fhir_stores.get_fhir_store_iam_policy(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
     )
 
     set_response = fhir_stores.set_fhir_store_iam_policy(
         service_account_json,
         project_id,
         cloud_region,
         dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
         "serviceAccount:python-docs-samples-tests@appspot.gserviceaccount.com",
         "roles/viewer",
     )
 
-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
     out, _ = capsys.readouterr()
 
     assert "etag" in get_response