Adapt more API tests around history contents to use JSON payload
See galaxyproject#12152 for details
davelopez committed Oct 19, 2021
1 parent 576d33a commit 2e6030b
Showing 6 changed files with 27 additions and 27 deletions.
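
Every hunk below follows the same pattern: instead of form-encoding the request and serializing nested fields such as element_identifiers with json.dumps(), the tests now pass the payload to the test helper with json=True so it is submitted as a single JSON body. A rough sketch of the difference using plain requests (the Galaxy test helpers wrap something equivalent; the base URL, API key, and IDs below are illustrative placeholders, not values from this commit):

import json

import requests

GALAXY_URL = "https://usegalaxy.example"  # hypothetical Galaxy instance
API_KEY = "my-api-key"                    # hypothetical API key

payload = {
    "history_id": "abc123",               # hypothetical history id
    "instance_type": "history",
    "collection_type": "list",
    "element_identifiers": [
        {"name": "sample1", "src": "hda", "id": "def456"},  # hypothetical dataset id
    ],
}

# Old style: form-encoded body, so nested structures had to be dumped to strings by hand.
requests.post(
    f"{GALAXY_URL}/api/dataset_collections",
    params={"key": API_KEY},
    data={**payload, "element_identifiers": json.dumps(payload["element_identifiers"])},
)

# New style (what json=True selects in these tests): the whole payload is sent as an
# application/json body, so nested lists and dicts need no json.dumps() at all.
requests.post(
    f"{GALAXY_URL}/api/dataset_collections",
    params={"key": API_KEY},
    json=payload,
)

With requests, the json= keyword serializes the dict and sets the Content-Type header to application/json automatically, which is why the per-field json.dumps() calls can be dropped.
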
26 changes: 13 additions & 13 deletions lib/galaxy_test/api/test_dataset_collections.py
@@ -22,7 +22,7 @@ def test_create_pair_from_history(self):
self.history_id,
instance_type="history",
)
- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
dataset_collection = self._check_create_response(create_response)
returned_datasets = dataset_collection["elements"]
assert len(returned_datasets) == 2, dataset_collection
@@ -33,11 +33,11 @@ def test_create_list_from_history(self):
payload = dict(
instance_type="history",
history_id=self.history_id,
- element_identifiers=json.dumps(element_identifiers),
+ element_identifiers=element_identifiers,
collection_type="list",
)

- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
dataset_collection = self._check_create_response(create_response)
returned_datasets = dataset_collection["elements"]
assert len(returned_datasets) == 3, dataset_collection
@@ -47,7 +47,7 @@ def test_create_list_of_existing_pairs(self):
self.history_id,
instance_type="history",
)
- pair_create_response = self._post("dataset_collections", pair_payload)
+ pair_create_response = self._post("dataset_collections", pair_payload, json=True)
dataset_collection = self._check_create_response(pair_create_response)
hdca_id = dataset_collection["id"]

@@ -58,10 +58,10 @@ def test_create_list_of_existing_pairs(self):
payload = dict(
instance_type="history",
history_id=self.history_id,
- element_identifiers=json.dumps(element_identifiers),
+ element_identifiers=element_identifiers,
collection_type="list",
)
- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
dataset_collection = self._check_create_response(create_response)
returned_collections = dataset_collection["elements"]
assert len(returned_collections) == 1, dataset_collection
@@ -73,9 +73,9 @@ def test_create_list_of_new_pairs(self):
instance_type="history",
history_id=self.history_id,
name="a nested collection",
- element_identifiers=json.dumps(identifiers),
+ element_identifiers=identifiers,
)
- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
dataset_collection = self._check_create_response(create_response)
assert dataset_collection["collection_type"] == "list:paired"
assert dataset_collection["name"] == "a nested collection"
@@ -169,10 +169,10 @@ def test_hda_security(self):
payload = dict(
instance_type="history",
history_id=history_id,
- element_identifiers=json.dumps(element_identifiers),
+ element_identifiers=element_identifiers,
collection_type="paired",
)
- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
self._assert_status_code_is(create_response, 403)

def test_enforces_unique_names(self):
@@ -181,11 +181,11 @@ def test_enforces_unique_names(self):
payload = dict(
instance_type="history",
history_id=self.history_id,
- element_identifiers=json.dumps(element_identifiers),
+ element_identifiers=element_identifiers,
collection_type="list",
)

- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
self._assert_status_code_is(create_response, 400)

def test_upload_collection(self):
@@ -488,7 +488,7 @@ def _compare_collection_contents_elements(self, contents_elements, hdca_elements
def _create_collection_contents_pair(self):
# Create a simple collection, return hdca and contents_url
payload = self.dataset_collection_populator.create_pair_payload(self.history_id, instance_type="history")
- create_response = self._post("dataset_collections", payload)
+ create_response = self._post("dataset_collections", payload, json=True)
hdca = self._check_create_response(create_response)
root_contents_url = self._get_contents_url_for_hdca(hdca)
return hdca, root_contents_url
4 changes: 2 additions & 2 deletions lib/galaxy_test/api/test_jobs.py
@@ -543,7 +543,7 @@ def test_search(self, history_id):
# We first copy the datasets, so that the update time is lower than the job creation time
new_history_id = self.dataset_populator.new_history()
copy_payload = {"content": dataset_id, "source": "hda", "type": "dataset"}
- copy_response = self._post(f"histories/{new_history_id}/contents", data=copy_payload)
+ copy_response = self._post(f"histories/{new_history_id}/contents", data=copy_payload, json=True)
self._assert_status_code_is(copy_response, 200)
inputs = json.dumps({
'input1': {'src': 'hda', 'id': dataset_id}
@@ -650,7 +650,7 @@ def test_search_with_hdca_pair_input(self, history_id):
# We test that a job can be found even if the collection has been copied to another history
new_history_id = self.dataset_populator.new_history()
copy_payload = {"content": list_id_a, "source": "hdca", "type": "dataset_collection"}
- copy_response = self._post(f"histories/{new_history_id}/contents", data=copy_payload)
+ copy_response = self._post(f"histories/{new_history_id}/contents", data=copy_payload, json=True)
self._assert_status_code_is(copy_response, 200)
new_list_a = copy_response.json()['id']
copied_inputs = json.dumps({
12 changes: 6 additions & 6 deletions lib/galaxy_test/api/test_libraries.py
@@ -409,17 +409,17 @@ def test_import_paired_collection(self):
'name': collection_name,
'collection_type': 'list:paired',
"type": "dataset_collection",
- 'element_identifiers': json.dumps([
+ 'element_identifiers': [
{
'src': 'new_collection',
'name': 'pair1',
'collection_type': 'paired',
'element_identifiers': [{'name': 'forward', 'src': 'ldda', 'id': ld['id']},
{'name': 'reverse', 'src': 'ldda', 'id': ld['id']}]
}
- ])
+ ]
}
- new_collection = self._post(url, payload).json()
+ new_collection = self._post(url, payload, json=True).json()
assert new_collection['name'] == collection_name
pair = new_collection['elements'][0]
assert pair['element_identifier'] == 'pair1'
@@ -438,14 +438,14 @@ def _import_to_history(self, visible=True):
"history_id": history_id,
"name": collection_name,
"hide_source_items": not visible,
"element_identifiers": json.dumps([{
"element_identifiers": [{
"id": ld['id'],
"name": element_identifer,
"src": "ldda"}]),
"src": "ldda"}],
"type": "dataset_collection",
"elements": []
}
- new_collection = self._post(url, payload).json()
+ new_collection = self._post(url, payload, json=True).json()
assert new_collection['name'] == collection_name
assert new_collection['element_count'] == 1
element = new_collection['elements'][0]
4 changes: 2 additions & 2 deletions lib/galaxy_test/api/test_workflow_extraction.py
@@ -454,14 +454,14 @@ def __copy_content_to_history(self, history_id, content):
source="hda",
content=content["id"]
)
- response = self._post(f"histories/{history_id}/contents/datasets", payload)
+ response = self._post(f"histories/{history_id}/contents/datasets", payload, json=True)

else:
payload = dict(
source="hdca",
content=content["id"]
)
- response = self._post(f"histories/{history_id}/contents/dataset_collections", payload)
+ response = self._post(f"histories/{history_id}/contents/dataset_collections", payload, json=True)
self._assert_status_code_is(response, 200)
return response.json()

4 changes: 2 additions & 2 deletions lib/galaxy_test/api/test_workflows.py
@@ -2877,7 +2877,7 @@ def test_workflow_rerun_with_use_cached_job(self):
new_ds_map = json.loads(new_workflow_request['ds_map'])
for key, input_values in invocation_1['inputs'].items():
copy_payload = {"content": input_values['id'], "source": "hda", "type": "dataset"}
- copy_response = self._post(f"histories/{history_id_two}/contents", data=copy_payload).json()
+ copy_response = self._post(f"histories/{history_id_two}/contents", data=copy_payload, json=True).json()
new_ds_map[key]['id'] = copy_response['id']
new_workflow_request['ds_map'] = json.dumps(new_ds_map, sort_keys=True)
new_workflow_request['history'] = f"hist_id={history_id_two}"
@@ -2914,7 +2914,7 @@ def test_nested_workflow_rerun_with_use_cached_job(self):
dataset_type = inputs['outer_input']['src']
dataset_id = inputs['outer_input']['id']
copy_payload = {"content": dataset_id, "source": dataset_type, "type": "dataset"}
- copy_response = self._post(f"histories/{history_id_two}/contents", data=copy_payload)
+ copy_response = self._post(f"histories/{history_id_two}/contents", data=copy_payload, json=True)
self._assert_status_code_is(copy_response, 200)
new_dataset_id = copy_response.json()['id']
inputs['outer_input']['id'] = new_dataset_id
4 changes: 2 additions & 2 deletions lib/galaxy_test/base/populators.py
@@ -1454,7 +1454,7 @@ def create_nested_collection(self, history_id, collection_type, name=None, colle
payload = dict(
instance_type="history",
history_id=history_id,
- element_identifiers=json.dumps(element_identifiers),
+ element_identifiers=element_identifiers,
collection_type=collection_type,
name=name,
)
@@ -1855,7 +1855,7 @@ def __init__(self, gi):
self.dataset_collection_populator = GiDatasetCollectionPopulator(gi)

def _create_collection(self, payload):
- create_response = self._post("dataset_collections", data=payload)
+ create_response = self._post("dataset_collections", data=payload, json=True)
return create_response


