
Remove unnecessary json dumps in tests
Since JSON is now the default, the conversion from dict is done directly in the request.
davelopez committed Jun 17, 2021
1 parent 618fa7a commit 884b537
Showing 2 changed files with 35 additions and 37 deletions.
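The reasoning behind the change: with requests-style HTTP clients, a dict passed through the json= keyword is serialized and the Content-Type: application/json header is set automatically, so callers no longer need to pre-serialize with json.dumps. A minimal sketch of the two call styles (the URL and history ID below are illustrative, not actual Galaxy values):

import json
import requests

history_id = "f2db41e1fa331b3e"  # hypothetical encoded ID, for illustration only
url = f"https://usegalaxy.example/api/histories/{history_id}"

# Old style: the caller serializes the payload and sets the content type by hand.
requests.put(url, data=json.dumps({"published": True}),
             headers={"Content-Type": "application/json"})

# New style: requests serializes the dict and sets
# "Content-Type: application/json" itself.
requests.put(url, json={"published": True})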
lib/galaxy/tool_util/verify/interactor.py (7 changes: 3 additions & 4 deletions)
@@ -1,5 +1,4 @@
 import io
-import json
 import os
 import re
 import shutil
@@ -283,7 +282,7 @@ def new_history(self, history_name='test_history', publish_history=False):
         return history_id

     def publish_history(self, history_id):
-        response = self._put(f'histories/{history_id}', json.dumps({'published': True}))
+        response = self._put(f'histories/{history_id}', {'published': True})
         response.raise_for_status()

     @nottest
@@ -461,7 +460,7 @@ def run_tool(self, testdef, history_id, resource_parameters=None):
     def _create_collection(self, history_id, collection_def):
         create_payload = dict(
             name=collection_def.name,
-            element_identifiers=dumps(self._element_identifiers(collection_def)),
+            element_identifiers=self._element_identifiers(collection_def),
             collection_type=collection_def.collection_type,
             history_id=history_id,
         )
@@ -614,7 +613,7 @@ def __submit_tool(self, history_id, tool_id, tool_input, extra_data=None, files=
         data = dict(
             history_id=history_id,
             tool_id=tool_id,
-            inputs=dumps(tool_input),
+            inputs=tool_input,
             **extra_data
         )
         return self._post("tools", files=files, data=data)
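On the helper side, "JSON is the default" plausibly means the interactor's request methods serialize dict payloads themselves. A minimal sketch under that assumption (the class name and method body here are hypothetical, not the actual Galaxy implementation):

import requests

class Interactor:
    def __init__(self, base_url, api_key):
        self.base_url = base_url
        self.api_key = api_key

    def _put(self, path, data=None):
        url = f"{self.base_url}/api/{path}"
        params = {"key": self.api_key}
        if isinstance(data, dict):
            # JSON by default: dict payloads are serialized here, so callers
            # pass plain dicts instead of json.dumps(...) strings.
            return requests.put(url, params=params, json=data)
        return requests.put(url, params=params, data=data)

With a helper along these lines, call sites shrink to self._put(f'histories/{history_id}', {'published': True}), exactly the pattern in the diff above.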
lib/galaxy_test/api/test_workflows.py (65 changes: 32 additions & 33 deletions)
@@ -1,6 +1,5 @@
 import json
 import time
-from json import dumps
 from uuid import uuid4

 import pytest
@@ -142,17 +141,17 @@ def _setup_workflow_run(self, workflow=None, inputs_by='step_id', history_id=None):
                 '0': self._ds_entry(hda1),
                 '1': self._ds_entry(hda2)
             }
-            workflow_request["inputs"] = dumps(index_map)
+            workflow_request["inputs"] = index_map
             workflow_request["inputs_by"] = 'step_index'
         elif inputs_by == "name":
-            workflow_request["inputs"] = dumps(label_map)
+            workflow_request["inputs"] = label_map
             workflow_request["inputs_by"] = 'name'
         elif inputs_by in ["step_uuid", "uuid_implicitly"]:
             uuid_map = {
                 workflow["steps"]["0"]["uuid"]: self._ds_entry(hda1),
                 workflow["steps"]["1"]["uuid"]: self._ds_entry(hda2),
             }
-            workflow_request["inputs"] = dumps(uuid_map)
+            workflow_request["inputs"] = uuid_map
             if inputs_by == "step_uuid":
                 workflow_request["inputs_by"] = "step_uuid"

@@ -165,7 +164,7 @@ def _build_ds_map(self, workflow_id, label_map):
             label = value["label"]
             if label in label_map:
                 ds_map[key] = label_map[label]
-        return dumps(ds_map)
+        return ds_map

     def _ds_entry(self, history_content):
         return self.dataset_populator.ds_entry(history_content)
@@ -362,7 +361,7 @@ def __test_upload(self, use_deprecated_route=False, name="test_import", workflow
         if workflow is None:
             workflow = self.workflow_populator.load_workflow(name=name)
         data = dict(
-            workflow=dumps(workflow),
+            workflow=workflow,
         )
         if import_tools:
             data["import_tools"] = import_tools
@@ -2618,10 +2617,10 @@ def test_run_with_int_parameter_nested(self):
             'history_id': history_id,
             'workflow_id': workflow_id,
             'inputs_by': 'name',
-            'inputs': json.dumps({
+            'inputs': {
                 'input_dataset': {'src': 'hda', 'id': hda['id']},
                 'int_parameter': 1,
-            })
+            }
         }
         invocation_response = self.workflow_populator.invoke_workflow_raw(workflow_id, workflow_request)
         assert invocation_response.status_code == 200, invocation_response.text
@@ -2848,7 +2847,7 @@ def test_nested_workflow_rerun_with_use_cached_job(self):
         inputs['outer_input']['id'] = new_dataset_id
         workflow_request['use_cached_job'] = True
         workflow_request['history'] = f"hist_id={history_id_two}"
-        workflow_request['inputs'] = json.dumps(inputs)
+        workflow_request['inputs'] = inputs
         run_workflow_response = self._post("workflows", data=run_jobs_summary.workflow_request).json()
         self.workflow_populator.wait_for_workflow(workflow_request['workflow_id'],
                                                   run_workflow_response['id'],
@@ -2947,7 +2946,7 @@ def test_invalid_create_multiple_types(self):
     def test_run_with_pja(self):
         workflow = self.workflow_populator.load_workflow(name="test_for_pja_run", add_pja=True)
         workflow_request, history_id = self._setup_workflow_run(workflow, inputs_by='step_index')
-        workflow_request["replacement_params"] = dumps(dict(replaceme="was replaced"))
+        workflow_request["replacement_params"] = dict(replaceme="was replaced")
         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
         content = self.dataset_populator.get_history_dataset_details(history_id, wait=True, assert_ok=True)
@@ -3667,7 +3666,7 @@ def test_run_with_runtime_pja(self):
         workflow["steps"]["1"]["uuid"] = uuid1
         workflow["steps"]["2"]["uuid"] = uuid2
         workflow_request, history_id = self._setup_workflow_run(workflow, inputs_by='step_index')
-        workflow_request["replacement_params"] = dumps(dict(replaceme="was replaced"))
+        workflow_request["replacement_params"] = dict(replaceme="was replaced")

         pja_map = {
             "RenameDatasetActionout_file1": dict(
@@ -3676,9 +3675,9 @@ def test_run_with_runtime_pja(self):
                 action_arguments=dict(newname="foo ${replaceme}"),
             )
         }
-        workflow_request["parameters"] = dumps({
+        workflow_request["parameters"] = {
             uuid2: {"__POST_JOB_ACTIONS__": pja_map}
-        })
+        }

         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
@@ -3722,17 +3721,17 @@ def test_run_with_delayed_runtime_pja(self):
         }
         uuid2 = uuid_dict[3]
         workflow_request = {}
-        workflow_request["replacement_params"] = dumps(dict(replaceme="was replaced"))
+        workflow_request["replacement_params"] = dict(replaceme="was replaced")
         pja_map = {
             "RenameDatasetActionout_file1": dict(
                 action_type="RenameDatasetAction",
                 output_name="out_file1",
                 action_arguments=dict(newname="foo ${replaceme}"),
             )
         }
-        workflow_request["parameters"] = dumps({
+        workflow_request["parameters"] = {
             uuid2: {"__POST_JOB_ACTIONS__": pja_map}
-        })
+        }
         invocation_id = self.__invoke_workflow(history_id, workflow_id, inputs=inputs, request=workflow_request)

         time.sleep(2)
@@ -3860,7 +3859,7 @@ def test_value_restriction_with_select_and_text_param(self):
     @skip_without_tool("random_lines1")
     def test_run_replace_params_by_tool(self):
         workflow_request, history_id = self._setup_random_x2_workflow("test_for_replace_tool_params")
-        workflow_request["parameters"] = dumps(dict(random_lines1=dict(num_lines=5)))
+        workflow_request["parameters"] = dict(random_lines1=dict(num_lines=5))
         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
         self.dataset_populator.wait_for_history(history_id, assert_ok=True)
@@ -3871,10 +3870,10 @@ def test_run_replace_params_by_tool(self):
     @skip_without_tool("random_lines1")
     def test_run_replace_params_by_uuid(self):
         workflow_request, history_id = self._setup_random_x2_workflow("test_for_replace_tool_params")
-        workflow_request["parameters"] = dumps({
+        workflow_request["parameters"] = {
             "58dffcc9-bcb7-4117-a0e1-61513524b3b1": dict(num_lines=4),
             "58dffcc9-bcb7-4117-a0e1-61513524b3b2": dict(num_lines=3),
-        })
+        }
         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
         self.dataset_populator.wait_for_history(history_id, assert_ok=True)
@@ -3902,7 +3901,7 @@ def test_run_batch(self):
             "history_id": history_id,
             "batch": True,
             "parameters_normalized": True,
-            "parameters": dumps(parameters),
+            "parameters": parameters,
         }
         invocation_response = self._post(f"workflows/{workflow_id}/usage", data=workflow_request)
         self._assert_status_code_is(invocation_response, 200)
@@ -3943,10 +3942,10 @@ def test_run_batch_inputs(self):
         workflow_request = {
             "history_id": history_id,
             "batch": True,
-            "inputs": dumps(inputs),
+            "inputs": inputs,
             "inputs_by": "name",
             "parameters_normalized": True,
-            "parameters": dumps(parameters),
+            "parameters": parameters,
         }
         invocation_response = self._post(f"workflows/{workflow_id}/usage", data=workflow_request)
         self._assert_status_code_is(invocation_response, 200)
@@ -3987,7 +3986,7 @@ def test_parameter_substitution_validation_value_errors_0(self):
         """)
         workflow_request = dict(
             history=f"hist_id={history_id}",
-            parameters=dumps(dict(validation_repeat={"r2_0|text": ""}))
+            parameters=dict(validation_repeat={"r2_0|text": ""})
         )
         url = f"workflows/{workflow_id}/invocations"
         invocation_response = self._post(url, data=workflow_request)
@@ -4021,15 +4020,15 @@ def _run_validation_workflow_with_substitions(self, substitions):
         workflow_request = dict(
             history=f"hist_id={history_id}",
             workflow_id=uploaded_workflow_id,
-            parameters=dumps(dict(validation_default=substitions))
+            parameters=dict(validation_default=substitions)
         )
         run_workflow_response = self._post("workflows", data=workflow_request)
         return run_workflow_response, history_id

     @skip_without_tool("random_lines1")
     def test_run_replace_params_by_steps(self):
         workflow_request, history_id, steps = self._setup_random_x2_workflow_steps("test_for_replace_step_params")
-        params = dumps({str(steps[1]["id"]): dict(num_lines=5)})
+        params = {str(steps[1]["id"]): dict(num_lines=5)}
         workflow_request["parameters"] = params
         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
@@ -4045,8 +4044,8 @@ def test_run_replace_params_nested(self):
             seed_source_selector="set_seed",
             seed="moo",
         )
-        params = dumps({str(steps[0]["id"]): dict(num_lines=1, seed_source=seed_source),
-                        str(steps[1]["id"]): dict(num_lines=1, seed_source=seed_source)})
+        params = {str(steps[0]["id"]): dict(num_lines=1, seed_source=seed_source),
+                  str(steps[1]["id"]): dict(num_lines=1, seed_source=seed_source)}
         workflow_request["parameters"] = params
         run_workflow_response = self._post("workflows", data=workflow_request)
         self._assert_status_code_is(run_workflow_response, 200)
@@ -4061,8 +4060,8 @@ def test_run_replace_params_nested_normalized(self):
             "seed_source|seed_source_selector": "set_seed",
             "seed_source|seed": "moo",
         }
-        params = dumps({str(steps[0]["id"]): parameters,
-                        str(steps[1]["id"]): parameters})
+        params = {str(steps[0]["id"]): parameters,
+                  str(steps[1]["id"]): parameters}
         workflow_request["parameters"] = params
         workflow_request["parameters_normalized"] = False
         run_workflow_response = self._post("workflows", data=workflow_request)
@@ -4387,7 +4386,7 @@ def _invocation_step_details(self, workflow_id, invocation_id, step_id):
     def _execute_invocation_step_action(self, workflow_id, invocation_id, step_id, action):
         raw_url = f"workflows/{workflow_id}/usage/{invocation_id}/steps/{step_id}"
         url = self._api_url(raw_url, use_key=True)
-        payload = dumps(dict(action=action))
+        payload = dict(action=action)
         action_response = put(url, data=payload)
         self._assert_status_code_is(action_response, 200)
         invocation_step_details = action_response.json()
@@ -4431,9 +4430,9 @@ def _setup_random_x2_workflow(self, name):
         workflow_request = dict(
             history=f"hist_id={history_id}",
             workflow_id=uploaded_workflow_id,
-            ds_map=dumps({
+            ds_map={
                 key: self._ds_entry(hda1),
-            }),
+            },
         )
         return workflow_request, history_id

@@ -4548,7 +4547,7 @@ def test_import_export_dynamic_tools(self):
         hda1 = self.dataset_populator.new_dataset(history_id, content="Hello World Second!")
         workflow_request = dict(
             inputs_by="name",
-            inputs=json.dumps({'input1': self._ds_entry(hda1)}),
+            inputs={'input1': self._ds_entry(hda1)},
         )
         invocation_id = self.workflow_populator.invoke_workflow(history_id, workflow_id, request=workflow_request, assert_ok=True)
         self.wait_for_invocation_and_jobs(history_id, workflow_id, invocation_id)
