Merge branch 'release_24.1' into release_24.2
mvdbeek committed Feb 5, 2025
2 parents 0696805 + 13f9977 commit 1d31d5b
Showing 6 changed files with 55 additions and 13 deletions.
9 changes: 7 additions & 2 deletions lib/galaxy/managers/jobs.py
@@ -346,7 +346,7 @@ def get_job_console_output(

     def stop(self, job, message=None):
         if not job.finished:
-            job.mark_deleted(self.app.config.track_jobs_in_database)
+            job.mark_deleted(self.app.config.track_jobs_in_database, message)
             session = self.app.model.session
             with transaction(session):
                 session.commit()
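
The message accepted by JobManager.stop is now forwarded to Job.mark_deleted (see the model change further down), so a caller-supplied reason ends up in the job's info field. A minimal, self-contained sketch of the pattern; FakeJob and stop are hypothetical stand-ins, not Galaxy's real classes:

    # Sketch of the message flow, with stand-in classes.
    class FakeJob:
        def __init__(self):
            self.finished = False
            self.info = None

        def mark_deleted(self, track_jobs_in_database=False, message=None):
            # a caller-supplied message wins; otherwise keep the historical default
            self.info = message or "Job output deleted by user before job completed."

    def stop(job, message=None):
        if not job.finished:
            job.mark_deleted(track_jobs_in_database=True, message=message)

    job = FakeJob()
    stop(job, message="test message")
    assert job.info == "test message"
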
@@ -393,7 +393,12 @@ def populate_input_data_input_id(path, key, value):
            current_case = param_dump
            for p in path:
                current_case = current_case[p]
-           src = current_case["src"]
+           src = current_case.get("src")
+           if src is None:
+               # just a parameter named id.
+               # TODO: dispatch on tool parameter type instead of values,
+               # maybe with tool state variant
+               return key, value
            current_case = param
            for i, p in enumerate(path):
                if p == "values" and i == len(path) - 2:
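
Why current_case.get("src") instead of current_case["src"]: in a tool's parameter dump, a dict carrying "src" and "id" keys marks a dataset or collection reference, while an ordinary tool parameter may also happen to be named "id", and indexing "src" directly would raise KeyError for it. A sketch of the distinction with a simplified walker and a hypothetical id encoder in place of Galaxy's actual remap-based code:

    param_dump = {
        "input1": {"src": "hda", "id": 42},  # dataset reference: remap its id
        "id": 7,                             # plain parameter that is just named "id"
    }

    def remap_ids(node):
        if not isinstance(node, dict):
            return node
        out = {}
        for key, value in node.items():
            if key == "id" and node.get("src") is not None:
                out[key] = f"encoded-{value}"  # hypothetical id encoding
            else:
                out[key] = remap_ids(value)
        return out

    assert remap_ids(param_dump) == {"input1": {"src": "hda", "id": "encoded-42"}, "id": 7}
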
27 changes: 19 additions & 8 deletions lib/galaxy/model/__init__.py
@@ -1903,7 +1903,7 @@ def mark_stopped(self, track_jobs_in_database=False):
         else:
             self.state = Job.states.STOPPED

-    def mark_deleted(self, track_jobs_in_database=False):
+    def mark_deleted(self, track_jobs_in_database=False, message=None):
         """
         Mark this job as deleted, and mark any output datasets as discarded.
         """
@@ -1914,7 +1914,8 @@ def mark_deleted(self, track_jobs_in_database=False):
             self.state = Job.states.DELETING
         else:
             self.state = Job.states.DELETED
-        self.info = "Job output deleted by user before job completed."
+        info = message or "Job output deleted by user before job completed."
+        self.info = info
         for jtoda in self.output_datasets:
             output_hda = jtoda.dataset
             output_hda.deleted = True
@@ -1924,7 +1925,7 @@ def mark_deleted(self, track_jobs_in_database=False):
                 shared_hda.deleted = True
                 shared_hda.blurb = "deleted"
                 shared_hda.peek = "Job deleted"
-                shared_hda.info = "Job output deleted by user before job completed"
+                shared_hda.info = info

     def mark_failed(self, info="Job execution failed", blurb=None, peek=None):
         """
@@ -3708,6 +3709,10 @@ def active_contents(self):
         """Return all active contents ordered by hid."""
         return self.contents_iter(types=["dataset", "dataset_collection"], deleted=False, visible=True)

+    @property
+    def visible_contents(self):
+        return self.contents_iter(types=["dataset", "dataset_collection"], visible=True)
+
     def contents_iter(self, **kwds):
         """
         Fetch filtered list of contents of history.
@@ -5012,11 +5017,16 @@ def display_name(self):
     def display_info(self):
         return self.datatype.display_info(self)

-    def get_converted_files_by_type(self, file_type):
+    def get_converted_files_by_type(self, file_type, include_errored=False):
         for assoc in self.implicitly_converted_datasets:
             if not assoc.deleted and assoc.type == file_type:
                 item = assoc.dataset or assoc.dataset_ldda
-                if not item.deleted and item.state in Dataset.valid_input_states:
+                valid_states = (
+                    (Dataset.states.ERROR, *Dataset.valid_input_states)
+                    if include_errored
+                    else Dataset.valid_input_states
+                )
+                if not item.deleted and item.state in valid_states:
                     return item
         return None

@@ -5031,7 +5041,7 @@ def get_converted_dataset_deps(self, trans, target_ext):
             depends_list = []
         return {dep: self.get_converted_dataset(trans, dep) for dep in depends_list}

-    def get_converted_dataset(self, trans, target_ext, target_context=None, history=None):
+    def get_converted_dataset(self, trans, target_ext, target_context=None, history=None, include_errored=False):
         """
         Return converted dataset(s) if they exist, along with a dict of dependencies.
         If not converted yet, do so and return None (the first time). If unconvertible, raise exception.
@@ -5043,7 +5053,7 @@ def get_converted_dataset(self, trans, target_ext, target_context=None, history=
         converted_dataset = self.get_metadata_dataset(target_ext)
         if converted_dataset:
             return converted_dataset
-        converted_dataset = self.get_converted_files_by_type(target_ext)
+        converted_dataset = self.get_converted_files_by_type(target_ext, include_errored=include_errored)
         if converted_dataset:
             return converted_dataset
         deps = {}
@@ -5267,7 +5277,8 @@ def convert_dataset(self, trans, target_type):

         # Get converted dataset; this will start the conversion if necessary.
         try:
-            converted_dataset = self.get_converted_dataset(trans, target_type)
+            # Include errored datasets here, we let the user choose to retry or view the error
+            converted_dataset = self.get_converted_dataset(trans, target_type, include_errored=True)
         except NoConverterException:
             return self.conversion_messages.NO_CONVERTER
         except ConverterDependencyException as dep_error:
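
Taken together, the include_errored changes let convert_dataset surface a previously failed implicit conversion instead of silently skipping it, so the UI can offer a retry or show the error. The state filter itself is a small tuple extension; a sketch with hypothetical state values standing in for Dataset.states:

    VALID_INPUT_STATES = ("new", "queued", "running", "ok")  # hypothetical stand-ins
    ERROR = "error"

    def acceptable_states(include_errored=False):
        # prepend the error state only when the caller wants failed conversions back
        return (ERROR, *VALID_INPUT_STATES) if include_errored else VALID_INPUT_STATES

    assert ERROR in acceptable_states(include_errored=True)
    assert ERROR not in acceptable_states()
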
2 changes: 1 addition & 1 deletion lib/galaxy/workflow/extract.py
@@ -283,7 +283,7 @@ def __summarize(self):
         # just grab the implicitly mapped jobs and handle in second pass. Second pass is
         # needed because cannot allow selection of individual datasets from an implicit
         # mapping during extraction - you get the collection or nothing.
-        for content in self.history.active_contents:
+        for content in self.history.visible_contents:
             self.__summarize_content(content)

     def __summarize_content(self, content):
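
active_contents filters on both deleted=False and visible=True, while the new visible_contents drops the deleted filter. Workflow extraction therefore also walks datasets the user has deleted but not hidden, which is what lets the template change below list their jobs unchecked instead of omitting them. A sketch of the two filters over stand-in content objects:

    from dataclasses import dataclass

    @dataclass
    class Content:
        deleted: bool
        visible: bool

    contents = [
        Content(deleted=False, visible=True),   # normal dataset
        Content(deleted=True, visible=True),    # deleted but still shown in the history
        Content(deleted=False, visible=False),  # hidden
    ]

    active = [c for c in contents if not c.deleted and c.visible]
    visible = [c for c in contents if c.visible]

    assert len(active) == 1 and len(visible) == 2
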
18 changes: 18 additions & 0 deletions lib/galaxy_test/api/test_jobs.py
@@ -1062,6 +1062,24 @@ def test_delete_job(self, history_id):
         self._assert_status_code_is(empty_search_response, 200)
         assert len(empty_search_response.json()) == 0

+    @pytest.mark.require_new_history
+    def test_delete_job_with_message(self, history_id):
+        input_dataset_id = self.__history_with_ok_dataset(history_id)
+        inputs = json.dumps({"input1": {"src": "hda", "id": input_dataset_id}})
+        search_payload = self._search_payload(history_id=history_id, tool_id="cat1", inputs=inputs)
+        # create a job
+        tool_response = self._post("tools", data=search_payload).json()
+        job_id = tool_response["jobs"][0]["id"]
+        output_dataset_id = tool_response["outputs"][0]["id"]
+        # delete the job with message
+        expected_message = "test message"
+        delete_job_response = self._delete(f"jobs/{job_id}", data={"message": expected_message}, json=True)
+        self._assert_status_code_is(delete_job_response, 200)
+        # Check the output dataset is deleted and the info field contains the message
+        dataset_details = self._get(f"histories/{history_id}/contents/{output_dataset_id}").json()
+        assert dataset_details["deleted"] is True
+        assert dataset_details["misc_info"] == expected_message
+
     @pytest.mark.require_new_history
     def test_destination_params(self, history_id):
         dataset_id = self.__history_with_ok_dataset(history_id)
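
Outside the test framework, the same delete-with-message call looks roughly like this; the endpoint shape is inferred from the test above, and the URL, API key, and job id are placeholders, not part of the commit:

    import requests

    GALAXY_URL = "https://galaxy.example.org"  # placeholder
    API_KEY = "YOUR_API_KEY"                   # placeholder
    job_id = "1cd8e2f6b131e891"                # placeholder encoded job id

    response = requests.delete(
        f"{GALAXY_URL}/api/jobs/{job_id}",
        headers={"x-api-key": API_KEY},
        json={"message": "no longer needed"},
    )
    response.raise_for_status()
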
8 changes: 6 additions & 2 deletions templates/webapps/galaxy/workflow/build_from_current_history.mako
@@ -88,7 +88,7 @@ into a workflow will be shown in gray.</p>
     <div class="warningmark">${warning}</div>
 %endfor

-<form method="post" action="${h.url_for(controller='workflow', action='build_from_current_history')}">
+<form method="post" action="${h.url_for(controller='workflow', action='build_from_current_history', history_id=trans.security.encode_id(history.id))}">
     <div class='form-row'>
         <label>Workflow name</label>
         <input name="workflow_name" type="text" value="Workflow constructed from history '${ util.unicodify( history.name )}'" size="60"/>
@@ -112,6 +112,7 @@ into a workflow will be shown in gray.</p>
 <%
     cls = "toolForm"
     tool_name = "Unknown"
+    checked_job = "checked" if any(True for d in datasets if not d[1].deleted) else ""
     if hasattr( job, 'is_fake' ) and job.is_fake:
         cls += " toolFormDisabled"
         disabled = True
@@ -142,7 +143,10 @@ into a workflow will be shown in gray.</p>
     %if disabled:
         <div style="font-style: italic; color: gray">${disabled_why}</div>
     %else:
-        <div><input type="checkbox" name="job_ids" value="${job.id}" checked="true" />Include "${tool_name}" in workflow</div>
+        <div><input type="checkbox" name="job_ids" value="${job.id}" ${checked_job} />Include "${tool_name}" in workflow</div>
+        %if not checked_job:
+            ${ render_msg( "All job outputs have been deleted", status="info" ) }
+        %endif
         %if tool_version_warning:
             ${ render_msg( tool_version_warning, status="warning" ) }
         %endif
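
The checked_job expression pre-selects a job only if at least one of its outputs survives; the template indexes d[1], suggesting each entry of datasets pairs a label with its dataset object. A stand-alone sketch of the predicate with stand-in objects:

    from types import SimpleNamespace

    datasets = [
        ("output 1", SimpleNamespace(deleted=True)),
        ("output 2", SimpleNamespace(deleted=False)),
    ]

    checked_job = "checked" if any(True for d in datasets if not d[1].deleted) else ""
    assert checked_job == "checked"  # one live output keeps the job pre-selected
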
4 changes: 4 additions & 0 deletions test/unit/workflows/test_extract_summary.py
@@ -121,6 +121,10 @@ def __init__(self):
     def active_contents(self):
         return self.active_datasets

+    @property
+    def visible_contents(self):
+        return self.active_contents
+

 class MockTrans:
     def __init__(self, history):
