Skip to content

Commit

Permalink
Merge branch 'release_24.0' into release_24.1
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Feb 5, 2025
2 parents e1599f3 + 31c0eba commit 13f9977
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 9 deletions.
2 changes: 1 addition & 1 deletion lib/galaxy/managers/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ def get_accessible_job(self, trans: ProvidesUserContext, decoded_job_id) -> Job:

def stop(self, job, message=None):
if not job.finished:
job.mark_deleted(self.app.config.track_jobs_in_database)
job.mark_deleted(self.app.config.track_jobs_in_database, message)
session = self.app.model.session
with transaction(session):
session.commit()
Expand Down
23 changes: 15 additions & 8 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1880,7 +1880,7 @@ def mark_stopped(self, track_jobs_in_database=False):
else:
self.state = Job.states.STOPPED

def mark_deleted(self, track_jobs_in_database=False):
def mark_deleted(self, track_jobs_in_database=False, message=None):
"""
Mark this job as deleted, and mark any output datasets as discarded.
"""
Expand All @@ -1891,7 +1891,8 @@ def mark_deleted(self, track_jobs_in_database=False):
self.state = Job.states.DELETING
else:
self.state = Job.states.DELETED
self.info = "Job output deleted by user before job completed."
info = message or "Job output deleted by user before job completed."
self.info = info
for jtoda in self.output_datasets:
output_hda = jtoda.dataset
output_hda.deleted = True
Expand All @@ -1901,7 +1902,7 @@ def mark_deleted(self, track_jobs_in_database=False):
shared_hda.deleted = True
shared_hda.blurb = "deleted"
shared_hda.peek = "Job deleted"
shared_hda.info = "Job output deleted by user before job completed"
shared_hda.info = info

def mark_failed(self, info="Job execution failed", blurb=None, peek=None):
"""
Expand Down Expand Up @@ -4905,11 +4906,16 @@ def display_name(self):
def display_info(self):
return self.datatype.display_info(self)

def get_converted_files_by_type(self, file_type):
def get_converted_files_by_type(self, file_type, include_errored=False):
for assoc in self.implicitly_converted_datasets:
if not assoc.deleted and assoc.type == file_type:
item = assoc.dataset or assoc.dataset_ldda
if not item.deleted and item.state in Dataset.valid_input_states:
valid_states = (
(Dataset.states.ERROR, *Dataset.valid_input_states)
if include_errored
else Dataset.valid_input_states
)
if not item.deleted and item.state in valid_states:
return item
return None

Expand All @@ -4924,7 +4930,7 @@ def get_converted_dataset_deps(self, trans, target_ext):
depends_list = []
return {dep: self.get_converted_dataset(trans, dep) for dep in depends_list}

def get_converted_dataset(self, trans, target_ext, target_context=None, history=None):
def get_converted_dataset(self, trans, target_ext, target_context=None, history=None, include_errored=False):
"""
Return converted dataset(s) if they exist, along with a dict of dependencies.
If not converted yet, do so and return None (the first time). If unconvertible, raise exception.
Expand All @@ -4936,7 +4942,7 @@ def get_converted_dataset(self, trans, target_ext, target_context=None, history=
converted_dataset = self.get_metadata_dataset(target_ext)
if converted_dataset:
return converted_dataset
converted_dataset = self.get_converted_files_by_type(target_ext)
converted_dataset = self.get_converted_files_by_type(target_ext, include_errored=include_errored)
if converted_dataset:
return converted_dataset
deps = {}
Expand Down Expand Up @@ -5160,7 +5166,8 @@ def convert_dataset(self, trans, target_type):

# Get converted dataset; this will start the conversion if necessary.
try:
converted_dataset = self.get_converted_dataset(trans, target_type)
# Include errored datasets here; we let the user choose to retry or view the error
converted_dataset = self.get_converted_dataset(trans, target_type, include_errored=True)
except NoConverterException:
return self.conversion_messages.NO_CONVERTER
except ConverterDependencyException as dep_error:
Expand Down
18 changes: 18 additions & 0 deletions lib/galaxy_test/api/test_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1062,6 +1062,24 @@ def test_delete_job(self, history_id):
self._assert_status_code_is(empty_search_response, 200)
assert len(empty_search_response.json()) == 0

@pytest.mark.require_new_history
def test_delete_job_with_message(self, history_id):
    """Cancelling a job with a custom message should surface that message on the discarded output."""
    input_dataset_id = self.__history_with_ok_dataset(history_id)
    tool_inputs = json.dumps({"input1": {"src": "hda", "id": input_dataset_id}})
    run_payload = self._search_payload(history_id=history_id, tool_id="cat1", inputs=tool_inputs)
    # Launch a job whose output we can inspect after deletion.
    run_response = self._post("tools", data=run_payload).json()
    job_id = run_response["jobs"][0]["id"]
    output_dataset_id = run_response["outputs"][0]["id"]
    expected_message = "test message"
    # Cancel the job, passing the custom deletion message in the payload.
    delete_job_response = self._delete(f"jobs/{job_id}", data={"message": expected_message}, json=True)
    self._assert_status_code_is(delete_job_response, 200)
    # The discarded output dataset must be marked deleted and carry the message in its info field.
    dataset_details = self._get(f"histories/{history_id}/contents/{output_dataset_id}").json()
    assert dataset_details["deleted"] is True
    assert dataset_details["misc_info"] == expected_message

@pytest.mark.require_new_history
def test_destination_params(self, history_id):
dataset_id = self.__history_with_ok_dataset(history_id)
Expand Down

0 comments on commit 13f9977

Please sign in to comment.