Skip to content

Commit

Permalink
Merge branch 'release_24.2' into dev
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Feb 5, 2025
2 parents 8f9dfaa + 1d31d5b commit dbc264c
Show file tree
Hide file tree
Showing 21 changed files with 159 additions and 182 deletions.
4 changes: 3 additions & 1 deletion client/src/components/Panels/Common/ToolSearch.vue
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,9 @@ const localFilterText = computed({
return props.query !== null ? props.query : "";
},
set: (newVal: any) => {
checkQuery(newVal);
if (newVal.trim() || props.query.trim()) {
checkQuery(newVal);
}
},
});
Expand Down
27 changes: 17 additions & 10 deletions client/src/viz/trackster.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,23 +74,30 @@ export class TracksterUI extends Backbone.Model {
},
bookmarks: bookmarks,
};

// Make call to save visualization.
return $.ajax({
url: `${getAppRoot()}visualization/save`,
type: "POST",
const request = {
dataType: "json",
data: {
contentType: "application/json; charset=utf-8",
data: JSON.stringify({
id: this.view.vis_id,
title: this.view.config.get_value("name"),
dbkey: this.view.dbkey,
type: "trackster",
vis_json: JSON.stringify(viz_config),
},
})
config: viz_config,
}),
};
if (!this.view.vis_id) {
request.url = `${getAppRoot()}api/visualizations`;
request.type = "POST";
} else {
request.url = `${getAppRoot()}api/visualizations/${this.view.vis_id}`;
request.type = "PUT";
}

// Make call to save visualization.
return $.ajax(request)
.success((vis_info) => {
Galaxy.modal.hide();
this.view.vis_id = vis_info.vis_id;
this.view.vis_id = vis_info.id;
this.view.has_changes = false;

// Needed to set URL when first saving a visualization.
Expand Down
9 changes: 7 additions & 2 deletions lib/galaxy/managers/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ def get_job_console_output(

def stop(self, job, message=None):
if not job.finished:
job.mark_deleted(self.app.config.track_jobs_in_database)
job.mark_deleted(self.app.config.track_jobs_in_database, message)
session = self.app.model.session
session.commit()
self.app.job_manager.stop(job, message=message)
Expand Down Expand Up @@ -391,7 +391,12 @@ def populate_input_data_input_id(path, key, value):
current_case = param_dump
for p in path:
current_case = current_case[p]
src = current_case["src"]
src = current_case.get("src")
if src is None:
# just a parameter named id.
# TODO: dispatch on tool parameter type instead of values,
# maybe with tool state variant
return key, value
current_case = param
for i, p in enumerate(path):
if p == "values" and i == len(path) - 2:
Expand Down
45 changes: 28 additions & 17 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1893,7 +1893,7 @@ def mark_stopped(self, track_jobs_in_database=False):
else:
self.state = Job.states.STOPPED

def mark_deleted(self, track_jobs_in_database=False):
def mark_deleted(self, track_jobs_in_database=False, message=None):
"""
Mark this job as deleted, and mark any output datasets as discarded.
"""
Expand All @@ -1904,7 +1904,8 @@ def mark_deleted(self, track_jobs_in_database=False):
self.state = Job.states.DELETING
else:
self.state = Job.states.DELETED
self.info = "Job output deleted by user before job completed."
info = message or "Job output deleted by user before job completed."
self.info = info
for jtoda in self.output_datasets:
output_hda = jtoda.dataset
output_hda.deleted = True
Expand All @@ -1914,7 +1915,7 @@ def mark_deleted(self, track_jobs_in_database=False):
shared_hda.deleted = True
shared_hda.blurb = "deleted"
shared_hda.peek = "Job deleted"
shared_hda.info = "Job output deleted by user before job completed"
shared_hda.info = info

def mark_failed(self, info="Job execution failed", blurb=None, peek=None):
"""
Expand Down Expand Up @@ -3686,6 +3687,10 @@ def active_contents(self):
"""Return all active contents ordered by hid."""
return self.contents_iter(types=["dataset", "dataset_collection"], deleted=False, visible=True)

@property
def visible_contents(self):
return self.contents_iter(types=["dataset", "dataset_collection"], visible=True)

def contents_iter(self, **kwds):
"""
Fetch filtered list of contents of history.
Expand Down Expand Up @@ -4979,11 +4984,16 @@ def display_name(self):
def display_info(self):
return self.datatype.display_info(self)

def get_converted_files_by_type(self, file_type):
def get_converted_files_by_type(self, file_type, include_errored=False):
for assoc in self.implicitly_converted_datasets:
if not assoc.deleted and assoc.type == file_type:
item = assoc.dataset or assoc.dataset_ldda
if not item.deleted and item.state in Dataset.valid_input_states:
valid_states = (
(Dataset.states.ERROR, *Dataset.valid_input_states)
if include_errored
else Dataset.valid_input_states
)
if not item.deleted and item.state in valid_states:
return item
return None

Expand All @@ -4998,7 +5008,7 @@ def get_converted_dataset_deps(self, trans, target_ext):
depends_list = []
return {dep: self.get_converted_dataset(trans, dep) for dep in depends_list}

def get_converted_dataset(self, trans, target_ext, target_context=None, history=None):
def get_converted_dataset(self, trans, target_ext, target_context=None, history=None, include_errored=False):
"""
Return converted dataset(s) if they exist, along with a dict of dependencies.
If not converted yet, do so and return None (the first time). If unconvertible, raise exception.
Expand All @@ -5010,7 +5020,7 @@ def get_converted_dataset(self, trans, target_ext, target_context=None, history=
converted_dataset = self.get_metadata_dataset(target_ext)
if converted_dataset:
return converted_dataset
converted_dataset = self.get_converted_files_by_type(target_ext)
converted_dataset = self.get_converted_files_by_type(target_ext, include_errored=include_errored)
if converted_dataset:
return converted_dataset
deps = {}
Expand Down Expand Up @@ -5232,7 +5242,8 @@ def convert_dataset(self, trans, target_type):

# Get converted dataset; this will start the conversion if necessary.
try:
converted_dataset = self.get_converted_dataset(trans, target_type)
# Include errored datasets here; we let the user choose to retry or view the error
converted_dataset = self.get_converted_dataset(trans, target_type, include_errored=True)
except NoConverterException:
return self.conversion_messages.NO_CONVERTER
except ConverterDependencyException as dep_error:
Expand Down Expand Up @@ -7871,7 +7882,7 @@ class Workflow(Base, Dictifiable, RepresentById):
source_metadata: Mapped[Optional[Dict[str, str]]] = mapped_column(JSONType)
uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType)

steps = relationship(
steps: Mapped[List["WorkflowStep"]] = relationship(
"WorkflowStep",
back_populates="workflow",
primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id),
Expand Down Expand Up @@ -7921,7 +7932,7 @@ def to_dict(self, view="collection", value_mapper=None):
return rval

@property
def steps_by_id(self):
def steps_by_id(self) -> Dict[int, "WorkflowStep"]:
steps = {}
for step in self.steps:
step_id = step.id
Expand Down Expand Up @@ -8096,7 +8107,7 @@ class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime):
back_populates="workflow_step",
)
post_job_actions = relationship("PostJobAction", back_populates="workflow_step")
inputs = relationship("WorkflowStepInput", back_populates="workflow_step")
inputs: Mapped[List["WorkflowStepInput"]] = relationship("WorkflowStepInput", back_populates="workflow_step")
workflow_outputs: Mapped[List["WorkflowOutput"]] = relationship(back_populates="workflow_step")
output_connections: Mapped[List["WorkflowStepConnection"]] = relationship(
primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id)
Expand Down Expand Up @@ -8436,16 +8447,16 @@ class WorkflowStepConnection(Base, RepresentById):
output_name: Mapped[Optional[str]] = mapped_column(TEXT)
input_subworkflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True)

input_step_input = relationship(
input_step_input: Mapped["WorkflowStepInput"] = relationship(
"WorkflowStepInput",
back_populates="connections",
cascade="all",
primaryjoin=(lambda: WorkflowStepConnection.input_step_input_id == WorkflowStepInput.id),
)
input_subworkflow_step = relationship(
input_subworkflow_step: Mapped[Optional["WorkflowStep"]] = relationship(
"WorkflowStep", primaryjoin=(lambda: WorkflowStepConnection.input_subworkflow_step_id == WorkflowStep.id)
)
output_step = relationship(
output_step: Mapped["WorkflowStep"] = relationship(
"WorkflowStep",
back_populates="output_connections",
cascade="all",
Expand All @@ -8469,7 +8480,7 @@ def input_name(self):

@property
def input_step(self) -> Optional[WorkflowStep]:
return self.input_step_input and self.input_step_input.workflow_step
return self.input_step_input.workflow_step

@property
def input_step_id(self):
Expand Down Expand Up @@ -8678,7 +8689,7 @@ class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializabl
back_populates="workflow_invocation",
order_by=lambda: WorkflowInvocationStep.order_index,
)
workflow = relationship("Workflow")
workflow: Mapped[Workflow] = relationship("Workflow")
output_dataset_collections = relationship(
"WorkflowInvocationOutputDatasetCollectionAssociation",
back_populates="workflow_invocation",
Expand Down Expand Up @@ -9594,7 +9605,7 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable):
ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True
)
workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"))
value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
value: Mapped[Optional[Dict[str, Any]]] = mapped_column(MutableJSONType)
workflow_step: Mapped[Optional["WorkflowStep"]] = relationship()
workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="step_states")

Expand Down
2 changes: 2 additions & 0 deletions lib/galaxy/tool_util/toolbox/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .base import (
AbstractToolBox,
AbstractToolTagManager,
ToolLoadConfigurationConflict,
ToolLoadError,
)
from .panel import (
Expand All @@ -15,6 +16,7 @@
"AbstractToolBox",
"AbstractToolTagManager",
"panel_item_types",
"ToolLoadConfigurationConflict",
"ToolLoadError",
"ToolSection",
"ToolSectionLabel",
Expand Down
6 changes: 6 additions & 0 deletions lib/galaxy/tool_util/toolbox/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,10 @@ class ToolLoadError(Exception):
pass


class ToolLoadConfigurationConflict(Exception):
pass


class AbstractToolBox(ManagesIntegratedToolPanelMixin):
"""
Abstract container for managing a ToolPanel - containing tools and
Expand Down Expand Up @@ -1081,6 +1085,8 @@ def quick_load(tool_file, async_load=True):
self._load_tool_panel_views()
self._save_integrated_tool_panel()
return tool.id
except ToolLoadConfigurationConflict as e:
log.warning(f"Configuration does not permit loading tool {tool_file} - {e}")
except ToolLoadError as e:
# no need for full stack trace - ToolLoadError corresponds to a known load
# error with defined cause that is included in the message
Expand Down
5 changes: 4 additions & 1 deletion lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@
from galaxy.tool_util.toolbox import (
AbstractToolBox,
AbstractToolTagManager,
ToolLoadConfigurationConflict,
ToolLoadError,
ToolSection,
)
Expand Down Expand Up @@ -3194,7 +3195,9 @@ class InteractiveTool(Tool):

def __init__(self, config_file, tool_source, app, **kwd):
if not app.config.interactivetools_enable:
raise ToolLoadError("Trying to load an InteractiveTool, but InteractiveTools are not enabled.")
raise ToolLoadConfigurationConflict(
"Trying to load an InteractiveTool, but InteractiveTools are not enabled."
)
super().__init__(config_file, tool_source, app, **kwd)

def __remove_interactivetool_by_job(self, job):
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/web/framework/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -492,7 +492,7 @@ def __init__(self):
"""
Create a new Response defaulting to HTML content and "200 OK" status
"""
self.status = "200 OK"
self.status: int = 200
self.headers = HeaderDict({"content-type": "text/html; charset=UTF-8"})
self.cookies = SimpleCookie()

Expand Down
Loading

0 comments on commit dbc264c

Please sign in to comment.