
Commit

Merge branch 'release_23.1' into dev
mvdbeek committed Aug 31, 2023
2 parents 9b55269 + 6bb9bc6 commit 913182f
Showing 25 changed files with 186 additions and 222 deletions.
2 changes: 2 additions & 0 deletions client/src/components/Page/PageList.test.js
@@ -31,6 +31,8 @@ describe("PgeList.vue", () => {
offset: 0,
sort_by: "update_time",
sort_desc: true,
show_published: false,
show_shared: true,
};
const publishedGridApiParams = {
...personalGridApiParams,
6 changes: 5 additions & 1 deletion client/src/components/Page/PageList.vue
@@ -177,7 +177,11 @@ export default {
},
computed: {
dataProviderParameters() {
const extraParams = { search: this.effectiveFilter };
const extraParams = {
search: this.effectiveFilter,
show_published: false,
show_shared: true,
};
if (this.published) {
extraParams.show_published = true;
extraParams.show_shared = false;
2 changes: 1 addition & 1 deletion client/src/schema/schema.ts
@@ -13546,7 +13546,7 @@ export interface operations {
header?: {
"run-as"?: string;
};
/** @description The ID of the History. */
/** @description History ID or any string. */
/** @description The ID of the item (`HDA`/`HDCA`) contained in the history. */
/**
* @description The type of the target history element.
@@ -261,7 +261,7 @@ window.bundleEntries.jqplot_box = function (options) {
chart : options.chart,
dataset_id : dataset.id,
dataset_groups : dataset_groups,
targets : options.targets,
target : options.target,
makeConfig : function( groups, plot_config ){
var boundary = getDomains( groups, 'x' );
$.extend( true, plot_config, {
5 changes: 5 additions & 0 deletions lib/galaxy/app.py
@@ -91,6 +91,7 @@
)
from galaxy.queue_worker import (
GalaxyQueueWorker,
reload_toolbox,
send_local_control_task,
)
from galaxy.quota import (
@@ -773,6 +774,10 @@ def __init__(self, **kwargs) -> None:
# Start web stack message handling
self.application_stack.register_postfork_function(self.application_stack.start)
self.application_stack.register_postfork_function(self.queue_worker.bind_and_start)
# Reload toolbox to pick up changes to toolbox made after master was ready
self.application_stack.register_postfork_function(
lambda: reload_toolbox(self, save_integrated_tool_panel=False), post_fork_only=True
)
# Delay toolbox index until after startup
self.application_stack.register_postfork_function(
lambda: send_local_control_task(self, "rebuild_toolbox_search_index")
13 changes: 10 additions & 3 deletions lib/galaxy/authnz/psa_authnz.py
@@ -30,6 +30,8 @@
from galaxy.util import DEFAULT_SOCKET_TIMEOUT
from . import IdentityProvider

log = logging.getLogger(__name__)

# key: a component name which PSA requests.
# value: is the name of a class associated with that key.
DEFAULTS = {"STRATEGY": "Strategy", "STORAGE": "Storage"}
@@ -170,9 +172,14 @@ def refresh(self, trans, user_authnz_token):
if not user_authnz_token or not user_authnz_token.extra_data:
return False
# refresh tokens if they reached their half lifetime
if int(user_authnz_token.extra_data["auth_time"]) + int(user_authnz_token.extra_data["expires"]) / 2 <= int(
time.time()
):
if "expires" in user_authnz_token.extra_data:
expires = user_authnz_token.extra_data["expires"]
elif "expires_in" in user_authnz_token.extra_data:
expires = user_authnz_token.extra_data["expires_in"]
else:
log.debug("No `expires` or `expires_in` key found in token extra data, cannot refresh")
return False
if int(user_authnz_token.extra_data["auth_time"]) + int(expires) / 2 <= int(time.time()):
on_the_fly_config(trans.sa_session)
if self.config["provider"] == "azure":
self.refresh_azure(user_authnz_token)
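A minimal standalone sketch of the half-lifetime check introduced above, tolerant of providers that store the token lifetime under either `expires` or `expires_in` (the `needs_refresh` helper and its arguments are hypothetical stand-ins for the method and `user_authnz_token.extra_data`):

```python
import time
from typing import Mapping, Optional


def needs_refresh(extra_data: Mapping[str, str], now: Optional[float] = None) -> bool:
    """Return True once a token has passed half of its lifetime.

    Assumes extra_data carries an "auth_time" plus a lifetime stored under
    either "expires" or "expires_in", mirroring the branch above.
    """
    now = time.time() if now is None else now
    if "expires" in extra_data:
        expires = extra_data["expires"]
    elif "expires_in" in extra_data:
        expires = extra_data["expires_in"]
    else:
        # No lifetime recorded: nothing sensible to do, so skip refreshing.
        return False
    return int(extra_data["auth_time"]) + int(expires) / 2 <= int(now)


# needs_refresh({"auth_time": "1000", "expires": "3600"}, now=2900) -> True
```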
11 changes: 0 additions & 11 deletions lib/galaxy/managers/context.py
@@ -159,17 +159,6 @@ def sa_session(self) -> galaxy_scoped_session:
"""
return self.app.model.session

def expunge_all(self):
"""Expunge all the objects in Galaxy's SQLAlchemy sessions."""
app = self.app
context = app.model.context
context.expunge_all()
# This is a bit hacky, should refctor this. Maybe refactor to app -> expunge_all()
if hasattr(app, "install_model"):
install_model = app.install_model
if install_model != app.model:
install_model.context.expunge_all()

def get_toolbox(self):
"""Returns the application toolbox.
14 changes: 14 additions & 0 deletions lib/galaxy/model/__init__.py
@@ -10746,6 +10746,20 @@ def __repr__(self):
# <user_obj>.preferences[pref_name] = pref_value
User.preferences = association_proxy("_preferences", "value", creator=UserPreference)

# Optimized version of getting the current Galaxy session.
# See https://github.com/sqlalchemy/sqlalchemy/discussions/7638 for approach
session_partition = select(
GalaxySession,
func.row_number().over(order_by=GalaxySession.update_time, partition_by=GalaxySession.user_id).label("index"),
).alias()
partitioned_session = aliased(GalaxySession, session_partition)
User.current_galaxy_session = relationship(
partitioned_session,
primaryjoin=and_(partitioned_session.user_id == User.id, session_partition.c.index < 2),
uselist=False,
viewonly=True,
)


@event.listens_for(HistoryDatasetCollectionAssociation, "init")
def receive_init(target, args, kwargs):
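The new `User.current_galaxy_session` relationship is what the webapp, admin grid, and reports changes later in this commit switch to; it fetches only the newest session per user via a window function instead of ordering and loading the whole `galaxy_sessions` collection. A self-contained sketch of the same row_number()-partition pattern against a toy schema (the `Account`/`LoginSession` names and the explicit descending order are illustrative, not Galaxy's definitions):

```python
from sqlalchemy import Column, DateTime, ForeignKey, Integer, and_, func, select
from sqlalchemy.orm import aliased, declarative_base, relationship

Base = declarative_base()


class Account(Base):
    __tablename__ = "account"
    id = Column(Integer, primary_key=True)


class LoginSession(Base):
    __tablename__ = "login_session"
    id = Column(Integer, primary_key=True)
    account_id = Column(Integer, ForeignKey("account.id"))
    update_time = Column(DateTime)


# Number each account's sessions, newest first, so the latest row gets index 1.
session_partition = select(
    LoginSession,
    func.row_number()
    .over(order_by=LoginSession.update_time.desc(), partition_by=LoginSession.account_id)
    .label("index"),
).alias()
latest_session = aliased(LoginSession, session_partition)

# viewonly relationship that joins only the row whose window index is 1.
Account.current_session = relationship(
    latest_session,
    primaryjoin=and_(latest_session.account_id == Account.id, session_partition.c.index < 2),
    uselist=False,
    viewonly=True,
)
```

With that in place, `account.current_session` loads a single row per user, which is why the `galaxy_sessions[0]` call sites further down could be replaced.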
2 changes: 1 addition & 1 deletion lib/galaxy/queue_worker.py
@@ -238,7 +238,7 @@ def reload_tool_data_tables(app, **kwargs):


def rebuild_toolbox_search_index(app, **kwargs):
if app.is_webapp:
if app.is_webapp and app.database_heartbeat.is_config_watcher:
if app.toolbox_search.index_count < app.toolbox._reload_count:
app.reindex_tool_search()
else:
5 changes: 4 additions & 1 deletion lib/galaxy/schema/invocation.py
@@ -53,7 +53,10 @@ def get(self, key: Any, default: Any = None) -> Any:
if key == "workflow_step_id":
return self._obj.workflow_step.order_index
elif key == "dependent_workflow_step_id":
return self._obj.dependent_workflow_step.order_index
if self._obj.dependent_workflow_step_id:
return self._obj.dependent_workflow_step.order_index
else:
return default

return super().get(key, default)

3 changes: 3 additions & 0 deletions lib/galaxy/tool_util/parser/xml.py
@@ -923,6 +923,9 @@ def __parse_param_elem(param_elem, i=0):
else:
value = None

if value is None and attrib.get("location", None) is not None:
value = os.path.basename(attrib["location"])

children_elem = param_elem
if children_elem is not None:
# At this time, we can assume having children only
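With the fallback above, a tool-test `<param>` can omit `value` entirely when it carries a `location`; the implied value becomes the URL's basename. A tiny illustration of that fallback on a standalone element, using plain ElementTree rather than Galaxy's parser (the element content is made up):

```python
import os
from xml.etree import ElementTree

# A test parameter that only points at a remote file.
param_elem = ElementTree.fromstring(
    '<param name="input1" location="https://example.org/data/sample.fastq.gz"/>'
)
attrib = dict(param_elem.attrib)

value = attrib.get("value")
if value is None and attrib.get("location", None) is not None:
    # Fall back to the file name embedded in the URL.
    value = os.path.basename(attrib["location"])

print(value)  # sample.fastq.gz
```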
41 changes: 36 additions & 5 deletions lib/galaxy/tools/cache.py
@@ -6,6 +6,10 @@
import tempfile
import zlib
from threading import Lock
from typing import (
Dict,
Optional,
)

from sqlitedict import SqliteDict

@@ -134,7 +138,7 @@ class ToolCache:

def __init__(self):
self._lock = Lock()
self._hash_by_tool_paths = {}
self._hash_by_tool_paths: Dict[str, ToolHash] = {}
self._tools_by_path = {}
self._tool_paths_by_id = {}
self._macro_paths_by_id = {}
@@ -195,9 +199,12 @@ def _should_cleanup(self, config_filename):
try:
new_mtime = os.path.getmtime(config_filename)
tool_hash = self._hash_by_tool_paths.get(config_filename)
if tool_hash.modtime < new_mtime:
if md5_hash_file(config_filename) != tool_hash.hash:
if tool_hash and tool_hash.modtime_less_than(new_mtime):
if not tool_hash.hash_equals(md5_hash_file(config_filename)):
return True
else:
# No change of content, so not necessary to calculate the md5 checksum every time
tool_hash.modtime = new_mtime
tool = self._tools_by_path[config_filename]
for macro_path in tool._macro_paths:
new_mtime = os.path.getmtime(macro_path)
@@ -256,13 +263,37 @@ def reset_status(self):


class ToolHash:
def __init__(self, path, modtime=None, lazy_hash=False):
def __init__(self, path: str, modtime: Optional[float] = None, lazy_hash: bool = False):
self.path = path
self.modtime = modtime or os.path.getmtime(path)
self._modtime = modtime or os.path.getmtime(path)
self._tool_hash = None
if not lazy_hash:
self.hash # noqa: B018

def modtime_less_than(self, other_modtime: float):
if self._modtime is None:
# For the purposes of the tool cache,
# if we haven't seen the modtime we consider it not equal
return True
return self._modtime < other_modtime

def hash_equals(self, other_hash: Optional[str]):
if self._tool_hash is None or other_hash is None:
# For the purposes of the tool cache,
# if we haven't seen the hash yet we consider it not equal
return False
return self.hash == other_hash

@property
def modtime(self) -> float:
if self._modtime is None:
self._modtime = os.path.getmtime(self.path)
return self._modtime

@modtime.setter
def modtime(self, new_value: float):
self._modtime = new_value

@property
def hash(self):
if self._tool_hash is None:
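The reworked `ToolHash` only pays for an md5 when the cheap mtime comparison says the file may have changed, and it records the newer mtime when the content turns out to be identical. A minimal standalone sketch of that two-step check (a hypothetical `FileFingerprint` helper, not Galaxy's `ToolCache`):

```python
import hashlib
import os


def md5_of(path: str) -> str:
    with open(path, "rb") as fh:
        return hashlib.md5(fh.read()).hexdigest()


class FileFingerprint:
    """Cache a file's mtime and md5; re-hash only when the mtime has advanced."""

    def __init__(self, path: str):
        self.path = path
        self.modtime = os.path.getmtime(path)
        self.digest = md5_of(path)

    def changed(self) -> bool:
        new_mtime = os.path.getmtime(self.path)
        if self.modtime >= new_mtime:
            # mtime did not move, so skip the expensive hash entirely.
            return False
        if md5_of(self.path) != self.digest:
            return True
        # Content is unchanged; remember the newer mtime so the next check
        # stays cheap.
        self.modtime = new_mtime
        return False
```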
23 changes: 16 additions & 7 deletions lib/galaxy/tools/test.py
@@ -184,16 +184,25 @@ def _process_raw_inputs(
param_value = raw_input_dict["value"]
param_extra = raw_input_dict["attributes"]
location = param_extra.get("location")
if param_value is None and location:
# If no value is given, we try to get the file name directly from the URL
param_value = os.path.basename(location)
if not value.type == "text":
param_value = _split_if_str(param_value)
if isinstance(value, galaxy.tools.parameters.basic.DataToolParameter):
if not isinstance(param_value, list):
param_value = [param_value]
for v in param_value:
_add_uploaded_dataset(context.for_state(), v, param_extra, value, required_files)
if location and value.multiple:
# We get the input/s from the location which can be a list of urls separated by commas
locations = _split_if_str(location)
param_value = []
for location in locations:
v = os.path.basename(location)
param_value.append(v)
# param_extra should contain only the corresponding location
extra = dict(param_extra)
extra["location"] = location
_add_uploaded_dataset(context.for_state(), v, extra, value, required_files)
else:
if not isinstance(param_value, list):
param_value = [param_value]
for v in param_value:
_add_uploaded_dataset(context.for_state(), v, param_extra, value, required_files)
processed_value = param_value
elif isinstance(value, galaxy.tools.parameters.basic.DataCollectionToolParameter):
assert "collection" in param_extra
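For `multiple` data parameters, the test parser above now fans a comma-separated `location` out into one uploaded dataset per URL, each named after that URL's basename and each carrying only its own `location` in the per-file attributes. A small sketch of just that fan-out step (the `expand_locations` helper is made up for illustration):

```python
import os
from typing import Dict, List, Tuple


def expand_locations(location: str, extra: Dict[str, str]) -> List[Tuple[str, Dict[str, str]]]:
    """Split a comma-separated list of URLs into (file_name, attributes) pairs."""
    results = []
    for url in [part.strip() for part in location.split(",") if part.strip()]:
        per_file_extra = dict(extra)
        # Each file keeps only its own location, mirroring the loop above.
        per_file_extra["location"] = url
        results.append((os.path.basename(url), per_file_extra))
    return results


# expand_locations("https://example.org/a.fasta,https://example.org/b.fasta", {})
# -> [("a.fasta", {"location": "https://example.org/a.fasta"}),
#     ("b.fasta", {"location": "https://example.org/b.fasta"})]
```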
13 changes: 7 additions & 6 deletions lib/galaxy/web_stack/__init__.py
@@ -43,8 +43,9 @@ def get_app_kwds(cls, config_section, app_name=None, for_paste_app=False):
return {}

@classmethod
def register_postfork_function(cls, f, *args, **kwargs):
f(*args, **kwargs)
def register_postfork_function(cls, f, *args, post_fork_only=False, **kwargs):
if not post_fork_only:
f(*args, **kwargs)

def __init__(self, app=None, config=None):
self.app = app
@@ -191,15 +192,15 @@ class GunicornApplicationStack(ApplicationStack):
late_postfork_thread: threading.Thread

@classmethod
def register_postfork_function(cls, f, *args, **kwargs):
def register_postfork_function(cls, f, *args, post_fork_only=False, **kwargs):
# do_post_fork determines if we need to run postfork functions
if cls.do_post_fork:
# if so, we call ApplicationStack.late_postfork once after forking ...
if not cls.postfork_functions:
os.register_at_fork(after_in_child=cls.late_postfork)
# ... and store everything we need to run in ApplicationStack.postfork_functions
cls.postfork_functions.append(lambda: f(*args, **kwargs))
else:
elif not post_fork_only:
f(*args, **kwargs)

@classmethod
@@ -300,8 +301,8 @@ def application_stack_log_formatter():
return logging.Formatter(fmt=application_stack_class().log_format)


def register_postfork_function(f, *args, **kwargs):
application_stack_class().register_postfork_function(f, *args, **kwargs)
def register_postfork_function(f, *args, post_fork_only=False, **kwargs):
    application_stack_class().register_postfork_function(f, *args, post_fork_only=post_fork_only, **kwargs)


def get_app_kwds(config_section, app_name=None):
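`register_postfork_function` now threads a `post_fork_only` flag through both stack implementations: under Gunicorn the callable still runs in every forked worker, while stacks that would otherwise call it immediately in the current process skip it. A sketch of how a caller opts in, mirroring the `app.py` hunk earlier in this commit (`reload_toolbox` and `register_postfork_function` are the names used in this diff; the `wire_postfork` wrapper is made up for illustration and only runs inside a Galaxy environment):

```python
from galaxy.queue_worker import reload_toolbox
from galaxy.web_stack import register_postfork_function


def wire_postfork(app):
    # Refresh the toolbox in each forked web worker, but do nothing in a
    # single-process stack, where the current process' toolbox is already
    # up to date.
    register_postfork_function(
        lambda: reload_toolbox(app, save_integrated_tool_panel=False),
        post_fork_only=True,
    )
```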
3 changes: 1 addition & 2 deletions lib/galaxy/webapps/base/webapp.py
@@ -306,7 +306,6 @@ def __init__(
self.user_manager = app[UserManager]
self.session_manager = app[GalaxySessionManager]
super().__init__(environ)
self.expunge_all()
config = self.app.config
self.debug = asbool(config.get("debug", False))
x_frame_options = getattr(config, "x_frame_options", None)
@@ -784,7 +783,7 @@ def _associate_user_history(self, user, prev_galaxy_session=None):
history = None
set_permissions = False
try:
users_last_session = user.galaxy_sessions[0]
users_last_session = user.current_galaxy_session
except Exception:
users_last_session = None
if (
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -838,7 +838,7 @@ def delete_typed(
self,
response: Response,
trans: ProvidesHistoryContext = DependsOnTrans,
history_id: DecodedDatabaseIdField = HistoryIDPathParam,
history_id: str = Path(..., description="History ID or any string."),
id: DecodedDatabaseIdField = HistoryItemIDPathParam,
type: HistoryContentType = ContentTypePathParam,
serialization_params: SerializationParams = Depends(query_serialization_params),
6 changes: 0 additions & 6 deletions lib/galaxy/webapps/galaxy/buildapp.py
@@ -1219,12 +1219,6 @@ def wrap_in_middleware(app, global_conf, application_stack, **local_conf):
normalize_remote_user_email=conf.get("normalize_remote_user_email", False),
),
)
# The recursive middleware allows for including requests in other
# requests or forwarding of requests, all on the server side.
if asbool(conf.get("use_recursive", True)):
from paste import recursive

app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))

# Error middleware
app = wrap_if_allowed(app, stack, ErrorMiddleware, args=(conf,))
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -77,7 +77,7 @@ def get_value(self, trans, grid, user):
class LastLoginColumn(grids.GridColumn):
def get_value(self, trans, grid, user):
if user.galaxy_sessions:
return self.format(user.galaxy_sessions[0].update_time)
return self.format(user.current_galaxy_session.update_time)
return "never"

def sort(self, trans, query, ascending, column_name=None):
6 changes: 5 additions & 1 deletion lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -253,13 +253,17 @@ def get_edit(self, trans, dataset_id=None, **kwd):
# datatype changing
datatype_options = [(ext_name, ext_id) for ext_id, ext_name in ldatatypes]
datatype_disable = len(datatype_options) == 0
datatype_input_default_value = None
current_datatype = trans.app.datatypes_registry.datatypes_by_extension.get(data.ext)
if current_datatype and current_datatype.is_datatype_change_allowed():
datatype_input_default_value = data.ext
datatype_inputs = [
{
"type": "select",
"name": "datatype",
"label": "New Type",
"options": datatype_options,
"value": [ext_id for ext_id, ext_name in ldatatypes if ext_id == data.ext],
"value": datatype_input_default_value,
"help": "This will change the datatype of the existing dataset but not modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.",
}
]
6 changes: 0 additions & 6 deletions lib/galaxy/webapps/reports/buildapp.py
@@ -86,12 +86,6 @@ def wrap_in_middleware(app, global_conf, application_stack, **local_conf):
# wrapped around the application (it can interact poorly with
# other middleware):
app = wrap_if_allowed(app, stack, httpexceptions.make_middleware, name="paste.httpexceptions", args=(conf,))
# The recursive middleware allows for including requests in other
# requests or forwarding of requests, all on the server side.
if asbool(conf.get("use_recursive", True)):
from paste import recursive

app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))

# Error middleware
app = wrap_if_allowed(app, stack, ErrorMiddleware, args=(conf,))
5 changes: 3 additions & 2 deletions lib/galaxy/webapps/reports/controllers/users.py
@@ -170,8 +170,9 @@ def name_to_num(name):
.filter(galaxy.model.User.table.c.deleted == false())
.order_by(galaxy.model.User.table.c.email)
):
if user.galaxy_sessions:
last_galaxy_session = user.galaxy_sessions[0]
current_galaxy_session = user.current_galaxy_session
if current_galaxy_session:
last_galaxy_session = current_galaxy_session
if last_galaxy_session.update_time < cutoff_time:
users.append((user.email, last_galaxy_session.update_time.strftime("%Y-%m-%d")))
else:
Expand Down