
Commit

Merge pull request galaxyproject#17097 from mvdbeek/fix_merge_forward_file_name

Fix `.file_name` access in merge forward
mvdbeek authored Nov 29, 2023
2 parents 3502290 + c51a15e commit 8874d69
Showing 1 changed file with 4 additions and 4 deletions.
lib/galaxy/datatypes/larch.py: 4 additions & 4 deletions
@@ -88,7 +88,7 @@ def extract_arg(args: List[str], arg_name: str):
             except ValueError:
                 return
 
-        headers = get_headers(dataset.file_name, sep=" = ", count=3, comment_designator="#")
+        headers = get_headers(dataset.get_file_name(), sep=" = ", count=3, comment_designator="#")
         args = []
         for header in headers:
             if header[0] == "@args":
@@ -104,7 +104,7 @@ def extract_arg(args: List[str], arg_name: str):
 
     def set_peek(self, dataset: DatasetProtocol, **kwd) -> None:
         if not dataset.dataset.purged:
-            dataset.peek = get_file_peek(dataset.file_name)
+            dataset.peek = get_file_peek(dataset.get_file_name())
             dataset.info = (
                 f"atsym: {dataset.metadata.atsym}\n"
                 f"bkg_e0: {dataset.metadata.bkg_e0}\n"
@@ -201,7 +201,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> None:
         Extract metadata from TITLE
         """
         title_block = ""
-        headers = get_headers(dataset.file_name, sep=None, comment_designator="*")
+        headers = get_headers(dataset.get_file_name(), sep=None, comment_designator="*")
         for header in headers:
             if header and header[0] == "TITLE":
                 title_block += " ".join(header[1:]) + "\n"
@@ -210,7 +210,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> None:
 
     def set_peek(self, dataset: DatasetProtocol, **kwd) -> None:
        if not dataset.dataset.purged:
-            dataset.peek = get_file_peek(dataset.file_name)
+            dataset.peek = get_file_peek(dataset.get_file_name())
             dataset.info = dataset.metadata.title_block
 
         else:
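
All four hunks make the same substitution: datatype code that previously read the dataset path through the .file_name attribute now calls dataset.get_file_name(). Below is a minimal, self-contained sketch of that calling pattern. The FakeDataset class and peek_sketch helper are illustrative stand-ins invented for this note, not Galaxy's real classes, and the reason suggested in the comments for preferring the accessor is an assumption rather than something stated in the commit.

# Minimal sketch of the corrected access pattern (names are illustrative
# stand-ins, not Galaxy's actual API).

class FakeDataset:
    """Stand-in for the dataset object handed to set_peek()/set_meta()."""

    def __init__(self, path: str):
        self._path = path

    def get_file_name(self) -> str:
        # Assumption: in Galaxy this accessor may do more than return a
        # stored attribute (e.g. resolve the path lazily), which is why
        # callers invoke the method rather than a bare .file_name attribute.
        return self._path


def peek_sketch(dataset: FakeDataset, line_count: int = 3) -> str:
    """Roughly what the fixed set_peek() does: read a short text preview."""
    with open(dataset.get_file_name()) as handle:  # method call, not .file_name
        return "".join(next(handle, "") for _ in range(line_count))

Calling peek_sketch(FakeDataset("/tmp/example.prj")) would return the first three lines of that file, loosely mirroring what get_file_peek() produces for the real datatypes in the diff above.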
