Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

295 local configuration #296

Merged
merged 4 commits into from
Jun 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion abm/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.9.0-dev.8
2.9.0-dev.9
36 changes: 36 additions & 0 deletions abm/lib/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import os
import subprocess
import sys
from pathlib import Path

from math import ceil

import bioblend.galaxy
Expand Down Expand Up @@ -405,6 +407,40 @@ def find_history(gi, name_or_id):
return history[0]['id']


def find_dataset(gi, history_id, name_or_id):
    """Resolve a dataset name or ID to a dataset ID.

    The value is first tried as a dataset *name*; if that lookup fails it
    is retried as a dataset *ID*.

    :param gi: a connected bioblend GalaxyInstance
    :param history_id: the history to search. NOTE(review): currently
        unused by either lookup; kept for interface compatibility.
    :param name_or_id: the dataset name or dataset ID to look up
    :return: the dataset ID as a string, or None if nothing matched
    """
    # First assume we were given a dataset name.
    try:
        dataset = gi.datasets.show_dataset(name=name_or_id)
        return dataset['id']
    except Exception:
        # Not a known name (or the lookup raised); fall through to the
        # ID-based lookup. Narrowed from a bare `except:` so that
        # SystemExit/KeyboardInterrupt are not swallowed.
        pass

    # Fall back to treating the value as a dataset ID.
    try:
        dataset = gi.datasets.show_dataset(name_or_id)
        return dataset['id']
    except Exception:
        pass
    return None


def find_config(name: str) -> str:
    """Locate a configuration file by name.

    A project-local ``.abm`` directory takes precedence over the user's
    ``~/.abm`` directory.

    :param name: the configuration file name, e.g. ``datasets.yml``
    :return: the path of the first match, or None if neither location
        has the file
    """
    local = f".abm/{name}"
    if os.path.exists(".abm") and os.path.exists(local):
        return local
    user_config = os.path.join(Path.home(), ".abm", name)
    return user_config if os.path.exists(user_config) else None


def _get_dataset_data(gi, name_or_id):
print(f"Getting dataset data for {name_or_id}")
def make_result(data):
Expand Down
82 changes: 50 additions & 32 deletions abm/lib/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import yaml
from bioblend.galaxy import dataset_collections
from common import (Context, _get_dataset_data, _make_dataset_element, connect,
find_history, print_json)
find_history, print_json, find_config, find_dataset)


def do_list(context: Context, argv: list):
Expand Down Expand Up @@ -162,45 +162,55 @@ def collection(context: Context, args: list):


def import_from_config(context: Context, args: list):
    """Import one or more datasets into a Galaxy server from URLs defined
    in a ``datasets.yml`` configuration file.

    :param context: the abm Context used to connect to the server
    :param args: command-line arguments; one or more dataset KEYs plus
        optional --create/--history/--name/--file flags
    :return: None. Results (or errors) are printed to stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--create', help='create a new history for the dataset', required=False, default=None)
    parser.add_argument('-f', '--file', help='use instead of the datasets.yml', required=False, default=None)
    parser.add_argument('--history', help='add datasets to the given history', required=False, default=None)
    parser.add_argument('-n', '--name', help='set the name of the dataset', required=False, default=None)
    parser.add_argument('keys', help='the key of the dataset to import', nargs='+')
    argv = parser.parse_args(args)

    gi = None
    history = None
    kwargs = {}

    if argv.name is not None:
        # Renaming only makes sense when importing a single dataset.
        if len(argv.keys) > 1:
            print("ERROR: cannot specify --name with multiple keys")
            return
        kwargs['file_name'] = argv.name

    if argv.create is not None and argv.history is not None:
        print("ERROR: cannot specify both --create and --history")
        return

    if argv.create is not None:
        gi = connect(context)
        history = gi.histories.create_history(argv.create).get('id')
    if argv.history is not None:
        gi = connect(context)
        history = find_history(gi, argv.history)

    if argv.file is not None:
        configfile = argv.file
        if not os.path.exists(configfile):
            print(f"ERROR: the specified file {configfile} was not found")
            return
    else:
        configfile = find_config("datasets.yml")
        if configfile is None:
            # BUG FIX: the original interpolated the (None) configfile
            # into this message, printing "Please create None".
            print("ERROR: ABM has not been configured to import datasets.")
            print("Please create a datasets.yml file in ~/.abm or ./.abm")
            return
    with open(configfile, 'r') as f:
        datasets = yaml.safe_load(f)

    # Connect lazily: --create/--history may have connected already.
    if gi is None:
        gi = connect(context)

    for key in argv.keys:
        if key not in datasets:
            print(f"ERROR: dataset {key} has not been defined.")
        else:
            url = datasets[key]
            print(f"Importing {key} from {url}")
            response = gi.tools.put_url(url, history, **kwargs)
            print(json.dumps(response, indent=4))


def _import_from_url(gi, history, url, **kwargs):
Expand Down Expand Up @@ -240,9 +250,17 @@ def rename(context: Context, args: list):
print("ERROR: please provide the history ID, dataset ID, and new name.")
return
gi = connect(context)
response = gi.histories.update_dataset(args[0], args[1], name=args[2])
result = {'state': response['state'], 'name': response['name']}
print(json.dumps(result, indent=4))
hid = find_history(gi, args[0])
if hid is None:
print("ERROR: no such history")
return
dsid = find_dataset(gi, hid, args[1])
if dsid is None:
print("ERROR: no such dataset")
return
response = gi.histories.update_dataset(hid, dsid, name=args[2])
# result = {'state': response['state'], 'name': response['name']}
print(json.dumps(response, indent=4))


def test(context: Context, args: list):
Expand Down
68 changes: 21 additions & 47 deletions abm/lib/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from bioblend.galaxy.objects import GalaxyInstance
from lib.common import (Context, connect, find_history, parse_profile,
print_json, summarize_metrics, print_markdown_table,
get_float_key, get_str_key, print_table_header, try_for)
get_float_key, get_str_key, print_table_header, try_for, find_config)

#
# History related functions
Expand Down Expand Up @@ -194,55 +194,29 @@ def _import(context: Context, args: list):


def himport(context: Context, args: list):
def error_message(msg='Invalid command'):
print(f"ERROR: {msg}")
print(f"USAGE: {sys.argv[0]} history import SERVER HISTORY_ID JEHA_ID")
print(f" {sys.argv[0]} history import http://GALAXY_SERVER_URL")
print(f" {sys.argv[0]} history import [dna|rna]")

wait = True
if '-n' in args:
args.remove('-n')
wait = False
if '--no-wait' in args:
args.remove('--no-wait')
wait = False
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--no-wait', action='store_true', help='Do not wait for the import to complete', default=False)
parser.add_argument('-f', '--file', help='Use the specified histories.yml file', required=False, default=None)
parser.add_argument('identifier', help='The history alias or URL to import')
argv = parser.parse_args(args)

if len(args) == 1:
if 'http' in args[0]:
url = args[0]
wait = not argv.no_wait
if argv.identifier.startswith('http'):
url = argv.identifier
else:
if argv.file is not None:
config = argv.file
else:
datasets = None
config = f'{os.path.dirname(os.path.abspath(__file__))}/histories.yml'
# First load the histories.yml file that is packaged with abm
if os.path.exists(config):
with open(config, 'r') as f:
datasets = yaml.safe_load(f)
# Then load the user histories.yml, if any
userfile = os.path.join(Path.home(), ".abm", "histories.yml")
if os.path.exists(userfile):
if datasets is None:
datasets = {}
with open(userfile, 'r') as f:
userdata = yaml.safe_load(f)
for key, item in userdata.items():
datasets[key] = item
if datasets is None:
error_message("No history URLs have been configured.")
return
if not args[0] in datasets:
error_message('Please specify a URL or name of the history to import')
return
url = datasets[args[0]]
elif len(args) == 3:
server, key, kube, master = parse_profile(args[0])
if server is None:
error_message(f"Invalid server profile name: {args[0]}")
config = find_config("histories.yml")
if config is None:
print("ERROR: No histories.yml file found.")
return
url = f"{server}history/export_archive?id={args[1]}&jeha_id={args[2]}"
else:
error_message()
return
with open(config, 'r') as f:
histories = yaml.safe_load(f)
if argv.identifier not in histories:
print(f"ERROR: No such history {argv.identifier}")
return
url = histories[argv.identifier]

gi = connect(context)
print(f"Importing history from {url}")
Expand Down
4 changes: 2 additions & 2 deletions abm/lib/menu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,8 @@
help: download a dataset from the server
- name: ['import', 'imp']
handler: dataset.import_from_config
params: KEY [--hs|--hist|--history HISTORY_ID | -c|--create "History name"]
help: imports a dataset to the server from a URL specified in the datasets.yml config file.
params: '[--hs|--hist|--history HISTORY_ID | -c|--create "History name"] KEY [KEY...]'
help: imports one or more datasets to the server from a URL specified in the datasets.yml config file.
- name: ['list', 'ls']
handler: dataset.do_list
help: lists all the datasets on the server
Expand Down
16 changes: 9 additions & 7 deletions abm/lib/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import requests
import yaml
from common import Context, connect, summarize_metrics, print_markdown_table, get_float_key, get_str_key, \
print_table_header
print_table_header, find_config
from planemo.galaxy.workflows import install_shed_repos
from planemo.runnable import for_path, for_uri

Expand Down Expand Up @@ -118,26 +118,28 @@ def import_from_url(context: Context, args: list):


def import_from_config(context: Context, args: list):
print("Importing workflow from configuration")
key = None
install = True
config = None
for arg in args:
if arg in ['-n', '--no-tools']:
print("Skipping tools")
install = False
elif arg in ['-f', '--file']:
config = arg
else:
key = arg
if key is None:
print("ERROR: no workflow ID given")
return


userfile = os.path.join(Path.home(), ".abm", "workflows.yml")
if not os.path.exists(userfile):
if config is None:
config = find_config("workflows.yml")
if config is None:
print("ERROR: this instance has not been configured to import workflows.")
print(f"Please configure {userfile} to enable workflow imports")
print(f"Please configure a workflows.yml file to enable imports")
return
with open(userfile, 'r') as f:
with open(config, 'r') as f:
workflows = yaml.safe_load(f)
if not key in workflows:
print(f"ERROR: no such workflow: {key}")
Expand Down