linting cleanups
hexylena committed Dec 20, 2023
1 parent c0c8f53 commit 067687b
Showing 4 changed files with 26 additions and 33 deletions.
bin/fetch-categories.rb (9 changes: 2 additions & 7 deletions)
@@ -6,7 +6,7 @@

# Get the list of toolcats
def fetch_toolcats(server)
uri = URI.parse("#{server}")
uri = URI.parse(server.to_s)
request = Net::HTTP::Get.new(uri)
req_options = {
use_ssl: uri.scheme == 'https',
@@ -45,16 +45,11 @@ def fetch_toolcats(server)
values = [eu, org, aus].compact
# values = [org].compact


# Majority answer wins
# set that value to toolcats[k]
# If there is no majority, pick one.
# print("#{k} - #{values.length} => #{values.uniq.compact.length}\n")
if values.length.positive?
toolcats[k] = values.max_by { |v| v['count'] }
else
toolcats[k] = nil
end
toolcats[k] = (values.max_by { |v| v['count'] } if values.length.positive?)
end

# Write the list to a file
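The removed if/else collapses into Ruby's conditional-modifier assignment. A minimal sketch of the equivalence, using hypothetical vote counts shaped like the hashes the script compares (not data from the real servers):

    # Hypothetical per-server votes for one tool's category.
    values = [
      { 'category' => 'Metagenomics', 'count' => 3 },
      { 'category' => 'Sequence Analysis', 'count' => 1 }
    ]

    # Old form: explicit branches, with nil assigned when there are no votes.
    majority = if values.length.positive?
                 values.max_by { |v| v['count'] }
               else
                 nil
               end

    # New form: a parenthesised conditional modifier evaluates to nil when the
    # guard is false, so the else branch becomes implicit.
    majority = (values.max_by { |v| v['count'] } if values.length.positive?)

Both assignments leave majority pointing at the entry with the highest count, or nil when values is empty.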
bin/lint.rb (4 changes: 3 additions & 1 deletion)
@@ -1110,7 +1110,9 @@ def self.enumerate_symlinks
end

def self.enumerate_lintable
enumerate_type(/bib$/) + enumerate_type(/md$/) + enumerate_type(/md$/, root_dir: 'faqs') + enumerate_type(/md$/, root_dir: 'news')
enumerate_type(/bib$/) + enumerate_type(/md$/) + enumerate_type(/md$/,
root_dir: 'faqs') + enumerate_type(/md$/,
root_dir: 'news')
end

def self.enumerate_all
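The rewrapped enumerate_lintable call is one expression split across lines to satisfy a line-length limit. A minimal sketch of the pattern with a Dir.glob stand-in for enumerate_type (the real method and its default root_dir live elsewhere in bin/lint.rb; 'topics' here is an assumption):

    # Stand-in for enumerate_type: paths under root_dir matching the pattern.
    def enumerate_type(pattern, root_dir: 'topics')
      Dir.glob("#{root_dir}/**/*").select { |path| path.match?(pattern) }
    end

    # Array#+ concatenates each listing; breaking the chain after an operator
    # or inside an argument list changes the layout, not the result.
    lintable = enumerate_type(/bib$/) + enumerate_type(/md$/) +
               enumerate_type(/md$/, root_dir: 'faqs') +
               enumerate_type(/md$/, root_dir: 'news')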
bin/workflow-test.rb (5 changes: 3 additions & 2 deletions)
@@ -1,16 +1,17 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require './_plugins/gtn/usegalaxy'

require 'open3'
require 'json'

GALAXIES = Gtn::Usegalaxy.servers.select{|s| s[:id] == "eu"}.map do |server|
GALAXIES = Gtn::Usegalaxy.servers.select { |s| s[:id] == 'eu' }.to_h do |server|
[
server[:id],
{ url: server[:url], key: ENV.fetch("GALAXY_#{server[:id].upcase}_KEY", 'NONE') }
]
end.to_h
end

def test_workflow(workflow_file, galaxy_id)
directory = File.dirname(workflow_file)
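The GALAXIES rewrite uses Enumerable#to_h with a block (Ruby 2.6+), replacing the map-then-to_h chain. A minimal sketch of the equivalence with made-up server entries (the real ones come from Gtn::Usegalaxy.servers):

    servers = [
      { id: 'eu', url: 'https://usegalaxy.eu' },
      { id: 'org', url: 'https://usegalaxy.org' }
    ]

    # Old style: build [key, value] pairs, then convert the array of pairs.
    old_way = servers.map { |s| [s[:id], { url: s[:url] }] }.to_h

    # New style: to_h with a block builds the hash in a single pass.
    new_way = servers.to_h { |s| [s[:id], { url: s[:url] }] }

    raise 'mismatch' unless old_way == new_way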
bin/workflows-fetch.rb (41 changes: 18 additions & 23 deletions)
@@ -5,18 +5,16 @@
require 'yaml'
require './_plugins/gtn/usegalaxy'


def request(url)
uri = URI.parse(url)
request = Net::HTTP::Get.new(uri)
request['Accept'] = 'application/json'
req_options = {
use_ssl: uri.scheme == 'https',
}
response = Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|
Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|
http.request(request)
end
response
end
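The request change can drop the temporary response variable because a Ruby method returns its last evaluated expression, and Net::HTTP.start returns the value of its block. A minimal standalone sketch of that shape (example.org is only a placeholder URL):

    require 'net/http'
    require 'uri'

    # The response returned from the block is the value of Net::HTTP.start,
    # which is also the last expression in the method, so nothing needs to be
    # bound to a name before being returned.
    def fetch(url)
      uri = URI.parse(url)
      Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(Net::HTTP::Get.new(uri))
      end
    end

    puts fetch('https://example.org').code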

# Get the list of workflows
@@ -33,34 +31,32 @@ def fetch_workflows(server)
end
end

def fetch_workflowhub()
projects = JSON.parse(request("https://workflowhub.eu/projects").body)
project_mapping = projects['data'].map{|p| [p['id'], p['attributes']['title']]}.to_h
def fetch_workflowhub
projects = JSON.parse(request('https://workflowhub.eu/projects').body)
project_mapping = projects['data'].to_h { |p| [p['id'], p['attributes']['title']] }

response = request("https://workflowhub.eu/workflows?filter[workflow_type]=galaxy")
response = request('https://workflowhub.eu/workflows?filter[workflow_type]=galaxy')
data = JSON.parse(response.body)
if !data['links']['next'].nil?
puts "ERROR: Cannot yet handle multiple pages"
puts 'ERROR: Cannot yet handle multiple pages'
exit 42
end
puts "INFO: Fetching #{data['data'].length} workflows from WorkflowHub"
data['data'].map.with_index { |w, i|
data['data'].map.with_index do |w, _i|
# {"id"=>"14", "type"=>"workflows", "attributes"=>{"title"=>"Cheminformatics - Docking"}, "links"=>{"self"=>"/workflows/14"}}
wf_info = JSON.parse(request("https://workflowhub.eu#{w['links']['self']}").body)
creator_list = []

creator0 = wf_info['data']['attributes']['creators'][0]
owner = ""
if !creator0.nil?
# Primary
creator_list.push(creator0['given_name'] + " " + creator0['family_name'])
else
if creator0.nil?
# Other creators
other = wf_info['data']['attributes']['other_creators']
if !other.nil? && other.length.positive?
creator_list.push(wf_info['data']['attributes']['other_creators'].split(',').map{|x| x.strip})
else
creator_list.push(wf_info['data']['attributes']['other_creators'].split(',').map(&:strip))
end
else
# Primary
creator_list.push("#{creator0['given_name']} #{creator0['family_name']}")
end
# Projects
wf_info['data']['relationships']['projects']['data'].each do |p|
@@ -76,31 +72,30 @@ def fetch_workflowhub()
'number_of_steps' => wf_info['data']['attributes']['internals']['steps'].length,
'server' => 'https://workflowhub.eu',
'id' => wf_info['data']['id'],
'tags' => wf_info['data']['attributes']['tags'].map{|t| t.gsub(/^name:/, '')},
'tags' => wf_info['data']['attributes']['tags'].map { |t| t.gsub(/^name:/, '') },
'update_time' => wf_info['data']['attributes']['updated_at'],
}
rescue
rescue StandardError
r = nil
end
r
}.reject{|x| x.nil? }
end.compact
end
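Several fetch_workflowhub edits swap explicit blocks for the shorthand forms RuboCop prefers. A minimal sketch of the two idioms used above, on made-up strings and hashes:

    # Symbol#to_proc: map(&:strip) is shorthand for map { |x| x.strip }.
    creators = 'Jane Doe, John Roe '.split(',').map(&:strip)
    # => ["Jane Doe", "John Roe"]

    # Array#compact drops nil entries, replacing reject { |x| x.nil? }.
    workflows = [{ 'id' => '14' }, nil, { 'id' => '27' }].compact
    # => [{"id"=>"14"}, {"id"=>"27"}]

The rescue StandardError change is explicitness only: a bare rescue clause already catches StandardError and its subclasses, so naming the class documents the behaviour without changing it.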


# Parse the response
workflows = Gtn::Usegalaxy.servers.map {|server|
workflows = Gtn::Usegalaxy.servers.map do |server|
workflows = fetch_workflows(server[:url])
puts "INFO: Fetched #{workflows.length} workflows from #{server[:name]}"
workflows
}.flatten
end.flatten
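The last change switches a multi-line brace block to do ... end, the style RuboCop expects for multi-line blocks. A minimal sketch with stand-in servers, showing that chaining .flatten onto the closing end behaves the same as chaining it onto the closing brace:

    servers = [{ name: 'Galaxy EU' }, { name: 'Galaxy ORG' }]

    # Brace form (the old style for this multi-line block).
    old_way = servers.map { |server|
      ["workflows from #{server[:name]}"]
    }.flatten

    # do ... end form, with .flatten chained onto end.
    new_way = servers.map do |server|
      ["workflows from #{server[:name]}"]
    end.flatten

    raise 'mismatch' unless old_way == new_way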

# Cleanup the list
workflows.filter! do |w|
w['published'] == true && w['importable'] == true && w['deleted'] == false && w['hidden'] == false
end

# Add in WFHub workflows
workflows += fetch_workflowhub()
workflows += fetch_workflowhub

# Group by name + owner
cleaned = workflows.group_by { |w| "#{w['name']}<WFID>#{w['owner']}" }
