Feature: Add index all data step (#136)
* move the submission_all_data concern to a submission process service

* add index_all step to submission parsing steps

* add index all data submission status
syphax-bouazzouni authored Apr 21, 2024
1 parent d5ff793 commit 135b0df
Showing 8 changed files with 205 additions and 232 deletions.

This file was deleted.

64 changes: 14 additions & 50 deletions lib/ontologies_linked_data/models/concerns/submission_process.rb
@@ -7,83 +7,47 @@ def process_submission(logger, options = {})
   end
 
   def generate_missing_labels(logger)
-    puts 'Start generate_mission_label'
-    time = Benchmark.realtime do
-      LinkedData::Services::GenerateMissingLabels.new(self).process(logger, file_path: self.master_file_path)
-    end
-    puts "generate_mission_label ended in #{time}"
+    LinkedData::Services::GenerateMissingLabels.new(self).process(logger, file_path: self.master_file_path)
   end
 
   def generate_obsolete_classes(logger)
-    puts 'Start submission_obsolete_classes'
-    time = Benchmark.realtime do
-      LinkedData::Services::ObsoleteClassesGenerator.new(self).process(logger, file_path: self.master_file_path)
-    end
-    puts "submission_obsolete_classes ended in #{time}"
+    LinkedData::Services::ObsoleteClassesGenerator.new(self).process(logger, file_path: self.master_file_path)
   end
 
   def extract_metadata(logger, options = {})
-    puts 'Start extract metadata'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionMetadataExtractor.new(self).process(logger, options)
-    end
-    puts "Extract metadata ended in #{time}"
+    LinkedData::Services::SubmissionMetadataExtractor.new(self).process(logger, options)
   end
 
   def diff(logger, older)
-    puts 'Start diff'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionDiffGenerator.new(self).diff(logger, older)
-    end
-    puts "Diff ended in #{time}"
+    LinkedData::Services::SubmissionDiffGenerator.new(self).diff(logger, older)
   end
 
   def generate_diff(logger)
-    puts 'Start diff'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionDiffGenerator.new(self).process(logger)
-    end
-    puts "Diff ended in #{time}"
+    LinkedData::Services::SubmissionDiffGenerator.new(self).process(logger)
   end
 
+  def index_all(logger, commit: true)
+    LinkedData::Services::OntologySubmissionAllDataIndexer.new(self).process(logger, commit: commit)
+  end
+
   def index_terms(logger, commit: true, optimize: true)
-    puts 'Start index terms'
-    time = Benchmark.realtime do
-      LinkedData::Services::OntologySubmissionIndexer.new(self).process(logger, commit: commit, optimize: optimize)
-    end
-    puts "Index terms ended in #{time}"
+    LinkedData::Services::OntologySubmissionIndexer.new(self).process(logger, commit: commit, optimize: optimize)
   end
 
   def index_properties(logger, commit: true, optimize: true)
-    puts 'Start index properties'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionPropertiesIndexer.new(self).process(logger, commit: commit, optimize: optimize)
-    end
-    puts "Index properties ended in #{time}"
+    LinkedData::Services::SubmissionPropertiesIndexer.new(self).process(logger, commit: commit, optimize: optimize)
   end
 
   def archive
-    puts 'Start archive'
-    time = Benchmark.realtime do
-      LinkedData::Services::OntologySubmissionArchiver.new(self).process
-    end
-    puts "Archive ended in #{time}"
+    LinkedData::Services::OntologySubmissionArchiver.new(self).process
   end
 
   def generate_rdf(logger, reasoning: true)
-    puts 'Start generate RDF'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionRDFGenerator.new(self).process(logger, reasoning: reasoning)
-    end
-    puts "Generate RDF ended in #{time}"
+    LinkedData::Services::SubmissionRDFGenerator.new(self).process(logger, reasoning: reasoning)
   end
 
   def generate_metrics(logger)
-    puts 'Start generate metrics'
-    time = Benchmark.realtime do
-      LinkedData::Services::SubmissionMetricsCalculator.new(self).process(logger)
-    end
-    puts "Generate metrics ended in #{time}"
+    LinkedData::Services::SubmissionMetricsCalculator.new(self).process(logger)
   end
 
 end
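After this change every processing step in SubmissionProcessable is a one-line delegation to a service object, and the new index_all step follows the same pattern, handing the work to LinkedData::Services::OntologySubmissionAllDataIndexer. A minimal usage sketch follows; the acronym, the Ontology.find/latest_submission lookup, and the Logger setup are illustrative assumptions, and only the index_all call itself comes from this diff:

    require 'logger'

    logger = Logger.new($stdout)

    # Illustrative lookup (not part of this diff): fetch the latest submission
    # of an ontology through the usual Goo/LinkedData query API.
    ontology   = LinkedData::Models::Ontology.find('STY').first
    submission = ontology.latest_submission

    # New step added by this commit: index the submission's data into the
    # :ontology_data search collection, committing at the end.
    submission.index_all(logger, commit: true)
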
11 changes: 10 additions & 1 deletion lib/ontologies_linked_data/models/ontology_submission.rb
@@ -12,7 +12,6 @@ module Models
 
 class OntologySubmission < LinkedData::Models::Base
 
-  include LinkedData::Concerns::OntologySubmission::IndexAllData
   include LinkedData::Concerns::SubmissionProcessable
   include LinkedData::Concerns::OntologySubmission::Validators
   include LinkedData::Concerns::OntologySubmission::UpdateCallbacks
@@ -295,6 +294,16 @@ def self.copy_file_repository(acronym, submissionId, src, filename = nil)
     return dst
   end
 
+  def self.clear_indexed_content(ontology)
+    conn = Goo.init_search_connection(:ontology_data)
+    begin
+      conn.delete_by_query("ontology_t:\"#{ontology}\"")
+    rescue StandardError => e
+      #puts e.message
+    end
+    conn
+  end
+
   def valid?
     valid_result = super
     return false unless valid_result
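The new OntologySubmission.clear_indexed_content class method complements the indexing step: it opens the :ontology_data search connection, deletes every document whose ontology_t field matches the given acronym, swallows StandardError (for example when the collection is empty or missing), and returns the connection. A hedged sketch of how it might be used before re-indexing; the acronym is illustrative:

    # Drop any previously indexed documents for this ontology from the
    # :ontology_data collection, then let index_all rebuild them.
    LinkedData::Models::OntologySubmission.clear_indexed_content('STY')

Because errors are rescued and ignored, callers can run this unconditionally, even when nothing has been indexed for that ontology yet.
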
3 changes: 3 additions & 0 deletions lib/ontologies_linked_data/models/submission_status.rb
@@ -7,6 +7,7 @@ class SubmissionStatus < LinkedData::Models::Base
       "RDF_LABELS", "ERROR_RDF_LABELS",
       "OBSOLETE", "ERROR_OBSOLETE",
       "INDEXED", "ERROR_INDEXED",
+      "INDEXED_ALL_DATA", "ERROR_INDEXED_ALL_DATA",
       "INDEXED_PROPERTIES", "ERROR_INDEXED_PROPERTIES",
       "METRICS", "ERROR_METRICS",
       "ANNOTATOR", "ERROR_ANNOTATOR",
@@ -18,6 +19,8 @@ class SubmissionStatus < LinkedData::Models::Base
       "RDF" => "Parsed successfully",
       "RDF_ERROR" => "Error parsing",
       "INDEXED" => "Indexed terms for search",
+      "INDEXED_ALL_DATA" => "Indexed all the data of the resource",
+      "ERROR_INDEXED_ALL_DATA" => "Error indexing all the data of the resource",
       "ERROR_INDEXED" => "Error indexing terms for search",
       "INDEXED_PROPERTIES" => "Indexed properties for search",
       "ERROR_INDEXED_PROPERTIES" => "Error indexing properties for search",
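Like the other pipeline steps, the new step gets a paired success/error code with human-readable descriptions. Below is a hedged sketch of the status bookkeeping a processing service typically performs around such a step, assuming submission and logger are already in scope; add_submission_status and the SubmissionStatus.find lookup follow the pattern of the existing indexer services and are written from memory, not copied from this diff:

    begin
      # ... index the submission's data into the :ontology_data collection ...
      submission.add_submission_status(
        LinkedData::Models::SubmissionStatus.find('INDEXED_ALL_DATA').first
      )
    rescue StandardError => e
      logger.error("index_all failed: #{e.message}")
      submission.add_submission_status(
        LinkedData::Models::SubmissionStatus.find('ERROR_INDEXED_ALL_DATA').first
      )
    ensure
      submission.save
    end
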