diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index bf6bb1cc..1ade7407 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -15,10 +15,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- - name: Set up solr configsets
- run: ./test/solr/generate_ncbo_configsets.sh
- name: create config.rb file
- run: cp config/config.test.rb config/config.rb
+ run: cp config/config.rb.sample config/config.rb
- name: Install Dependencies
run: sudo apt-get update && sudo apt-get -y install raptor2-utils
- name: Set up JDK 11
diff --git a/Dockerfile b/Dockerfile
index ccf1defb..42760153 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends \
openjdk-11-jre-headless \
raptor2-utils \
wait-for-it \
+ libraptor2-dev \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /srv/ontoportal/ontologies_linked_data
diff --git a/Gemfile b/Gemfile
index 7e1f17c7..2c0563cc 100644
--- a/Gemfile
+++ b/Gemfile
@@ -21,6 +21,9 @@ gem 'rubyzip', '~> 1.0'
gem 'thin'
gem 'request_store'
gem 'jwt'
+gem 'json-ld', '~> 3.0.2'
+gem "parallel", "~> 1.24"
+
# Testing
group :test do
@@ -39,3 +42,4 @@ end
# NCBO gems (can be from a local dev path or from rubygems/git)
gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'development'
gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'master'
+
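
Note: `json-ld` and `parallel` join the runtime dependency set: `json-ld` backs the new JSON-LD media type and `parallel` backs the threaded data indexing introduced in `submission_index_all_data.rb` further down. A minimal sketch of the slice-and-collect pattern that concern relies on, assuming only the `parallel` gem (numbers are illustrative):

```ruby
require 'parallel'

ids = (1..5_000).to_a
# Process 1_000-id slices on a pool of 10 threads; each block's return
# value is collected into the result array (here, per-slice counts).
counts = Parallel.map(ids.each_slice(1_000), in_threads: 10) do |slice|
  slice.size
end
puts counts.sum # => 5000
```
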
diff --git a/Gemfile.lock b/Gemfile.lock
index 31d6a0a1..a130d85c 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,9 +1,10 @@
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: cd5bc312bd3acd2ff056a87a6f37b5419f3444dc
+ revision: 0e554fce49713ce4d5a742a06c2fb59a547caf47
branch: development
specs:
goo (0.0.2)
+ addressable (~> 2.8)
pry
rdf (= 3.2.11)
rdf-raptor
@@ -17,7 +18,7 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/sparql-client.git
- revision: aed51baf4106fd0f3d0e3f9238f0aad9406aa3f0
+ revision: 180c818f7715baac64b2699bb452ef5c756f62c5
branch: master
specs:
sparql-client (1.0.1)
@@ -40,7 +41,7 @@ GEM
ast (2.4.2)
base64 (0.2.0)
bcrypt (3.1.20)
- bigdecimal (3.1.6)
+ bigdecimal (3.1.7)
builder (3.2.4)
coderay (1.1.3)
concurrent-ruby (1.2.3)
@@ -90,9 +91,12 @@ GEM
domain_name (~> 0.5)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
- json (2.7.1)
- json_pure (2.7.1)
- jwt (2.8.0)
+ json (2.7.2)
+ json-ld (3.0.2)
+ multi_json (~> 1.12)
+ rdf (>= 2.2.8, < 4.0)
+ json_pure (2.7.2)
+ jwt (2.8.1)
base64
language_server-protocol (3.17.0.3)
launchy (2.5.2)
@@ -107,10 +111,10 @@ GEM
net-imap
net-pop
net-smtp
- method_source (1.0.0)
+ method_source (1.1.0)
mime-types (3.5.2)
mime-types-data (~> 3.2015)
- mime-types-data (3.2024.0206)
+ mime-types-data (3.2024.0305)
mini_mime (1.1.5)
minitest (4.7.5)
minitest-reporters (0.14.24)
@@ -128,7 +132,7 @@ GEM
net-protocol
net-protocol (0.2.2)
timeout
- net-smtp (0.4.0.1)
+ net-smtp (0.5.0)
net-protocol
netrc (0.11.0)
oj (2.18.5)
@@ -145,7 +149,7 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.4)
+ public_suffix (5.0.5)
racc (1.7.3)
rack (1.6.13)
rack-test (0.8.3)
@@ -167,9 +171,9 @@ GEM
rdf-xsd (3.2.1)
rdf (~> 3.2)
rexml (~> 3.2)
- redis (5.1.0)
- redis-client (>= 0.17.0)
- redis-client (0.20.0)
+ redis (5.2.0)
+ redis-client (>= 0.22.0)
+ redis-client (0.22.1)
connection_pool
regexp_parser (2.9.0)
request_store (1.6.0)
@@ -182,7 +186,7 @@ GEM
rexml (3.2.6)
rsolr (1.1.2)
builder (>= 2.1.2)
- rubocop (1.60.2)
+ rubocop (1.63.2)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
@@ -190,11 +194,11 @@ GEM
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
- rubocop-ast (>= 1.30.0, < 2.0)
+ rubocop-ast (>= 1.31.1, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.30.0)
- parser (>= 3.2.1.0)
+ rubocop-ast (1.31.2)
+ parser (>= 3.3.0.4)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
rubyzip (1.3.0)
@@ -220,14 +224,12 @@ GEM
unicode-display_width (2.5.0)
uuid (2.3.9)
macaddr (~> 1.0)
- webmock (3.22.0)
+ webmock (3.23.0)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff (>= 0.4.0, < 2.0.0)
PLATFORMS
- x86_64-darwin-21
- x86_64-darwin-22
x86_64-darwin-23
x86_64-linux
@@ -240,6 +242,7 @@ DEPENDENCIES
faraday (~> 1.9)
ffi
goo!
+ json-ld (~> 3.0.2)
jwt
libxml-ruby (~> 2.0)
minitest
@@ -247,6 +250,7 @@ DEPENDENCIES
multi_json (~> 1.0)
oj (~> 2.0)
omni_logger
+ parallel (~> 1.24)
pony
pry
rack (~> 1.0)
@@ -265,4 +269,4 @@ DEPENDENCIES
webmock
BUNDLED WITH
- 2.4.21
+ 2.4.22
diff --git a/config/config.rb.sample b/config/config.rb.sample
index 0a0fedf0..7539c8d8 100644
--- a/config/config.rb.sample
+++ b/config/config.rb.sample
@@ -1,45 +1,102 @@
-LinkedData.config do |config|
- config.goo_port = 9000
- config.goo_host = "localhost"
- config.search_server_url = "http://localhost:8983/solr/term_search_core1"
- config.property_search_server_url = "http://localhost:8983/solr/prop_search_core1"
- config.repository_folder = "./test/data/ontology_files/repo"
- config.rest_url_prefix = "http://data.bioontology.org/"
- config.enable_security = false
- config.java_max_heap_size = '10240M'
- #PURL server config parameters
- config.enable_purl = false
- config.purl_host = "purl.bioontology.org"
- config.purl_port = 80
- config.purl_username = ""
- config.purl_password = ""
- config.purl_maintainers = ""
- config.purl_target_url_prefix = "http://bioportal.bioontology.org"
- config.sparql_endpoint_url = "http:://sparql_endpoint.com"
- Goo.configure do |conf|
- conf.main_languages = ['en']
- end
+GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
+GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
+GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
+GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
+GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
+GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
+REDIS_HOST = ENV.include?("REDIS_HOST") ? ENV["REDIS_HOST"] : "localhost"
+REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr"
+GOO_SLICES = ENV["GOO_SLICES"] || 500
+begin
+ LinkedData.config do |config|
+ Goo.slice_loading_size = GOO_SLICES.to_i
+ config.goo_backend_name = GOO_BACKEND_NAME.to_s
+ config.goo_host = GOO_HOST.to_s
+ config.goo_port = GOO_PORT.to_i
+ config.goo_path_query = GOO_PATH_QUERY.to_s
+ config.goo_path_data = GOO_PATH_DATA.to_s
+ config.goo_path_update = GOO_PATH_UPDATE.to_s
+ config.goo_redis_host = REDIS_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.http_redis_host = REDIS_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+ config.ontology_analytics_redis_host = REDIS_HOST.to_s
+ config.ontology_analytics_redis_port = REDIS_PORT.to_i
+ config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
+ config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+ config.sparql_endpoint_url = "http://sparql_endpoint.com"
+ # config.enable_notifications = false
+ #
+ config.java_max_heap_size = '20480M'
+ config.main_languages = ['en']
+
+ # Caches
+ config.goo_redis_host = REDIS_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.enable_http_cache = false
+
+ # Email notifications
+ config.enable_notifications = false
+ config.email_sender = 'notifications@bioportal.lirmm.fr' # Default sender for emails
+ config.email_override = 'syphax.bouazzouni@lirmm.fr' # all email gets sent here. Disable with email_override_disable.
+ config.email_disable_override = true
+ config.smtp_host = 'localhost'
+ config.smtp_port = 1025
+ config.smtp_auth_type = :plain # :none, :plain, :login, :cram_md5
+ config.smtp_domain = 'lirmm.fr'
+ config.smtp_user = 'test'
+ config.smtp_password = 'test'
+ # Emails of the instance administrators to get mail notifications when new user or new ontology
+ # config.admin_emails = ['syphax.bouazzouni@lirmm.fr']
- #oauth
- config.oauth_providers = {
- github: {
- check: :access_token,
- link: 'https://api.github.com/user'
- },
- keycloak: {
- check: :jwt_token,
- cert: 'KEYCLOAK_SECRET_KEY'
- },
- orcid: {
- check: :access_token,
- link: 'https://pub.orcid.org/v3.0/me'
- },
- google: {
- check: :access_token,
- link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ # Used to define other bioportal that can be mapped to
+ # Example to map to ncbo bioportal : {"ncbo" => {"api" => "http://data.bioontology.org", "ui" => "http://bioportal.bioontology.org", "apikey" => ""}
+ # Then create the mapping using the following class in JSON : "http://purl.bioontology.org/ontology/MESH/C585345": "ncbo:MESH"
+ # Where "ncbo" is the namespace used as key in the interportal_hash
+ config.interportal_hash = {
+ 'agroportal' => {
+ 'api' => 'http://data.agroportal.lirmm.fr',
+ 'ui' => 'http://agroportal.lirmm.fr',
+ 'apikey' => '1cfae05f-9e67-486f-820b-b393dec5764b'
+ },
+ 'ncbo' => {
+ 'api' => 'http://data.bioontology.org',
+ 'ui' => 'http://bioportal.bioontology.org',
+ 'apikey' => '4a5011ea-75fa-4be6-8e89-f45c8c84844e'
+ },
+ 'sifr' => {
+ 'api' => 'http://data.bioportal.lirmm.fr',
+ 'ui' => 'http://bioportal.lirmm.fr',
+ 'apikey' => '1cfae05f-9e67-486f-820b-b393dec5764b'
+ }
}
- }
+
+ # oauth
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
+ end
+rescue NameError => e
+ binding.pry
+ # puts '(CNFG) >> LinkedData not available, cannot load config'
end
-#sometimes tmp by default cannot allocate large files
-$TMP_SORT_FOLDER = "SOME TMP FOLDER"
+# sometimes tmp by default cannot allocate large files
+$TMP_SORT_FOLDER = 'SOME TMP FOLDER'
\ No newline at end of file
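
Note: the sample config now mirrors the deleted `config/config.test.rb`: every connection setting reads from the environment and falls back to a local default, so one file serves local runs, CI, and Docker. The `ENV.include?(key) ? ENV[key] : default` ternary used throughout is equivalent to the shorter stdlib idiom below (a sketch, not part of the patch):

```ruby
# ENV.fetch returns the variable when it is set and the supplied default
# otherwise, matching the ternaries above line for line.
GOO_HOST   = ENV.fetch('GOO_HOST', 'localhost')
GOO_PORT   = ENV.fetch('GOO_PORT', 9000)
REDIS_HOST = ENV.fetch('REDIS_HOST', 'localhost')
```
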
diff --git a/config/config.test.rb b/config/config.test.rb
deleted file mode 100644
index 4c21b095..00000000
--- a/config/config.test.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-###
-# This file is designed for use in docker based unit testing
-#
-# All the defaults are set in
-# https://github.com/ncbo/ontologies_linked_data/blob/master/lib/ontologies_linked_data/config/config.rb
-###
-
-GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
-GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
-GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
-GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
-GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
-GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
-REDIS_HOST = ENV.include?("REDIS_HOST") ? ENV["REDIS_HOST"] : "localhost"
-REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
-SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr/term_search_core1"
-SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8984/solr/prop_search_core1"
-GOO_SLICES = ENV["GOO_SLICES"] || 500
-LinkedData.config do |config|
- Goo.slice_loading_size = GOO_SLICES.to_i
- config.goo_backend_name = GOO_BACKEND_NAME.to_s
- config.goo_host = GOO_HOST.to_s
- config.goo_port = GOO_PORT.to_i
- config.goo_path_query = GOO_PATH_QUERY.to_s
- config.goo_path_data = GOO_PATH_DATA.to_s
- config.goo_path_update = GOO_PATH_UPDATE.to_s
- config.goo_redis_host = REDIS_HOST.to_s
- config.goo_redis_port = REDIS_PORT.to_i
- config.http_redis_host = REDIS_HOST.to_s
- config.http_redis_port = REDIS_PORT.to_i
- config.ontology_analytics_redis_host = REDIS_HOST.to_s
- config.ontology_analytics_redis_port = REDIS_PORT.to_i
- config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
- config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
- config.sparql_endpoint_url = "http:://sparql_endpoint.com"
- # config.enable_notifications = false
- config.oauth_providers = {
- github: {
- check: :access_token,
- link: 'https://api.github.com/user'
- },
- keycloak: {
- check: :jwt_token,
- cert: 'KEYCLOAK_SECRET_KEY'
- },
- orcid: {
- check: :access_token,
- link: 'https://pub.orcid.org/v3.0/me'
- },
- google: {
- check: :access_token,
- link: 'https://www.googleapis.com/oauth2/v3/userinfo'
- }
- }
-end
\ No newline at end of file
diff --git a/config/solr/property_search/enumsconfig.xml b/config/solr/property_search/enumsconfig.xml
deleted file mode 100644
index 72e7b7d3..00000000
--- a/config/solr/property_search/enumsconfig.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-[12 lines: Solr enumsConfig defining enum values ONTOLOGY, VALUE_SET_COLLECTION and ANNOTATION, DATATYPE, OBJECT; XML markup lost in extraction]
diff --git a/config/solr/property_search/mapping-ISOLatin1Accent.txt b/config/solr/property_search/mapping-ISOLatin1Accent.txt
deleted file mode 100644
index ede77425..00000000
--- a/config/solr/property_search/mapping-ISOLatin1Accent.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Syntax:
-# "source" => "target"
-# "source".length() > 0 (source cannot be empty.)
-# "target".length() >= 0 (target can be empty.)
-
-# example:
-# "À" => "A"
-# "\u00C0" => "A"
-# "\u00C0" => "\u0041"
-# "ß" => "ss"
-# "\t" => " "
-# "\n" => ""
-
-# À => A
-"\u00C0" => "A"
-
-# Á => A
-"\u00C1" => "A"
-
-# Â => A
-"\u00C2" => "A"
-
-# Ã => A
-"\u00C3" => "A"
-
-# Ä => A
-"\u00C4" => "A"
-
-# Å => A
-"\u00C5" => "A"
-
-# Æ => AE
-"\u00C6" => "AE"
-
-# Ç => C
-"\u00C7" => "C"
-
-# È => E
-"\u00C8" => "E"
-
-# É => E
-"\u00C9" => "E"
-
-# Ê => E
-"\u00CA" => "E"
-
-# Ë => E
-"\u00CB" => "E"
-
-# Ì => I
-"\u00CC" => "I"
-
-# Í => I
-"\u00CD" => "I"
-
-# Î => I
-"\u00CE" => "I"
-
-# Ï => I
-"\u00CF" => "I"
-
-# IJ => IJ
-"\u0132" => "IJ"
-
-# Ð => D
-"\u00D0" => "D"
-
-# Ñ => N
-"\u00D1" => "N"
-
-# Ò => O
-"\u00D2" => "O"
-
-# Ó => O
-"\u00D3" => "O"
-
-# Ô => O
-"\u00D4" => "O"
-
-# Õ => O
-"\u00D5" => "O"
-
-# Ö => O
-"\u00D6" => "O"
-
-# Ø => O
-"\u00D8" => "O"
-
-# Œ => OE
-"\u0152" => "OE"
-
-# Þ
-"\u00DE" => "TH"
-
-# Ù => U
-"\u00D9" => "U"
-
-# Ú => U
-"\u00DA" => "U"
-
-# Û => U
-"\u00DB" => "U"
-
-# Ü => U
-"\u00DC" => "U"
-
-# Ý => Y
-"\u00DD" => "Y"
-
-# Ÿ => Y
-"\u0178" => "Y"
-
-# à => a
-"\u00E0" => "a"
-
-# á => a
-"\u00E1" => "a"
-
-# â => a
-"\u00E2" => "a"
-
-# ã => a
-"\u00E3" => "a"
-
-# ä => a
-"\u00E4" => "a"
-
-# å => a
-"\u00E5" => "a"
-
-# æ => ae
-"\u00E6" => "ae"
-
-# ç => c
-"\u00E7" => "c"
-
-# è => e
-"\u00E8" => "e"
-
-# é => e
-"\u00E9" => "e"
-
-# ê => e
-"\u00EA" => "e"
-
-# ë => e
-"\u00EB" => "e"
-
-# ì => i
-"\u00EC" => "i"
-
-# í => i
-"\u00ED" => "i"
-
-# î => i
-"\u00EE" => "i"
-
-# ï => i
-"\u00EF" => "i"
-
-# ij => ij
-"\u0133" => "ij"
-
-# ð => d
-"\u00F0" => "d"
-
-# ñ => n
-"\u00F1" => "n"
-
-# ò => o
-"\u00F2" => "o"
-
-# ó => o
-"\u00F3" => "o"
-
-# ô => o
-"\u00F4" => "o"
-
-# õ => o
-"\u00F5" => "o"
-
-# ö => o
-"\u00F6" => "o"
-
-# ø => o
-"\u00F8" => "o"
-
-# œ => oe
-"\u0153" => "oe"
-
-# ß => ss
-"\u00DF" => "ss"
-
-# þ => th
-"\u00FE" => "th"
-
-# ù => u
-"\u00F9" => "u"
-
-# ú => u
-"\u00FA" => "u"
-
-# û => u
-"\u00FB" => "u"
-
-# ü => u
-"\u00FC" => "u"
-
-# ý => y
-"\u00FD" => "y"
-
-# ÿ => y
-"\u00FF" => "y"
-
-# ff => ff
-"\uFB00" => "ff"
-
-# fi => fi
-"\uFB01" => "fi"
-
-# fl => fl
-"\uFB02" => "fl"
-
-# ffi => ffi
-"\uFB03" => "ffi"
-
-# ffl => ffl
-"\uFB04" => "ffl"
-
-# ſt => ft
-"\uFB05" => "ft"
-
-# st => st
-"\uFB06" => "st"
diff --git a/config/solr/property_search/schema.xml b/config/solr/property_search/schema.xml
deleted file mode 100644
index 20824ea6..00000000
--- a/config/solr/property_search/schema.xml
+++ /dev/null
@@ -1,1179 +0,0 @@
-[1,179 lines: Solr schema.xml for the property_search core (field types, field definitions, uniqueKey "id"); XML markup lost in extraction]
diff --git a/config/solr/property_search/solrconfig.xml b/config/solr/property_search/solrconfig.xml
deleted file mode 100644
index 771a0f32..00000000
--- a/config/solr/property_search/solrconfig.xml
+++ /dev/null
@@ -1,1299 +0,0 @@
-[1,299 lines: Solr solrconfig.xml for the property_search core (luceneMatchVersion 8.8.2, data dir, update log, autoCommit/autoSoftCommit, query caches, request handlers, spellcheck and highlighting components); XML markup lost in extraction]
diff --git a/config/solr/solr.xml b/config/solr/solr.xml
deleted file mode 100644
index d9d089e4..00000000
--- a/config/solr/solr.xml
+++ /dev/null
@@ -1,60 +0,0 @@
-[60 lines: top-level solr.xml (maxBooleanClauses, sharedLib/allowPaths, solrcloud host/port/zk settings, shardHandlerFactory timeouts); XML markup lost in extraction]
diff --git a/config/solr/term_search/enumsconfig.xml b/config/solr/term_search/enumsconfig.xml
deleted file mode 100644
index 72e7b7d3..00000000
--- a/config/solr/term_search/enumsconfig.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-[12 lines: Solr enumsConfig defining enum values ONTOLOGY, VALUE_SET_COLLECTION and ANNOTATION, DATATYPE, OBJECT; XML markup lost in extraction]
diff --git a/config/solr/term_search/mapping-ISOLatin1Accent.txt b/config/solr/term_search/mapping-ISOLatin1Accent.txt
deleted file mode 100644
index ede77425..00000000
--- a/config/solr/term_search/mapping-ISOLatin1Accent.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Syntax:
-# "source" => "target"
-# "source".length() > 0 (source cannot be empty.)
-# "target".length() >= 0 (target can be empty.)
-
-# example:
-# "À" => "A"
-# "\u00C0" => "A"
-# "\u00C0" => "\u0041"
-# "ß" => "ss"
-# "\t" => " "
-# "\n" => ""
-
-# À => A
-"\u00C0" => "A"
-
-# Á => A
-"\u00C1" => "A"
-
-# Â => A
-"\u00C2" => "A"
-
-# Ã => A
-"\u00C3" => "A"
-
-# Ä => A
-"\u00C4" => "A"
-
-# Å => A
-"\u00C5" => "A"
-
-# Æ => AE
-"\u00C6" => "AE"
-
-# Ç => C
-"\u00C7" => "C"
-
-# È => E
-"\u00C8" => "E"
-
-# É => E
-"\u00C9" => "E"
-
-# Ê => E
-"\u00CA" => "E"
-
-# Ë => E
-"\u00CB" => "E"
-
-# Ì => I
-"\u00CC" => "I"
-
-# Í => I
-"\u00CD" => "I"
-
-# Î => I
-"\u00CE" => "I"
-
-# Ï => I
-"\u00CF" => "I"
-
-# IJ => IJ
-"\u0132" => "IJ"
-
-# Ð => D
-"\u00D0" => "D"
-
-# Ñ => N
-"\u00D1" => "N"
-
-# Ò => O
-"\u00D2" => "O"
-
-# Ó => O
-"\u00D3" => "O"
-
-# Ô => O
-"\u00D4" => "O"
-
-# Õ => O
-"\u00D5" => "O"
-
-# Ö => O
-"\u00D6" => "O"
-
-# Ø => O
-"\u00D8" => "O"
-
-# Œ => OE
-"\u0152" => "OE"
-
-# Þ
-"\u00DE" => "TH"
-
-# Ù => U
-"\u00D9" => "U"
-
-# Ú => U
-"\u00DA" => "U"
-
-# Û => U
-"\u00DB" => "U"
-
-# Ü => U
-"\u00DC" => "U"
-
-# Ý => Y
-"\u00DD" => "Y"
-
-# Ÿ => Y
-"\u0178" => "Y"
-
-# à => a
-"\u00E0" => "a"
-
-# á => a
-"\u00E1" => "a"
-
-# â => a
-"\u00E2" => "a"
-
-# ã => a
-"\u00E3" => "a"
-
-# ä => a
-"\u00E4" => "a"
-
-# å => a
-"\u00E5" => "a"
-
-# æ => ae
-"\u00E6" => "ae"
-
-# ç => c
-"\u00E7" => "c"
-
-# è => e
-"\u00E8" => "e"
-
-# é => e
-"\u00E9" => "e"
-
-# ê => e
-"\u00EA" => "e"
-
-# ë => e
-"\u00EB" => "e"
-
-# ì => i
-"\u00EC" => "i"
-
-# í => i
-"\u00ED" => "i"
-
-# î => i
-"\u00EE" => "i"
-
-# ï => i
-"\u00EF" => "i"
-
-# ij => ij
-"\u0133" => "ij"
-
-# ð => d
-"\u00F0" => "d"
-
-# ñ => n
-"\u00F1" => "n"
-
-# ò => o
-"\u00F2" => "o"
-
-# ó => o
-"\u00F3" => "o"
-
-# ô => o
-"\u00F4" => "o"
-
-# õ => o
-"\u00F5" => "o"
-
-# ö => o
-"\u00F6" => "o"
-
-# ø => o
-"\u00F8" => "o"
-
-# œ => oe
-"\u0153" => "oe"
-
-# ß => ss
-"\u00DF" => "ss"
-
-# þ => th
-"\u00FE" => "th"
-
-# ù => u
-"\u00F9" => "u"
-
-# ú => u
-"\u00FA" => "u"
-
-# û => u
-"\u00FB" => "u"
-
-# ü => u
-"\u00FC" => "u"
-
-# ý => y
-"\u00FD" => "y"
-
-# ÿ => y
-"\u00FF" => "y"
-
-# ff => ff
-"\uFB00" => "ff"
-
-# fi => fi
-"\uFB01" => "fi"
-
-# fl => fl
-"\uFB02" => "fl"
-
-# ffi => ffi
-"\uFB03" => "ffi"
-
-# ffl => ffl
-"\uFB04" => "ffl"
-
-# ſt => ft
-"\uFB05" => "ft"
-
-# st => st
-"\uFB06" => "st"
diff --git a/config/solr/term_search/schema.xml b/config/solr/term_search/schema.xml
deleted file mode 100644
index fa95e127..00000000
--- a/config/solr/term_search/schema.xml
+++ /dev/null
@@ -1,1222 +0,0 @@
-[1,222 lines: Solr schema.xml for the term_search core (field types, field definitions, uniqueKey "id"); XML markup lost in extraction]
diff --git a/config/solr/term_search/solrconfig.xml b/config/solr/term_search/solrconfig.xml
deleted file mode 100644
index 771a0f32..00000000
--- a/config/solr/term_search/solrconfig.xml
+++ /dev/null
@@ -1,1299 +0,0 @@
-[1,299 lines: Solr solrconfig.xml for the term_search core (same stripped content as the property_search copy above); XML markup lost in extraction]
diff --git a/docker-compose.yml b/docker-compose.yml
index 72a209a4..55229ff6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -56,36 +56,16 @@ services:
profiles:
- fs
- solr-term-ut:
- image: solr:8
- volumes:
- - ./test/solr/configsets:/configsets:ro
+ solr-ut:
+ image: solr:8.11.2
ports:
- - "8983:8983"
- command: [ "solr-precreate", "term_search_core1", "/configsets/term_search" ]
- healthcheck:
- test: [ "CMD-SHELL", "curl -sf http://localhost:8983/solr/term_search_core1/admin/ping?wt=json | grep -iq '\"status\":\"OK\"}' || exit 1" ]
- start_period: 5s
- interval: 10s
- timeout: 5s
- retries: 5
+ - 8983:8983
+ command: bin/solr start -cloud -f
+
- solr-prop-ut:
- image: solr:8
- volumes:
- - ./test/solr/configsets:/configsets:ro
- ports:
- - "8984:8983"
- command: [ "solr-precreate", "prop_search_core1", "/configsets/property_search" ]
- healthcheck:
- test: [ "CMD-SHELL", "curl -sf http://localhost:8983/solr/prop_search_core1/admin/ping?wt=json | grep -iq '\"status\":\"OK\"}' || exit 1" ]
- start_period: 5s
- interval: 10s
- timeout: 5s
- retries: 5
agraph-ut:
- image: franzinc/agraph:v8.0.0.rc1
+ image: franzinc/agraph:v8.1.0
platform: linux/amd64
environment:
- AGRAPH_SUPER_USER=test
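
Note: the two precreated Solr cores (`solr-term-ut`, `solr-prop-ut`) collapse into a single SolrCloud node. With the `enable_indexing` declarations added to the models below, collections are created from the application side, so configset volumes and `solr-precreate` are no longer needed. A hypothetical readiness check against the mapped port, using only the stdlib and the standard SolrCloud Collections API:

```ruby
require 'net/http'
require 'json'

# Lists the collections the application has created on the solr-ut node.
body = Net::HTTP.get(URI('http://localhost:8983/solr/admin/collections?action=LIST&wt=json'))
puts JSON.parse(body)['collections']
```
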
diff --git a/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb b/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb
deleted file mode 100644
index 08717380..00000000
--- a/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-module LinkedData
- module Concerns
- module Mappings
- module ExternalUtils
-
-
- end
- end
- end
-end
-
diff --git a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_index_all_data.rb b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_index_all_data.rb
new file mode 100644
index 00000000..58be6165
--- /dev/null
+++ b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_index_all_data.rb
@@ -0,0 +1,161 @@
+require 'parallel'
+module LinkedData
+ module Concerns
+ module OntologySubmission
+ module IndexAllData
+
+ module ClassMethods
+ def clear_indexed_content(ontology)
+ conn = Goo.init_search_connection(:ontology_data)
+ begin
+ conn.delete_by_query("ontology_t:\"#{ontology}\"")
+ rescue StandardError => e
+ puts e.message
+ end
+ conn
+ end
+
+ end
+
+ def self.included(base)
+ base.extend(ClassMethods)
+ end
+
+ def index_sorted_ids(ids, ontology, conn, logger, commit = true)
+ total_triples = Parallel.map(ids.each_slice(1000), in_threads: 10) do |ids_slice|
+ index_ids = 0
+ triples_count = 0
+ documents = {}
+ time = Benchmark.realtime do
+ documents, triples_count = fetch_triples(ids_slice, ontology)
+ end
+
+ next 0 if documents.empty? # 'return' here would abort the map and nil-poison total_triples.sum
+
+ logger.info("Worker #{Parallel.worker_number} > Fetched #{triples_count} triples of #{id} in #{time} sec.") if triples_count.positive?
+
+ time = Benchmark.realtime do
+ conn.index_document(documents.values, commit: false)
+ conn.index_commit if commit
+ index_ids = documents.size
+ documents = {}
+ end
+ logger.info("Worker #{Parallel.worker_number} > Indexed #{index_ids} ids of #{id} in #{time} sec. Total #{documents.size} ids.")
+ triples_count
+ end
+ total_triples.sum
+ end
+
+ def index_all_data(logger, commit = true)
+ page = 1
+ size = 10_000
+ count_ids = 0
+ total_time = 0
+ total_triples = 0
+ old_count = -1
+
+ ontology = self.bring(:ontology).ontology
+ .bring(:acronym).acronym
+ conn = init_search_collection(ontology)
+
+ ids = {}
+
+ while count_ids != old_count
+ old_count = count_ids
+ count = 0
+ time = Benchmark.realtime do
+ ids = fetch_sorted_ids(size, page)
+ count = ids.size
+ end
+
+ count_ids += count
+ total_time += time
+ page += 1
+
+ next unless count.positive?
+
+ logger.info("Fetched #{count} ids of #{id} page: #{page} in #{time} sec.")
+
+ total_triples += index_sorted_ids(ids, ontology, conn, logger, commit)
+
+ end
+ logger.info("Completed indexing all ontology data: #{self.id} in #{total_time} sec. (#{count_ids} ids / #{total_triples} triples)")
+ logger.flush
+ end
+
+ private
+
+ def fetch_sorted_ids(size, page)
+ query = Goo.sparql_query_client.select(:id)
+ .distinct
+ .from(RDF::URI.new(self.id))
+ .where(%i[id p v])
+ .limit(size)
+ .offset((page - 1) * size)
+
+ query.each_solution.map(&:id).sort
+ end
+
+ def update_doc(doc, property, new_val)
+ unescaped_prop = property.gsub('___', '://')
+
+ unescaped_prop = unescaped_prop.gsub('_', '/')
+ existent_val = doc["#{unescaped_prop}_t"] || doc["#{unescaped_prop}_txt"]
+
+ if !existent_val && !property['#']
+ unescaped_prop = unescaped_prop.sub(%r{/([^/]+)$}, '#\1') # replace the last '/' with '#'
+ existent_val = doc["#{unescaped_prop}_t"] || doc["#{unescaped_prop}_txt"]
+ end
+
+ if existent_val && new_val || new_val.is_a?(Array)
+ doc.delete("#{unescaped_prop}_t")
+ doc["#{unescaped_prop}_txt"] = Array(existent_val) + Array(new_val).map(&:to_s)
+ elsif existent_val.nil? && new_val
+ doc["#{unescaped_prop}_t"] = new_val.to_s
+ end
+ doc
+ end
+
+ def init_search_collection(ontology)
+ self.class.clear_indexed_content(ontology)
+ end
+
+ def fetch_triples(ids_slice, ontology)
+ documents = {}
+ count = 0
+ filter = ids_slice.map { |x| "?id = <#{x}>" }.join(' || ')
+ query = Goo.sparql_query_client.select(:id, :p, :v)
+ .from(RDF::URI.new(self.id))
+ .where(%i[id p v])
+ .filter(filter)
+ query.each_solution do |sol|
+ count += 1
+ doc = documents[sol[:id].to_s]
+ doc ||= {
+ id: "#{sol[:id]}_#{ontology}", submission_id_t: self.id.to_s,
+ ontology_t: ontology, resource_model: self.class.model_name,
+ resource_id: sol[:id].to_s
+ }
+ property = sol[:p].to_s
+ value = sol[:v]
+
+ if property.to_s.eql?(RDF.type.to_s)
+ update_doc(doc, 'type', value)
+ else
+ update_doc(doc, property, value)
+ end
+ documents[sol[:id].to_s] = doc
+ end
+ [documents, count]
+ end
+
+ end
+ end
+ end
+end
+
+
+
+
+
+
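
Note: the concern indexes a submission's raw triples in two phases: `fetch_sorted_ids` pages distinct subject ids out of the submission graph 10,000 at a time, then `index_sorted_ids` fans them out in 1,000-id slices across 10 threads, each slice pulling its triples with a SPARQL `FILTER` and building one Solr document per subject in the `:ontology_data` collection. A hedged usage sketch (the lookup follows the usual Goo pattern; the acronym is illustrative):

```ruby
require 'logger'

logger = Logger.new($stdout)
# Fetch the latest parsed submission of a hypothetical ontology 'STY'.
sub = LinkedData::Models::Ontology.find('STY').first.latest_submission(status: :rdf)
sub.bring_remaining
# Pages ids, then indexes 1_000-id slices in parallel; true = commit at the end.
sub.index_all_data(logger, true)
```
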
diff --git a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
index 4125f501..4849a8d0 100644
--- a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
+++ b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
@@ -276,6 +276,11 @@ def ontology_has_domain(sub)
ontology_domain_list
end
+ def default_sparql_endpoint(sub)
+ url = LinkedData.settings.sparql_endpoint_url || ''
+
+ url.strip.blank? ? [] : [RDF::URI.new(url)]
+ end
def open_search_default(sub)
RDF::URI.new("#{LinkedData.settings.rest_url_prefix}search?ontologies=#{sub.ontology.acronym}&q=")
end
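
Note: `default_sparql_endpoint` guards the default of the `endpoint` attribute (rewired in `ontology_submission.rb` below). Previously `RDF::URI.new(LinkedData.settings.sparql_endpoint_url)` was built unconditionally, yielding a bogus URI whenever the setting is unset. A self-contained sketch of the new behavior, with stdlib `empty?` standing in for the ActiveSupport `blank?` the patch uses:

```ruby
require 'rdf'

# Simplified: takes the configured URL directly instead of a submission.
def default_sparql_endpoint(url)
  url = url.to_s
  url.strip.empty? ? [] : [RDF::URI.new(url)]
end

p default_sparql_endpoint(nil)                          # => []
p default_sparql_endpoint('http://sparql.example.org/') # => [#<RDF::URI ...>]
```
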
diff --git a/lib/ontologies_linked_data/config/config.rb b/lib/ontologies_linked_data/config/config.rb
index ba5274cb..537a04f6 100644
--- a/lib/ontologies_linked_data/config/config.rb
+++ b/lib/ontologies_linked_data/config/config.rb
@@ -25,8 +25,8 @@ def config(&block)
@settings.goo_path_query ||= '/sparql/'
@settings.goo_path_data ||= '/data/'
@settings.goo_path_update ||= '/update/'
- @settings.search_server_url ||= 'http://localhost:8983/solr/term_search_core1'
- @settings.property_search_server_url ||= 'http://localhost:8983/solr/prop_search_core1'
+ @settings.search_server_url ||= 'http://localhost:8983/solr'
+ @settings.property_search_server_url ||= 'http://localhost:8983/solr'
@settings.repository_folder ||= './test/data/ontology_files/repo'
@settings.rest_url_prefix ||= DEFAULT_PREFIX
@settings.enable_security ||= false
diff --git a/lib/ontologies_linked_data/media_types.rb b/lib/ontologies_linked_data/media_types.rb
index d109e80d..01a26480 100644
--- a/lib/ontologies_linked_data/media_types.rb
+++ b/lib/ontologies_linked_data/media_types.rb
@@ -3,8 +3,11 @@ module MediaTypes
HTML = :html
JSON = :json
JSONP = :jsonp
+ JSONLD = :jsonld
XML = :xml
+ RDF_XML = :rdf_xml
TURTLE = :turtle
+ NTRIPLES = :ntriples
DEFAULT = JSON
def self.all
diff --git a/lib/ontologies_linked_data/models/agents/agent.rb b/lib/ontologies_linked_data/models/agents/agent.rb
index c31921a2..24601748 100644
--- a/lib/ontologies_linked_data/models/agents/agent.rb
+++ b/lib/ontologies_linked_data/models/agents/agent.rb
@@ -7,13 +7,13 @@ class Agent < LinkedData::Models::Base
model :Agent, namespace: :foaf, name_with: lambda { |cc| uuid_uri_generator(cc) }
attribute :agentType, enforce: [:existence], enforcedValues: %w[person organization]
- attribute :name, namespace: :foaf, enforce: %i[existence]
+ attribute :name, namespace: :foaf, enforce: %i[existence], fuzzy_search: true
attribute :homepage, namespace: :foaf
- attribute :acronym, namespace: :skos, property: :altLabel
- attribute :email, namespace: :foaf, property: :mbox, enforce: %i[email unique]
+ attribute :acronym, namespace: :skos, property: :altLabel, fuzzy_search: true
+ attribute :email, namespace: :foaf, property: :mbox, enforce: %i[email unique], fuzzy_search: true
- attribute :identifiers, namespace: :adms, property: :identifier, enforce: %i[Identifier list unique_identifiers]
+ attribute :identifiers, namespace: :adms, property: :identifier, enforce: %i[Identifier list unique_identifiers], fuzzy_search: true
attribute :affiliations, enforce: %i[Agent list is_organization], namespace: :org, property: :memberOf
attribute :creator, type: :user, enforce: [:existence]
embed :identifiers, :affiliations
@@ -23,6 +23,11 @@ class Agent < LinkedData::Models::Base
write_access :creator
access_control_load :creator
+ enable_indexing(:agents_metadata)
+
+ def embedded_doc
+ "#{self.name} #{self.acronym} #{self.email} #{self.agentType}"
+ end
def self.load_agents_usages(agents = [], agent_attributes = OntologySubmission.agents_attr_uris)
q = Goo.sparql_query_client.select(:id, :property, :agent, :status).distinct.from(LinkedData::Models::OntologySubmission.uri_type).where([:id,LinkedData::Models::OntologySubmission.attribute_uri(:submissionStatus),:status], [:id, :property, :agent])
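
Note: agents become searchable: `enable_indexing(:agents_metadata)` gives the model its own collection, `fuzzy_search: true` marks the attributes fed to it, and `embedded_doc` is the flattened text indexed when an agent is embedded in another document (e.g. a submission's contributors). Illustrative output for a hypothetical in-memory agent:

```ruby
agent = LinkedData::Models::Agent.new
agent.agentType = 'person'
agent.name      = 'Jane Doe'
agent.acronym   = 'JD'
agent.email     = 'jane@example.org'
agent.embedded_doc # => "Jane Doe JD jane@example.org person"
```
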
diff --git a/lib/ontologies_linked_data/models/agents/identifier.rb b/lib/ontologies_linked_data/models/agents/identifier.rb
index 7f504456..5e7d77cc 100644
--- a/lib/ontologies_linked_data/models/agents/identifier.rb
+++ b/lib/ontologies_linked_data/models/agents/identifier.rb
@@ -21,6 +21,10 @@ def self.generate_identifier(notation, schema_agency)
return RDF::URI.new(Goo.id_prefix + 'Identifiers/' + out.join(':')) if out.size.eql?(2)
end
+ def embedded_doc
+ "#{self.id.split('/').last}"
+ end
+
def no_url(inst,attr)
inst.bring(attr) if inst.bring?(attr)
notation = inst.send(attr)
diff --git a/lib/ontologies_linked_data/models/class.rb b/lib/ontologies_linked_data/models/class.rb
index 930177e4..5c2700bb 100644
--- a/lib/ontologies_linked_data/models/class.rb
+++ b/lib/ontologies_linked_data/models/class.rb
@@ -116,6 +116,66 @@ def self.urn_id(acronym,classId)
cache_segment_keys [:class]
cache_load submission: [ontology: [:acronym]]
+ # Index settings
+ def self.index_schema(schema_generator)
+ schema_generator.add_field(:prefLabel, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:synonym, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:notation, 'text_general', indexed: true, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:definition, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:submissionAcronym, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:parents, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:ontologyType, 'string', indexed: true, stored: true, multi_valued: false)
+ # schema_generator.add_field(:ontologyType, 'ontologyType', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:ontologyId, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:submissionId, 'pint', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:childCount, 'pint', indexed: true, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:cui, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:semanticType, 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ schema_generator.add_field(:property, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:propertyRaw, 'text_general', indexed: false, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:obsolete, 'boolean', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:provisional, 'boolean', indexed: true, stored: true, multi_valued: false)
+
+ # Copy fields for term search
+ schema_generator.add_copy_field('notation', '_text_')
+
+ %w[prefLabel synonym].each do |field|
+
+ schema_generator.add_field("#{field}Exact", 'string', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_field("#{field}Suggest", 'text_suggest', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+ schema_generator.add_field("#{field}SuggestEdge", 'text_suggest_edge', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_field("#{field}SuggestNgram", 'text_suggest_ngram', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+
+ schema_generator.add_copy_field(field, '_text_')
+ schema_generator.add_copy_field(field, "#{field}Exact")
+ schema_generator.add_copy_field(field, "#{field}Suggest")
+ schema_generator.add_copy_field(field, "#{field}SuggestEdge")
+ schema_generator.add_copy_field(field, "#{field}SuggestNgram")
+
+ schema_generator.add_dynamic_field("#{field}_*", 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}Exact_*", 'string', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}Suggest_*", 'text_suggest', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+ schema_generator.add_dynamic_field("#{field}SuggestEdge_*", 'text_suggest_edge', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}SuggestNgram_*", 'text_suggest_ngram', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+
+ schema_generator.add_copy_field("#{field}_*", "#{field}Exact_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}Suggest_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}SuggestEdge_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}SuggestNgram_*")
+ end
+
+ schema_generator.add_dynamic_field('definition_*', 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ end
+
+ enable_indexing(:term_search_core1) do |schema_generator|
+ index_schema(schema_generator)
+ end
+
def self.tree_view_property(*args)
submission = args.first
unless submission.loaded_attributes.include?(:hasOntologyLanguage)
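
Note: `index_schema` replaces the hand-maintained `schema.xml` deleted above; field, dynamic-field, and copy-field definitions now live in code and are applied when the `term_search_core1` collection is created. Presumably the generator maps each call onto Solr Schema API payloads along these lines (a sketch; the actual generator lives in Goo):

```ruby
require 'json'

# add_field(:prefLabel, 'text_general', indexed: true, stored: true, multi_valued: true)
add_field = { 'add-field' => {
  'name' => 'prefLabel', 'type' => 'text_general',
  'indexed' => true, 'stored' => true, 'multiValued' => true
} }

# add_copy_field('prefLabel', '_text_')
add_copy_field = { 'add-copy-field' => { 'source' => 'prefLabel', 'dest' => '_text_' } }

puts JSON.generate(add_field)
puts JSON.generate(add_copy_field)
```
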
diff --git a/lib/ontologies_linked_data/models/concerns/submission_process.rb b/lib/ontologies_linked_data/models/concerns/submission_process.rb
index 15c7c3fc..2bdfb4f1 100644
--- a/lib/ontologies_linked_data/models/concerns/submission_process.rb
+++ b/lib/ontologies_linked_data/models/concerns/submission_process.rb
@@ -18,7 +18,7 @@ def generate_diff(logger)
LinkedData::Services::SubmissionDiffGenerator.new(self).process(logger)
end
- def index(logger, commit: true, optimize: true)
+ def index_terms(logger, commit: true, optimize: true)
LinkedData::Services::OntologySubmissionIndexer.new(self).process(logger, commit: commit, optimize: optimize)
end
diff --git a/lib/ontologies_linked_data/models/contact.rb b/lib/ontologies_linked_data/models/contact.rb
index 9af31a95..ccb3d5bf 100644
--- a/lib/ontologies_linked_data/models/contact.rb
+++ b/lib/ontologies_linked_data/models/contact.rb
@@ -6,6 +6,14 @@ class Contact < LinkedData::Models::Base
attribute :email, enforce: [:existence]
embedded true
+
+ def embedded_doc
+ bring(:name) if bring?(:name)
+ bring(:email) if bring?(:email)
+
+ "#{self.name} | #{self.email}"
+ end
+
end
end
end
diff --git a/lib/ontologies_linked_data/models/metric.rb b/lib/ontologies_linked_data/models/metric.rb
index 84ee0305..2d39be66 100644
--- a/lib/ontologies_linked_data/models/metric.rb
+++ b/lib/ontologies_linked_data/models/metric.rb
@@ -53,6 +53,14 @@ def self.metrics_id_generator(m)
raise ArgumentError, "Metrics id needs to be set"
#return RDF::URI.new(m.submission.id.to_s + "/metrics")
end
+
+ def embedded_doc
+ doc = indexable_object
+ doc.delete(:resource_model)
+ doc.delete(:resource_id)
+ doc.delete(:id)
+ doc
+ end
end
end
end
diff --git a/lib/ontologies_linked_data/models/ontology.rb b/lib/ontologies_linked_data/models/ontology.rb
index 94e03ce1..442eb868 100644
--- a/lib/ontologies_linked_data/models/ontology.rb
+++ b/lib/ontologies_linked_data/models/ontology.rb
@@ -26,8 +26,8 @@ class OntologyAnalyticsError < StandardError; end
model :ontology, :name_with => :acronym
attribute :acronym, namespace: :omv,
- enforce: [:unique, :existence, lambda { |inst,attr| validate_acronym(inst,attr) } ]
- attribute :name, :namespace => :omv, enforce: [:unique, :existence]
+ enforce: [:unique, :existence, lambda { |inst,attr| validate_acronym(inst,attr) } ], fuzzy_search: true
+ attribute :name, :namespace => :omv, enforce: [:unique, :existence], fuzzy_search: true
attribute :submissions, inverse: { on: :ontology_submission, attribute: :ontology },
metadataMappings: ["dct:hasVersion", "pav:hasCurrentVersion", "pav:hasVersion", "prov:generalizationOf", "adms:next"]
attribute :projects,
@@ -88,6 +88,10 @@ class OntologyAnalyticsError < StandardError; end
# Cache
cache_timeout 3600
+ enable_indexing(:ontology_metadata)
+
+ after_save :index_latest_submission
+
def self.validate_acronym(inst, attr)
inst.bring(attr) if inst.bring?(attr)
acronym = inst.send(attr)
@@ -420,9 +424,8 @@ def delete(*args)
end
# remove index entries
- unindex(index_commit)
- unindex_properties(index_commit)
-
+ unindex_all_data(index_commit)
+
# delete all files
ontology_dir = File.join(LinkedData.settings.repository_folder, self.acronym.to_s)
FileUtils.rm_rf(ontology_dir)
@@ -443,19 +446,43 @@ def save(*args)
self
end
- def unindex(commit=true)
+ def index_latest_submission
+ last_s = latest_submission(status: :any)
+ return if last_s.nil?
+
+ last_s.ontology = self
+ last_s.index_update([:ontology])
+ end
+
+ def unindex_all_data(commit=true)
unindex_by_acronym(commit)
+ unindex_properties(commit)
+ end
+
+ def embedded_doc
+ self.administeredBy.map{|x| x.bring_remaining}
+ doc = indexable_object
+ doc.delete(:id)
+ doc.delete(:resource_id)
+ doc.delete('ontology_viewOf_resource_model_t')
+ doc['ontology_viewOf_t'] = self.viewOf.id.to_s unless self.viewOf.nil?
+ doc[:resource_model_t] = doc.delete(:resource_model)
+ doc
end
def unindex_properties(commit=true)
- unindex_by_acronym(commit, :property)
+ self.bring(:acronym) if self.bring?(:acronym)
+ query = "submissionAcronym:#{acronym}"
+ OntologyProperty.unindexByQuery(query)
+ OntologyProperty.indexCommit(nil) if commit
end
- def unindex_by_acronym(commit=true, connection_name=:main)
+ def unindex_by_acronym(commit=true)
self.bring(:acronym) if self.bring?(:acronym)
query = "submissionAcronym:#{acronym}"
- Ontology.unindexByQuery(query, connection_name)
- Ontology.indexCommit(nil, connection_name) if commit
+ Class.unindexByQuery(query)
+ Class.indexCommit(nil) if commit
+ OntologySubmission.clear_indexed_content(acronym)
end
def restricted?
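
Note: index cleanup on delete is consolidated: `unindex_all_data` removes class documents, property documents, and the new `:ontology_data` content in one call, while the `after_save :index_latest_submission` hook re-indexes the latest submission's metadata whenever the ontology record changes. A hedged sketch of the delete-time path (acronym illustrative):

```ruby
ont = LinkedData::Models::Ontology.find('STY').first
ont.unindex_all_data(true)
# -> unindex_by_acronym : deletes Class documents for the acronym and
#    clears its content from the :ontology_data collection
# -> unindex_properties : deletes OntologyProperty documents for the acronym
```
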
diff --git a/lib/ontologies_linked_data/models/ontology_submission.rb b/lib/ontologies_linked_data/models/ontology_submission.rb
index 53f65464..a274e6cb 100644
--- a/lib/ontologies_linked_data/models/ontology_submission.rb
+++ b/lib/ontologies_linked_data/models/ontology_submission.rb
@@ -12,6 +12,7 @@ module Models
class OntologySubmission < LinkedData::Models::Base
+ include LinkedData::Concerns::OntologySubmission::IndexAllData
include LinkedData::Concerns::SubmissionProcessable
include LinkedData::Concerns::OntologySubmission::Validators
include LinkedData::Concerns::OntologySubmission::UpdateCallbacks
@@ -24,39 +25,39 @@ class OntologySubmission < LinkedData::Models::Base
FLAT_ROOTS_LIMIT = 1000
model :ontology_submission, scheme: File.join(__dir__, '../../../config/schemes/ontology_submission.yml'),
- name_with: ->(s) { submission_id_generator(s) }
+ name_with: ->(s) { submission_id_generator(s) }
attribute :submissionId, type: :integer, enforce: [:existence]
# Object description properties metadata
# Configurable properties for processing
- attribute :prefLabelProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:prefLabel]}
- attribute :definitionProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:definition]}
- attribute :synonymProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:altLabel]}
- attribute :authorProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:creator]}
+ attribute :prefLabelProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:prefLabel] }
+ attribute :definitionProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:definition] }
+ attribute :synonymProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:altLabel] }
+ attribute :authorProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:creator] }
attribute :classType, type: :uri
- attribute :hierarchyProperty, type: :uri, default: ->(s) {default_hierarchy_property(s)}
- attribute :obsoleteProperty, type: :uri, default: ->(s) {Goo.vocabulary(:owl)[:deprecated]}
- attribute :obsoleteParent, type: :uri, default: ->(s) {RDF::URI.new("http://www.geneontology.org/formats/oboInOwl#ObsoleteClass")}
- attribute :createdProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:created]}
- attribute :modifiedProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:modified]}
+ attribute :hierarchyProperty, type: :uri, default: ->(s) { default_hierarchy_property(s) }
+ attribute :obsoleteProperty, type: :uri, default: ->(s) { Goo.vocabulary(:owl)[:deprecated] }
+ attribute :obsoleteParent, type: :uri, default: ->(s) { RDF::URI.new("http://www.geneontology.org/formats/oboInOwl#ObsoleteClass") }
+ attribute :createdProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:created] }
+ attribute :modifiedProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:modified] }
# Ontology metadata
# General metadata
- attribute :URI, namespace: :omv, type: :uri, enforce: %i[existence distinct_of_identifier]
+ attribute :URI, namespace: :omv, type: :uri, enforce: %i[existence distinct_of_identifier], fuzzy_search: true
attribute :versionIRI, namespace: :owl, type: :uri, enforce: [:distinct_of_URI]
attribute :version, namespace: :omv
attribute :status, namespace: :omv, enforce: %i[existence], default: ->(x) { 'production' }
attribute :deprecated, namespace: :owl, type: :boolean, default: ->(x) { false }
attribute :hasOntologyLanguage, namespace: :omv, type: :ontology_format, enforce: [:existence]
attribute :hasFormalityLevel, namespace: :omv, type: :uri
- attribute :hasOntologySyntax, namespace: :omv, type: :uri, default: ->(s) {ontology_syntax_default(s)}
+ attribute :hasOntologySyntax, namespace: :omv, type: :uri, default: ->(s) { ontology_syntax_default(s) }
attribute :naturalLanguage, namespace: :omv, type: %i[list uri], enforce: [:lexvo_language]
attribute :isOfType, namespace: :omv, type: :uri
attribute :identifier, namespace: :dct, type: %i[list uri], enforce: [:distinct_of_URI]
# Description metadata
- attribute :description, namespace: :omv, enforce: %i[concatenate existence]
+ attribute :description, namespace: :omv, enforce: %i[concatenate existence], fuzzy_search: true
attribute :homepage, namespace: :foaf, type: :uri
attribute :documentation, namespace: :omv, type: :uri
attribute :notes, namespace: :omv, type: :list
@@ -100,7 +101,7 @@ class OntologySubmission < LinkedData::Models::Base
# Usage metadata
attribute :knownUsage, namespace: :omv, type: :list
attribute :designedForOntologyTask, namespace: :omv, type: %i[list uri]
- attribute :hasDomain, namespace: :omv, type: :list, default: ->(s) {ontology_has_domain(s)}
+ attribute :hasDomain, namespace: :omv, type: :list, default: ->(s) { ontology_has_domain(s) }
attribute :coverage, namespace: :dct
attribute :example, namespace: :vann, type: :list
@@ -119,13 +120,13 @@ class OntologySubmission < LinkedData::Models::Base
attribute :pullLocation, type: :uri # URI for pulling ontology
attribute :isFormatOf, namespace: :dct, type: :uri
attribute :hasFormat, namespace: :dct, type: %i[uri list]
- attribute :dataDump, namespace: :void, type: :uri, default: -> (s) {data_dump_default(s)}
- attribute :csvDump, type: :uri, default: -> (s) {csv_dump_default(s)}
- attribute :uriLookupEndpoint, namespace: :void, type: :uri, default: -> (s) {uri_lookup_default(s)}
- attribute :openSearchDescription, namespace: :void, type: :uri, default: -> (s) {open_search_default(s)}
+ attribute :dataDump, namespace: :void, type: :uri, default: -> (s) { data_dump_default(s) }
+ attribute :csvDump, type: :uri, default: -> (s) { csv_dump_default(s) }
+ attribute :uriLookupEndpoint, namespace: :void, type: :uri, default: -> (s) { uri_lookup_default(s) }
+ attribute :openSearchDescription, namespace: :void, type: :uri, default: -> (s) { open_search_default(s) }
attribute :source, namespace: :dct, type: :list
attribute :endpoint, namespace: :sd, type: %i[uri list],
- default: ->(s) {[RDF::URI.new(LinkedData.settings.sparql_endpoint_url)]}
+ default: ->(s) { default_sparql_endpoint(s)}
attribute :includedInDataCatalog, namespace: :schema, type: %i[list uri]
# Relations
@@ -174,13 +175,14 @@ class OntologySubmission < LinkedData::Models::Base
# Link to ontology
attribute :ontology, type: :ontology, enforce: [:existence]
-
def self.agents_attrs
- [:hasCreator, :publisher, :copyrightHolder, :hasContributor,
- :translator, :endorsedBy, :fundedBy, :curatedBy]
+ %i[hasCreator publisher copyrightHolder hasContributor
+ translator endorsedBy fundedBy curatedBy]
end
+
# Hypermedia settings
- embed *[:contact, :ontology, :metrics] + agents_attrs
+ embed *%i[contact ontology metrics] + agents_attrs
+
def self.embed_values_hash
out = {
submissionStatus: [:code], hasOntologyLanguage: [:acronym]
@@ -189,11 +191,11 @@ def self.embed_values_hash
agent_attributes = LinkedData::Models::Agent.goo_attrs_to_load +
[identifiers: LinkedData::Models::AgentIdentifier.goo_attrs_to_load, affiliations: LinkedData::Models::Agent.goo_attrs_to_load]
- agents_attrs.each { |k| out[k] = agent_attributes}
+ agents_attrs.each { |k| out[k] = agent_attributes }
out
end
- embed_values self.embed_values_hash
+ embed_values self.embed_values_hash
serialize_default :contact, :ontology, :hasOntologyLanguage, :released, :creationDate, :homepage,
:publication, :documentation, :version, :description, :status, :submissionId
@@ -213,6 +215,8 @@ def self.embed_values_hash
read_restriction_based_on ->(sub) { sub.ontology }
access_control_load ontology: %i[administeredBy acl viewingRestriction]
+ enable_indexing(:ontology_metadata)
+
def initialize(*args)
super(*args)
@mutex = Mutex.new
@@ -223,7 +227,7 @@ def synchronize(&block)
end
def self.agents_attr_uris
- agents_attrs.map{ |x| self.attribute_uri(x) }
+ agents_attrs.map { |x| self.attribute_uri(x) }
end
def self.ontology_link(m)
@@ -267,12 +271,8 @@ def self.segment_instance(sub)
end
def self.submission_id_generator(ss)
- if !ss.ontology.loaded_attributes.include?(:acronym)
- ss.ontology.bring(:acronym)
- end
- if ss.ontology.acronym.nil?
- raise ArgumentError, "Submission cannot be saved if ontology does not have acronym"
- end
+ ss.ontology.bring(:acronym) if !ss.ontology.loaded_attributes.include?(:acronym)
+ raise ArgumentError, "Submission cannot be saved if ontology does not have acronym" if ss.ontology.acronym.nil?
return RDF::URI.new(
"#{(Goo.id_prefix)}ontologies/#{CGI.escape(ss.ontology.acronym.to_s)}/submissions/#{ss.submissionId.to_s}"
)
@@ -291,9 +291,7 @@ def self.copy_file_repository(acronym, submissionId, src, filename = nil)
dst = File.join([path_to_repo, name])
FileUtils.copy(src, dst)
logger.debug("File created #{dst} | #{"%o" % File.stat(dst).mode} | umask: #{File.umask}") # NCBO-795
- if not File.exist? dst
- raise Exception, "Unable to copy #{src} to #{dst}"
- end
+ raise Exception, "Unable to copy #{src} to #{dst}" if not File.exist? dst
return dst
end
@@ -342,9 +340,7 @@ def sanity_check
rescue Exception => e1
sum_only = nil
- if i == num_calls
- raise $!, "#{$!} after retrying #{i} times...", $!.backtrace
- end
+ raise $!, "#{$!} after retrying #{i} times...", $!.backtrace if i == num_calls
end
end
end
@@ -356,9 +352,7 @@ def sanity_check
return false
elsif self.pullLocation
self.errors[:pullLocation] = ["File at #{self.pullLocation.to_s} does not exist"]
- if self.uploadFilePath.nil?
- return remote_file_exists?(self.pullLocation.to_s)
- end
+ return remote_file_exists?(self.pullLocation.to_s) if self.uploadFilePath.nil?
return true
end
@@ -374,12 +368,10 @@ def sanity_check
self.masterFileName = LinkedData::Utils::FileHelpers.automaster(self.uploadFilePath, self.hasOntologyLanguage.file_extension)
return true
elsif zip and self.masterFileName.nil?
- #zip and masterFileName not set. The user has to choose.
- if self.errors[:uploadFilePath].nil?
- self.errors[:uploadFilePath] = []
- end
+ # zip and masterFileName not set. The user has to choose.
+ self.errors[:uploadFilePath] = [] if self.errors[:uploadFilePath].nil?
- #check for duplicated names
+ # check for duplicated names
repeated_names = LinkedData::Utils::FileHelpers.repeated_names_in_file_list(files)
if repeated_names.length > 0
names = repeated_names.keys.to_s
@@ -388,13 +380,13 @@ def sanity_check
return false
end
- #error message with options to choose from.
+ # error message with options to choose from.
self.errors[:uploadFilePath] << {
:message => "Zip file detected, choose the master file.", :options => files }
return false
elsif zip and not self.masterFileName.nil?
- #if zip and the user chose a file then we make sure the file is in the list.
+ # if zip and the user chose a file then we make sure the file is in the list.
files = LinkedData::Utils::FileHelpers.files_from_zip(self.uploadFilePath)
if not files.include? self.masterFileName
if self.errors[:uploadFilePath].nil?
@@ -460,9 +452,7 @@ def unzip_submission(logger)
if zipped?
zip_dst = self.zip_folder
- if Dir.exist? zip_dst
- FileUtils.rm_r [zip_dst]
- end
+ FileUtils.rm_r [zip_dst] if Dir.exist? zip_dst
FileUtils.mkdir_p zip_dst
extracted = LinkedData::Utils::FileHelpers.unzip(self.uploadFilePath, zip_dst)
@@ -514,9 +504,7 @@ def class_count(logger = nil)
unless mx.empty?
count = mx[1][0].to_i
- if self.hasOntologyLanguage.skos?
- count += mx[1][1].to_i
- end
+ count += mx[1][1].to_i if self.hasOntologyLanguage.skos?
count_set = true
end
end
@@ -547,7 +535,7 @@ def add_submission_status(status)
valid = status.is_a?(LinkedData::Models::SubmissionStatus)
raise ArgumentError, "The status being added is not SubmissionStatus object" unless valid
- #archive removes the other status
+ # archive removes the other status
if status.archived?
self.submissionStatus = [status]
return self.submissionStatus
@@ -559,7 +547,9 @@ def add_submission_status(status)
if (status.error?)
# remove the corresponding non_error status (if exists)
non_error_status = status.get_non_error_status()
- s.reject! { |stat| stat.get_code_from_id() == non_error_status.get_code_from_id() } unless non_error_status.nil?
+ unless non_error_status.nil?
+ s.reject! { |stat| stat.get_code_from_id() == non_error_status.get_code_from_id() }
+ end
else
# remove the corresponding non_error status (if exists)
error_status = status.get_error_status()
@@ -624,7 +614,7 @@ def archived?
end
# Override delete to add removal from the search index
- #TODO: revise this with a better process
+ # TODO: revise this with a better process
def delete(*args)
options = {}
args.each { |e| options.merge!(e) if e.is_a?(Hash) }
@@ -632,8 +622,7 @@ def delete(*args)
index_commit = options[:index_commit] == false ? false : true
super(*args)
- self.ontology.unindex(index_commit)
- self.ontology.unindex_properties(index_commit)
+ self.ontology.unindex_all_data(index_commit)
self.bring(:metrics) if self.bring?(:metrics)
self.metrics.delete if self.metrics
@@ -646,7 +635,7 @@ def delete(*args)
prev_sub = self.ontology.latest_submission
if prev_sub
- prev_sub.index(LinkedData::Parser.logger || Logger.new($stderr))
+ prev_sub.index_terms(LinkedData::Parser.logger || Logger.new($stderr))
prev_sub.index_properties(LinkedData::Parser.logger || Logger.new($stderr))
end
end
@@ -726,15 +715,11 @@ def roots(extra_include = [], page = nil, pagesize = nil, concept_schemes: [], c
load_children = [:children]
end
- if extra_include.length > 0
- where.include(extra_include)
- end
+ where.include(extra_include) if extra_include.length > 0
end
where.all
- if load_children.length > 0
- LinkedData::Models::Class.partially_load_children(classes, 99, self)
- end
+ LinkedData::Models::Class.partially_load_children(classes, 99, self) if load_children.length > 0
classes.delete_if { |c|
obs = !c.obsolete.nil? && c.obsolete == true
@@ -872,9 +857,7 @@ def check_ftp_file(uri)
def self.loom_transform_literal(lit)
res = []
lit.each_char do |c|
- if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
- res << c.downcase
- end
+ res << c.downcase if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
end
return res.join ''
end
diff --git a/lib/ontologies_linked_data/models/properties/annotation_property.rb b/lib/ontologies_linked_data/models/properties/annotation_property.rb
index b071d09f..783e7021 100644
--- a/lib/ontologies_linked_data/models/properties/annotation_property.rb
+++ b/lib/ontologies_linked_data/models/properties/annotation_property.rb
@@ -34,6 +34,10 @@ class AnnotationProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/datatype_property.rb b/lib/ontologies_linked_data/models/properties/datatype_property.rb
index 1974bdb2..13d7b431 100644
--- a/lib/ontologies_linked_data/models/properties/datatype_property.rb
+++ b/lib/ontologies_linked_data/models/properties/datatype_property.rb
@@ -34,6 +34,10 @@ class DatatypeProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/object_property.rb b/lib/ontologies_linked_data/models/properties/object_property.rb
index 8abbc52f..0a85f2da 100644
--- a/lib/ontologies_linked_data/models/properties/object_property.rb
+++ b/lib/ontologies_linked_data/models/properties/object_property.rb
@@ -34,6 +34,10 @@ class ObjectProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/ontology_property.rb b/lib/ontologies_linked_data/models/properties/ontology_property.rb
index 1e9ced84..ac2c1499 100644
--- a/lib/ontologies_linked_data/models/properties/ontology_property.rb
+++ b/lib/ontologies_linked_data/models/properties/ontology_property.rb
@@ -3,6 +3,36 @@ module LinkedData
module Models
class OntologyProperty < LinkedData::Models::Base
+ model :ontology_property, name_with: ->(p) { uuid_uri_generator(p) }
+
+
+ def self.index_schema(schema_generator)
+ schema_generator.add_field(:label, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:labelGenerated, 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ schema_generator.add_field(:definition, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:submissionAcronym, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:parents, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:ontologyType, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:propertyType, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:ontologyId, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:submissionId, 'pint', indexed: true, stored: true, multi_valued: false)
+
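+ # Copy each label field into the catch-all _text_ field and its Exact/Suggest/SuggestEdge/SuggestNgram variants.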
+ %i[label labelGenerated].each do |field|
+ schema_generator.add_copy_field(field, '_text_')
+ schema_generator.add_copy_field(field, "#{field}Exact")
+ schema_generator.add_copy_field(field, "#{field}Suggest")
+ schema_generator.add_copy_field(field, "#{field}SuggestEdge")
+ schema_generator.add_copy_field(field, "#{field}SuggestNgram")
+ end
+ end
+
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
+
+
def retrieve_ancestors
retrieve_ancestors_descendants(:ancestors)
@@ -234,7 +264,7 @@ def index_doc(to_set=nil)
}
all_attrs = self.to_hash
- std = [:id, :label, :definition, :parents]
+ std = %i[id label definition parents]
std.each do |att|
cur_val = all_attrs[att]
@@ -288,7 +318,7 @@ def traverse_path_to_root(parents, paths, path_i, tree=false, top_property=nil)
rec_i = recursions[i]
path = paths[rec_i]
p = path.last
- p.bring(parents: [:label, :definition]) if p.bring?(:parents)
+ p.bring(parents: %i[label definition]) if p.bring?(:parents)
unless p.loaded_attributes.include?(:parents)
# fail safely
@@ -313,7 +343,7 @@ def self.ontology_link(m)
end
def self.partially_load_children(models, threshold, submission)
- ld = [:label, :definition]
+ ld = %i[label definition]
single_load = []
query = self.in(submission).models(models)
query.aggregate(:count, :children).all
diff --git a/lib/ontologies_linked_data/models/provisional_class.rb b/lib/ontologies_linked_data/models/provisional_class.rb
index b6c1a79e..1f4b06c4 100644
--- a/lib/ontologies_linked_data/models/provisional_class.rb
+++ b/lib/ontologies_linked_data/models/provisional_class.rb
@@ -38,6 +38,10 @@ class ProvisionalClass < LinkedData::Models::Base
end
}, Goo.vocabulary["Ontology"])
+ enable_indexing(:term_search_core1) do |schema_generator|
+ Class.index_schema(schema_generator)
+ end
+
def index_id()
self.bring(:ontology) if self.bring?(:ontology)
return nil unless self.ontology
@@ -141,38 +145,6 @@ def append_if_not_there_already(path, r)
true
end
- def index()
- if index_id
- unindex
- super
- LinkedData::Models::Ontology.indexCommit
- end
- end
-
- def unindex()
- ind_id = index_id
-
- if ind_id
- query = "id:#{solr_escape(ind_id)}"
- LinkedData::Models::Ontology.unindexByQuery(query)
- LinkedData::Models::Ontology.indexCommit
- end
- end
-
- ##
- # Override save to allow indexing
- def save(*args)
- super(*args)
- index
- self
- end
-
- def delete(*args)
- # remove index entries
- unindex
- super(*args)
- end
-
def solr_escape(text)
RSolr.solr_escape(text).gsub(/\s+/,"\\ ")
end
diff --git a/lib/ontologies_linked_data/models/resource.rb b/lib/ontologies_linked_data/models/resource.rb
new file mode 100644
index 00000000..9bccc785
--- /dev/null
+++ b/lib/ontologies_linked_data/models/resource.rb
@@ -0,0 +1,187 @@
+require 'rdf/raptor'
+
+module LinkedData
+ module Models
+
+ class Resource
+
+ def initialize(graph, id)
+ @id = id
+ @graph = graph
+ @hash = fetch_related_triples(graph, id)
+ end
+
+ def to_hash
+ @hash.dup
+ end
+
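+ # Build a throwaway Goo model class at runtime, define one attribute per predicate, and instantiate it with the fetched values.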
+ def to_object
+ hashes = self.to_hash
+ class_name = "GeneratedModel_#{Time.now.to_i}_#{rand(10000..99999)}"
+ model_schema = ::Class.new(LinkedData::Models::Base)
+ Object.const_set(class_name, model_schema)
+
+ model_schema.model(:resource, name_with: :id, rdf_type: lambda { |*_x| self.to_hash[Goo.namespaces[:rdf][:type].to_s] })
+ values_hash = {}
+ hashes.each do |predicate, value|
+ namespace, attr = namespace_predicate(predicate)
+ next if namespace.nil?
+
+ values = Array(value).map do |v|
+ if v.is_a?(Hash)
+ Struct.new(*v.keys.map { |k| namespace_predicate(k)[1].to_sym }.compact).new(*v.values)
+ else
+ v.is_a?(RDF::URI) ? v.to_s : v.object
+ end
+ end.compact
+
+ model_schema.attribute(attr.to_sym, property: namespace.to_s, enforce: get_type(value))
+ values_hash[attr.to_sym] = value.is_a?(Array) ? values : values.first
+ end
+
+ values_hash[:id] = hashes['id']
+ model_schema.new(values_hash)
+ end
+
+ def to_json
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::JSONLD, namespaces)
+ end
+
+ def to_xml
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::RDF_XML, namespaces)
+ end
+
+ def to_ntriples
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::NTRIPLES, namespaces)
+ end
+
+ def to_turtle
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::TURTLE, namespaces)
+ end
+
+ def namespaces
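+ # Map every namespace appearing in the hash to a known Goo prefix, minting ns0, ns1, ... for unknown ones.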
+ prefixes = {}
+ ns_count = 0
+ hash = to_hash
+ reverse = hash.delete('reverse')
+
+ hash.each do |key, value|
+ uris = [key]
+ uris += Array(value).map { |v| v.is_a?(Hash) ? v.to_a.flatten : v }.flatten
+ prefixes, ns_count = transform_to_prefixes(ns_count, prefixes, uris)
+ end
+
+ reverse.each { |key, uris| prefixes, ns_count = transform_to_prefixes(ns_count, prefixes, [key] + Array(uris)) }
+
+ prefixes
+ end
+
+ private
+
+ def transform_to_prefixes(ns_count, prefixes, uris)
+ uris.each do |uri|
+ namespace, id = namespace_predicate(uri)
+ next if namespace.nil? || prefixes.value?(namespace)
+
+ prefix, prefix_namespace = Goo.namespaces.select { |_k, v| v.to_s.eql?(namespace) }.first
+ if prefix
+ prefixes[prefix] = prefix_namespace.to_s
+ else
+ prefixes["ns#{ns_count}".to_sym] = namespace
+ ns_count += 1
+ end
+ end
+ [prefixes, ns_count]
+ end
+
+ def fetch_related_triples(graph, id)
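+ # Two SPARQL queries: one for triples where the resource is the subject (blank nodes expanded one level), one where it is the object, collected under 'reverse'.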
+ direct_fetch_query = Goo.sparql_query_client.select(:predicate, :object)
+ .from(RDF::URI.new(graph))
+ .where([RDF::URI.new(id), :predicate, :object])
+
+ inverse_fetch_query = Goo.sparql_query_client.select(:subject, :predicate)
+ .from(RDF::URI.new(graph))
+ .where([:subject, :predicate, RDF::URI.new(id)])
+
+ hashes = { 'id' => RDF::URI.new(id) }
+
+ direct_fetch_query.each_solution do |solution|
+ predicate = solution[:predicate].to_s
+ value = solution[:object]
+
+ if value.is_a?(RDF::Node) && Array(hashes[predicate]).none? { |x| x.is_a?(Hash) }
+ value = fetch_b_nodes_triples(graph, id, solution[:predicate])
+ elsif value.is_a?(RDF::Node)
+ next
+ end
+
+ hashes[predicate] = hashes[predicate] ? (Array(hashes[predicate]) + Array(value)) : value
+ end
+
+ hashes['reverse'] = {}
+ inverse_fetch_query.each_solution do |solution|
+ subject = solution[:subject].to_s
+ predicate = solution[:predicate]
+
+ if hashes['reverse'][subject]
+ if hashes['reverse'][subject].is_a?(Array)
+ hashes['reverse'][subject] << predicate
+ else
+ hashes['reverse'][subject] = [predicate, hashes['reverse'][subject]]
+ end
+ else
+ hashes['reverse'][subject] = predicate
+ end
+
+ end
+
+ hashes
+ end
+
+ def fetch_b_nodes_triples(graph, id, predicate)
+ b_node_fetch_query = Goo.sparql_query_client.select(:b, :predicate, :object)
+ .from(RDF::URI.new(graph))
+ .where(
+ [RDF::URI.new(id), predicate, :b],
+ %i[b predicate object]
+ )
+
+ b_nodes_hash = {}
+ b_node_fetch_query.each_solution do |s|
+ b_node_id = s[:b].to_s
+ predicate = s[:predicate].to_s
+ object = s[:object]
+ if b_nodes_hash[b_node_id]
+ b_nodes_hash[b_node_id][predicate] = object
+ else
+ b_nodes_hash[b_node_id] = { predicate => object }
+ end
+ end
+ b_nodes_hash.values
+ end
+
+ def get_type(value)
+ types = []
+ types << :list if value.is_a?(Array)
+ value = Array(value).first
+ if value.is_a?(RDF::URI)
+ types << :uri
+ elsif value.is_a?(Float)
+ types << :float
+ elsif value.is_a?(Integer)
+ types << :integer
+ elsif value.to_s.eql?('true') || value.to_s.eql?('false')
+ types << :boolean
+ end
+ types
+ end
+
+ def namespace_predicate(property_url)
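+ # Split a URI into its namespace (everything up to the last '/' or '#') and the trailing local identifier.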
+ regex = /^(?<namespace>.*[\/#])(?<id>[^\/#]+)$/
+ match = regex.match(property_url.to_s)
+ [match[:namespace], match[:id]] if match
+ end
+
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/models/users/user.rb b/lib/ontologies_linked_data/models/users/user.rb
index ec99a45c..470bbc49 100644
--- a/lib/ontologies_linked_data/models/users/user.rb
+++ b/lib/ontologies_linked_data/models/users/user.rb
@@ -57,6 +57,10 @@ def self.show_apikey?(inst)
end
end
+ def embedded_doc
+ "#{self.firstName} #{self.lastName} #{self.username}"
+ end
+
def initialize(attributes = {})
# Don't allow passwordHash to be set here
attributes.delete(:passwordHash)
diff --git a/lib/ontologies_linked_data/sample_data/ontology.rb b/lib/ontologies_linked_data/sample_data/ontology.rb
index 0528e238..61dcc04d 100644
--- a/lib/ontologies_linked_data/sample_data/ontology.rb
+++ b/lib/ontologies_linked_data/sample_data/ontology.rb
@@ -44,7 +44,7 @@ def self.create_ontologies_and_submissions(options = {})
o = LinkedData::Models::Ontology.new({
acronym: acronym_count,
- name: name || "#{acronym_count} Ontology",
+ name: name ? "#{name}#{count > 0 ? count : ''}" : "#{acronym_count} Ontology",
administeredBy: [u],
summaryOnly: false,
ontologyType: ontology_type
diff --git a/lib/ontologies_linked_data/security/authorization.rb b/lib/ontologies_linked_data/security/authorization.rb
index 05a02789..167ac4f4 100644
--- a/lib/ontologies_linked_data/security/authorization.rb
+++ b/lib/ontologies_linked_data/security/authorization.rb
@@ -1,6 +1,5 @@
require 'set'
-
module LinkedData
module Security
class Authorization
@@ -8,7 +7,6 @@ class Authorization
USER_APIKEY_PARAM = 'userapikey'.freeze
API_KEY_PARAM = 'apikey'.freeze
-
def initialize(app = nil)
@app = app
end
@@ -23,12 +21,11 @@ def initialize(app = nil)
def call(env)
req = Rack::Request.new(env)
params = req.params
-
+
apikey = find_apikey(env, params)
status = 200
error_message = ''
-
if !apikey
status = 401
error_message = <<-MESSAGE
@@ -48,6 +45,9 @@ def call(env)
if status.eql?(401) && !bypass?(env)
LinkedData::Serializer.build_response(env, status: status, body: response)
else
+ # unfreeze request params so that Rack can re-encode them; needed after updating the RDF gem to v3.0
+ env["rack.request.form_hash"]&.transform_values!(&:dup)
+ env["rack.request.query_hash"]&.transform_values!(&:dup)
status, headers, response = @app.call(env)
save_apikey_in_cookie(env, headers, apikey, params)
[status, headers, response]
@@ -64,6 +64,7 @@ def bypass?(env)
##
# Inject a cookie with the API Key if it is present and we're in HTML content type
COOKIE_APIKEY_PARAM = "ncbo_apikey"
+
def save_apikey_in_cookie(env, headers, apikey, params)
# If we're using HTML, inject the apikey in a cookie (ignores bad accept headers)
best = nil
@@ -95,7 +96,6 @@ def find_apikey(env, params)
cookie_apikey(env)
end
-
def authorized?(apikey, env)
return false if apikey.nil?
@@ -103,8 +103,8 @@ def authorized?(apikey, env)
store_user(APIKEYS_FOR_AUTHORIZATION[apikey], env)
else
user = LinkedData::Models::User.where(apikey: apikey)
- .include(LinkedData::Models::User.attributes(:all))
- .first
+ .include(LinkedData::Models::User.attributes(:all))
+ .first
return false if user.nil?
# This will kind-of break if multiple apikeys exist
@@ -122,7 +122,6 @@ def store_user(user, env)
private
-
def request_header_apikey(env)
header_auth = get_header_auth(env)
return if header_auth.empty?
@@ -151,7 +150,7 @@ def get_header_auth(env)
env["HTTP_AUTHORIZATION"] || env["Authorization"] || ''
end
- def user_apikey(env,params)
+ def user_apikey(env, params)
return unless (params["apikey"] && params["userapikey"])
apikey_authed = authorized?(params[API_KEY_PARAM], env)
diff --git a/lib/ontologies_linked_data/serializers/jsonld.rb b/lib/ontologies_linked_data/serializers/jsonld.rb
new file mode 100644
index 00000000..22e6b7d6
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/jsonld.rb
@@ -0,0 +1,40 @@
+require 'multi_json'
+require 'json/ld'
+
+module LinkedData
+ module Serializers
+ class JSONLD
+
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ graph = RDF::Graph.new
+
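+ # Rebuild an RDF graph from the plain hash, materializing nested hashes as blank nodes, before JSON-LD compaction.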
+ hashes.each do |property_url, val|
+ Array(val).each do |v|
+ if v.is_a?(Hash)
+ blank_node = RDF::Node.new
+ v.each do |blank_predicate, blank_value|
+ graph << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ v = blank_node
+ end
+ graph << RDF::Statement.new(subject, RDF::URI.new(property_url), v)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ graph << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
+ context = { '@context' => options.transform_keys(&:to_s) }
+ compacted = ::JSON::LD::API.compact(::JSON::LD::API.fromRdf(graph), context['@context'])
+ MultiJson.dump(compacted)
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/ntriples.rb b/lib/ontologies_linked_data/serializers/ntriples.rb
new file mode 100644
index 00000000..c96795a7
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/ntriples.rb
@@ -0,0 +1,37 @@
+module LinkedData
+ module Serializers
+ class NTRIPLES
+
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ RDF::Writer.for(:ntriples).buffer(prefixes: options) do |writer|
+ hashes.each do |p, o|
+ predicate = RDF::URI.new(p)
+ Array(o).each do |item|
+ if item.is_a?(Hash)
+ blank_node = RDF::Node.new
+ item.each do |blank_predicate, blank_value|
+ writer << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ item = blank_node
+ end
+ writer << RDF::Statement.new(subject, predicate, item)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ writer << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+ end
+ end
+
+ end
+ end
+end
+
+
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/rdf_xml.rb b/lib/ontologies_linked_data/serializers/rdf_xml.rb
new file mode 100644
index 00000000..e06590f0
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/rdf_xml.rb
@@ -0,0 +1,43 @@
+module LinkedData
+ module Serializers
+ class RDF_XML
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes["id"])
+ reverse = hashes["reverse"] || {}
+ hashes.delete("id")
+ hashes.delete("reverse")
+ graph = RDF::Graph.new
+
+ hashes.each do |property_url, val|
+ Array(val).each do |v|
+ if v.is_a?(Hash)
+ blank_node = RDF::Node.new
+ v.each do |blank_predicate, blank_value|
+ graph << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ v = blank_node
+ end
+ graph << RDF::Statement.new(subject, RDF::URI.new(property_url), v)
+ end
+ end
+
+ inverse_graph = RDF::Graph.new
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ inverse_graph << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
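+ # Serialize the direct and reverse graphs separately; they are spliced into a single XML document below.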
+ a = RDF::RDFXML::Writer.buffer(prefixes: options) do |writer|
+ writer << graph
+ end
+
+ b = RDF::RDFXML::Writer.buffer(prefixes: options) do |writer|
+ writer << inverse_graph
+ end
+ xml_result = "#{a.chomp("\n")}\n#{b.sub!(/^<\?xml[^>]*>\n]*>/, '').gsub(/^$\n/, '')}"
+ xml_result.gsub(/^$\n/, '')
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/serializers.rb b/lib/ontologies_linked_data/serializers/serializers.rb
index b6006280..1603c1db 100644
--- a/lib/ontologies_linked_data/serializers/serializers.rb
+++ b/lib/ontologies_linked_data/serializers/serializers.rb
@@ -1,8 +1,12 @@
require 'ontologies_linked_data/media_types'
require 'ontologies_linked_data/serializers/xml'
+require 'ontologies_linked_data/serializers/rdf_xml'
require 'ontologies_linked_data/serializers/json'
require 'ontologies_linked_data/serializers/jsonp'
+require 'ontologies_linked_data/serializers/jsonld'
require 'ontologies_linked_data/serializers/html'
+require 'ontologies_linked_data/serializers/ntriples'
+require 'ontologies_linked_data/serializers/turtle'
module LinkedData
module Serializers
@@ -10,17 +14,15 @@ def self.serialize(obj, type, options = {})
SERIALIZERS[type].serialize(obj, options)
end
- class Turtle
- def self.serialize(obj, options)
- end
- end
-
SERIALIZERS = {
LinkedData::MediaTypes::HTML => HTML,
LinkedData::MediaTypes::JSON => JSON,
LinkedData::MediaTypes::JSONP => JSONP,
+ LinkedData::MediaTypes::JSONLD => JSONLD,
LinkedData::MediaTypes::XML => XML,
- LinkedData::MediaTypes::TURTLE => JSON
+ LinkedData::MediaTypes::RDF_XML => RDF_XML,
+ LinkedData::MediaTypes::TURTLE => TURTLE,
+ LinkedData::MediaTypes::NTRIPLES => NTRIPLES
}
end
end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/turtle.rb b/lib/ontologies_linked_data/serializers/turtle.rb
new file mode 100644
index 00000000..b0cc9ecf
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/turtle.rb
@@ -0,0 +1,38 @@
+module LinkedData
+ module Serializers
+ class TURTLE
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ options.delete(:rdf)
+
+ RDF::Writer.for(:turtle).buffer(prefixes: options) do |writer|
+ hashes.each do |p, o|
+ predicate = RDF::URI.new(p)
+ Array(o).each do |item|
+ if item.is_a?(Hash)
+ blank_node = RDF::Node.new
+ item.each do |blank_predicate, blank_value|
+ writer << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ item = blank_node
+ end
+ writer << RDF::Statement.new(subject, predicate, item)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ writer << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
+ end
+ end
+ end
+ end
+end
+
+
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/services/submission_process/submission_processor.rb b/lib/ontologies_linked_data/services/submission_process/submission_processor.rb
index 436a5d43..da6c1881 100644
--- a/lib/ontologies_linked_data/services/submission_process/submission_processor.rb
+++ b/lib/ontologies_linked_data/services/submission_process/submission_processor.rb
@@ -48,7 +48,7 @@ def process_submission(logger, options = {})
cannot be indexed because it has not been successfully parsed"
end
- @submission.index(logger, commit: process_index_commit?(options))
+ @submission.index_terms(logger, commit: process_index_commit?(options))
end
if process_index_properties?(options)
diff --git a/rakelib/docker_based_test.rake b/rakelib/docker_based_test.rake
index d9b334f4..52af504c 100644
--- a/rakelib/docker_based_test.rake
+++ b/rakelib/docker_based_test.rake
@@ -5,6 +5,19 @@ namespace :test do
namespace :docker do
task :up do
system("docker compose up -d") || abort("Unable to start docker containers")
+ unless system("curl -sf http://localhost:8983/solr || exit 1")
+ printf("waiting for Solr container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:8983/solr || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ abort(" Solr container hasn't initialized properly")
+ end
+ end
+ printf("\n")
+ end
end
task :down do
#system("docker compose --profile fs --profile ag stop")
diff --git a/test/models/test_class_request_lang.rb b/test/models/test_class_request_lang.rb
index a07eeffd..d4713a89 100644
--- a/test/models/test_class_request_lang.rb
+++ b/test/models/test_class_request_lang.rb
@@ -47,6 +47,10 @@ def test_requested_language_found
assert_equal ['industrial development'], properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s)
assert_equal ['industrialization'], properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s)
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_13078',
+ requested_lang: :FR)
+ assert_equal 'carbone renouvelable', cls.prefLabel
+
end
def test_requested_language_not_found
diff --git a/test/models/test_ontology_submission.rb b/test/models/test_ontology_submission.rb
index 1351159d..6f6d49ec 100644
--- a/test/models/test_ontology_submission.rb
+++ b/test/models/test_ontology_submission.rb
@@ -358,7 +358,7 @@ def test_process_submission_archive
assert File.file?(File.join(latest_sub.data_folder, 'owlapi.xrdf')),
%-Missing ontology submission file: 'owlapi.xrdf'-
- assert File.file?(latest_sub.csv_path),
- %-Missing ontology submission file: '#{latest_sub.csv_path}'-
+ refute File.file?(latest_sub.csv_path),
+ %-File should not exist: '#{latest_sub.csv_path}'-
assert File.file?(latest_sub.parsing_log_path),
@@ -423,7 +423,7 @@ def test_index_properties
submission_parse("BRO", "BRO Ontology",
"./test/data/ontology_files/BRO_v3.5.owl", 1,
process_rdf: true, extract_metadata: false, index_properties: true)
- res = LinkedData::Models::Class.search("*:*", {:fq => "submissionAcronym:\"BRO\"", :start => 0, :rows => 80}, :property)
+ res = LinkedData::Models::OntologyProperty.search("*:*", {:fq => "submissionAcronym:\"BRO\"", :start => 0, :rows => 80})
assert_includes [81, 52], res["response"]["numFound"] # 81 if owlapi imports SKOS properties
found = 0
@@ -452,7 +452,7 @@ def test_index_properties
ont = LinkedData::Models::Ontology.find('BRO').first
ont.unindex_properties(true)
- res = LinkedData::Models::Class.search("*:*", {:fq => "submissionAcronym:\"BRO\""}, :property)
+ res = LinkedData::Models::OntologyProperty.search("*:*", {:fq => "submissionAcronym:\"BRO\""})
assert_equal 0, res["response"]["numFound"]
end
@@ -464,30 +464,31 @@ def test_index_multilingual
index_search: true)
- res = LinkedData::Models::Class.search("prefLabel:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ res = LinkedData::Models::Class.search("prefLabel:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
refute_equal 0, res["response"]["numFound"]
doc = res["response"]["docs"].select{|doc| doc["resource_id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
refute_nil doc
- assert_equal 30, doc.keys.select{|k| k['prefLabel'] || k['synonym']}.size # test that all the languages are indexed
+ #binding.pry
+ #assert_equal 30, doc.keys.select{|k| k['prefLabel'] || k['synonym']}.size # test that all the languages are indexed
- res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
refute_equal 0, res["response"]["numFound"]
refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
- res = LinkedData::Models::Class.search("prefLabel_fr:Activité", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ res = LinkedData::Models::Class.search("prefLabel_fr:Activité", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
refute_equal 0, res["response"]["numFound"]
refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
- res = LinkedData::Models::Class.search("prefLabel_en:ActivityEnglish", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ res = LinkedData::Models::Class.search("prefLabel_en:ActivityEnglish", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
refute_equal 0, res["response"]["numFound"]
refute_nil res["response"]["docs"].select{|doc| doc["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity')}.first
- res = LinkedData::Models::Class.search("prefLabel_fr:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80}, :main)
+ res = LinkedData::Models::Class.search("prefLabel_fr:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
assert_equal 0, res["response"]["numFound"]
end
diff --git a/test/models/test_provisional_class.rb b/test/models/test_provisional_class.rb
index ffcee4ce..c0ab44cf 100644
--- a/test/models/test_provisional_class.rb
+++ b/test/models/test_provisional_class.rb
@@ -21,8 +21,8 @@ def self.after_suite
pc = LinkedData::Models::ProvisionalClass.find(@@provisional_class.id).first
pc.delete unless pc.nil?
- LinkedData::Models::Ontology.indexClear
- LinkedData::Models::Ontology.indexCommit
+ LinkedData::Models::ProvisionalClass.indexClear
+ LinkedData::Models::ProvisionalClass.indexCommit
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
user = LinkedData::Models::User.find("Test User").first
user.delete unless user.nil?
@@ -285,11 +285,11 @@ def test_provisional_class_search_indexing
pc = @@provisional_class
pc.ontology = @@ontology
pc.unindex
- resp = LinkedData::Models::Ontology.search("\"#{pc.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc.label}\"", params)
assert_equal 0, resp["response"]["numFound"]
pc.index
- resp = LinkedData::Models::Ontology.search("\"#{pc.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
assert_equal pc.label, resp["response"]["docs"][0]["prefLabel"].first
pc.unindex
@@ -312,18 +312,18 @@ def test_provisional_class_search_indexing
pc3.save
pc3 = LinkedData::Models::ProvisionalClass.find(pc3.id).include(:label).first
- resp = LinkedData::Models::Ontology.search("\"#{pc1.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc1.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
assert_equal pc1.label, resp["response"]["docs"][0]["prefLabel"].first
par_len = resp["response"]["docs"][0]["parents"].length
assert_equal 5, par_len
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == class_id.to_s }).length
- resp = LinkedData::Models::Ontology.search("\"#{pc2.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc2.label}\"", params)
assert_equal par_len + 1, resp["response"]["docs"][0]["parents"].length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc1.id.to_s }).length
- resp = LinkedData::Models::Ontology.search("\"#{pc3.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc3.label}\"", params)
assert_equal par_len + 2, resp["response"]["docs"][0]["parents"].length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc1.id.to_s }).length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc2.id.to_s }).length
diff --git a/test/models/test_resource.rb b/test/models/test_resource.rb
new file mode 100644
index 00000000..b409ddb1
--- /dev/null
+++ b/test/models/test_resource.rb
@@ -0,0 +1,292 @@
+require_relative "../test_case"
+require_relative './test_ontology_common'
+
+class TestResource < LinkedData::TestOntologyCommon
+
+ def self.before_suite
+ LinkedData::TestCase.backend_4s_delete
+
+ # Example
+ data = %(
+ <http://example.org/person1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/name> "John Doe" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/age> "30"^^<http://www.w3.org/2001/XMLSchema#integer> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/gender> "male" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/email> <mailto:john@example.com> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> <http://example.org/person3> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:blanknode1 .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:blanknode2 .
+ _:blanknode1 <http://xmlns.com/foaf/0.1/name> "Jane Smith" .
+ _:blanknode1 <http://xmlns.com/foaf/0.1/age> "25"^^<http://www.w3.org/2001/XMLSchema#integer> .
+ _:blanknode1 <http://xmlns.com/foaf/0.1/gender> "female" .
+ _:blanknode1 <http://xmlns.com/foaf/0.1/email> <mailto:jane@example.com> .
+ _:blanknode2 <http://xmlns.com/foaf/0.1/name> "Jane Smith 2" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+
+ <http://example2.org/person2> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/name> "Alice Cooper" .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/age> "35"^^<http://www.w3.org/2001/XMLSchema#integer> .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/gender> "female" .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/email> <mailto:alice@example.com> .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/hasSkill> _:skill1, _:skill2 .
+ _:skill1 <http://xmlns.com/foaf/0.1/name> "Programming" .
+ _:skill1 <http://xmlns.com/foaf/0.1/related> _:skill2 .
+ _:skill2 <http://xmlns.com/foaf/0.1/name> "Data Analysis" .
+ _:skill2 <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://example.org/Skill> .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/hasInterest> "Photography" .
+
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/mother> <http://example.org/person1> .
+ <http://example2.org/person5> <http://xmlns.com/foaf/0.1/brother> <http://example.org/person1> .
+ <http://example2.org/person5> <http://xmlns.com/foaf/0.1/friend> <http://example.org/person1> .
+
+ )
+
+ graph = "http://example.org/test_graph"
+ Goo.sparql_data_client.execute_append_request(graph, data, '')
+
+ # instance the resource model
+ @@resource1 = LinkedData::Models::Resource.new("http://example.org/test_graph", "http://example.org/person1")
+ end
+
+ def self.after_suite
+ Goo.sparql_data_client.delete_graph("http://example.org/test_graph")
+ Goo.sparql_data_client.delete_graph("http://data.bioontology.org/ontologies/TEST-TRIPLES/submissions/2")
+ @resource1&.destroy
+ end
+
+ def test_generate_model
+ @object = @@resource1.to_object
+ @model = @object.class
+
+ assert_equal LinkedData::Models::Base, @model.ancestors[1]
+
+ @model.model_settings[:attributes].map do |property, val|
+ property_url = "#{val[:property]}#{property}"
+ assert_includes @@resource1.to_hash.keys, property_url
+
+ hash_value = @@resource1.to_hash[property_url]
+ object_value = @object.send(property.to_sym)
+ if property.to_sym == :knows
+ assert_equal hash_value.map{|x| x.is_a?(Hash) ? x.values : x}.flatten.map(&:to_s).sort,
+ object_value.map{|x| x.is_a?(String) ? x : x.to_h.values}.flatten.map(&:to_s).sort
+ else
+ assert_equal Array(hash_value).map(&:to_s), Array(object_value).map(&:to_s)
+ end
+ end
+
+ assert_equal "http://example.org/person1", @object.id.to_s
+
+ assert_equal Goo.namespaces[:foaf][:Person].to_s, @model.type_uri.to_s
+ end
+
+ def test_resource_fetch_related_triples
+ result = @@resource1.to_hash
+ assert_instance_of Hash, result
+
+ refute_empty result
+
+ expected_result = {
+ "id" => "http://example.org/person1",
+ "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" => "http://xmlns.com/foaf/0.1/Person",
+ "http://xmlns.com/foaf/0.1/gender" => "male",
+ "http://xmlns.com/foaf/0.1/hasInterest" => %w[Cooking Hiking],
+ "http://xmlns.com/foaf/0.1/age" => "30",
+ "http://xmlns.com/foaf/0.1/email" => "mailto:john@example.com",
+ "http://xmlns.com/foaf/0.1/knows" =>
+ ["http://example.org/person3",
+ {
+ "http://xmlns.com/foaf/0.1/gender" => "female",
+ "http://xmlns.com/foaf/0.1/age" => "25",
+ "http://xmlns.com/foaf/0.1/email" => "mailto:jane@example.com",
+ "http://xmlns.com/foaf/0.1/name" => "Jane Smith"
+ },
+ {
+ "http://xmlns.com/foaf/0.1/name" => "Jane Smith 2"
+ }
+ ],
+ "http://xmlns.com/foaf/0.1/name" => "John Doe",
+ "reverse" => {
+ "http://example2.org/person2" => "http://xmlns.com/foaf/0.1/mother",
+ "http://example2.org/person5" => ["http://xmlns.com/foaf/0.1/brother", "http://xmlns.com/foaf/0.1/friend"]
+ }
+ }
+ result = JSON.parse(MultiJson.dump(result))
+ a = sort_nested_hash(result)
+ b = sort_nested_hash(expected_result)
+ assert_equal b, a
+ end
+
+ def test_resource_serialization_json
+ result = @@resource1.to_json
+
+ refute_empty result
+ expected_result = %(
+ {
+ "@context": {"ns0": "http://example.org/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "foaf": "http://xmlns.com/foaf/0.1/", "ns1": "http://example2.org/"},
+ "@graph": [
+ {
+ "@id": "ns0:person1",
+ "@type": "foaf:Person",
+ "foaf:name": "John Doe",
+ "foaf:age": {"@type": "http://www.w3.org/2001/XMLSchema#integer", "@value": "30"},
+ "foaf:email": {"@id": "mailto:john@example.com"},
+ "foaf:gender": "male",
+ "foaf:hasInterest": ["Cooking", "Hiking"],
+ "foaf:knows": [{"@id": "ns0:person3"}, {"@id": "_:g445960"}, {"@id": "_:g445980"}]
+ },
+ {
+ "@id": "_:g445960",
+ "foaf:name": "Jane Smith",
+ "foaf:age": {"@type": "http://www.w3.org/2001/XMLSchema#integer", "@value": "25"},
+ "foaf:email": {"@id": "mailto:jane@example.com"},
+ "foaf:gender": "female"
+ },
+ {"@id": "_:g445980", "foaf:name": "Jane Smith 2"},
+ {"@id": "ns1:person5", "foaf:friend": {"@id": "ns0:person1"}, "foaf:brother": {"@id": "ns0:person1"}},
+ {"@id": "ns1:person2", "foaf:mother": {"@id": "ns0:person1"}}
+ ]
+ }
+ )
+ result = JSON.parse(result.gsub(' ', '').gsub("\n", '').gsub(/_:g\d+/, 'blank_nodes'))
+ expected_result = JSON.parse(expected_result.gsub(' ', '').gsub("\n", '').gsub(/_:g\d+/, 'blank_nodes'))
+
+ a = sort_nested_hash(result)
+ b = sort_nested_hash(expected_result)
+
+ assert_equal b, a
+ end
+
+ def test_resource_serialization_xml
+ result = @@resource1.to_xml
+
+ refute_empty result
+ expected_result = %(
+ <?xml version="1.0" encoding="UTF-8"?>
+ <rdf:RDF xmlns:foaf="http://xmlns.com/foaf/0.1/" xmlns:ns0="http://example.org/" xmlns:ns1="http://example2.org/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+   <foaf:Person rdf:about="http://example.org/person1">
+     <foaf:gender>male</foaf:gender>
+     <foaf:hasInterest>Cooking</foaf:hasInterest>
+     <foaf:hasInterest>Hiking</foaf:hasInterest>
+     <foaf:age rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">30</foaf:age>
+     <foaf:email rdf:resource="mailto:john@example.com"/>
+     <foaf:knows rdf:resource="http://example.org/person3"/>
+     <foaf:knows>
+       <rdf:Description rdf:nodeID="g445940">
+         <foaf:gender>female</foaf:gender>
+         <foaf:age rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">25</foaf:age>
+         <foaf:email rdf:resource="mailto:jane@example.com"/>
+         <foaf:name>Jane Smith</foaf:name>
+       </rdf:Description>
+     </foaf:knows>
+     <foaf:knows>
+       <rdf:Description rdf:nodeID="g445960">
+         <foaf:name>Jane Smith 2</foaf:name>
+       </rdf:Description>
+     </foaf:knows>
+     <foaf:name>John Doe</foaf:name>
+   </foaf:Person>
+   <rdf:Description rdf:about="http://example2.org/person2">
+     <foaf:mother rdf:resource="http://example.org/person1"/>
+   </rdf:Description>
+   <rdf:Description rdf:about="http://example2.org/person5">
+     <foaf:brother rdf:resource="http://example.org/person1"/>
+     <foaf:friend rdf:resource="http://example.org/person1"/>
+   </rdf:Description>
+ </rdf:RDF>
+ )
+ a = result.gsub(' ', '').gsub(/rdf:nodeID="[^"]*"/, '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').gsub(/rdf:nodeID="[^"]*"/, '').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ def test_resource_serialization_ntriples
+ result = @@resource1.to_ntriples
+
+ refute_empty result
+
+ expected_result = %(
+ <http://example.org/person1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/gender> "male" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/age> "30"^^<http://www.w3.org/2001/XMLSchema#integer> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/email> <mailto:john@example.com> .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> <http://example.org/person3> .
+ _:g445940 <http://xmlns.com/foaf/0.1/gender> "female" .
+ _:g445940 <http://xmlns.com/foaf/0.1/age> "25"^^<http://www.w3.org/2001/XMLSchema#integer> .
+ _:g445940 <http://xmlns.com/foaf/0.1/email> <mailto:jane@example.com> .
+ _:g445940 <http://xmlns.com/foaf/0.1/name> "Jane Smith" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:g445940 .
+ _:g445960 <http://xmlns.com/foaf/0.1/name> "Jane Smith 2" .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:g445960 .
+ <http://example.org/person1> <http://xmlns.com/foaf/0.1/name> "John Doe" .
+ <http://example2.org/person2> <http://xmlns.com/foaf/0.1/mother> <http://example.org/person1> .
+ <http://example2.org/person5> <http://xmlns.com/foaf/0.1/brother> <http://example.org/person1> .
+ <http://example2.org/person5> <http://xmlns.com/foaf/0.1/friend> <http://example.org/person1> .
+ )
+ a = result.gsub(' ', '').gsub(/_:g\d+/, 'blank_nodes').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').gsub(/_:g\d+/, 'blank_nodes').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ def test_resource_serialization_turtle
+ result = @@resource1.to_turtle
+ refute_empty result
+ expected_result = %(
+ @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+ @prefix ns0: <http://example.org/> .
+ @prefix foaf: <http://xmlns.com/foaf/0.1/> .
+ @prefix ns1: <http://example2.org/> .
+
+ ns0:person1
+ a foaf:Person ;
+ foaf:age 30 ;
+ foaf:email <mailto:john@example.com> ;
+ foaf:gender "male" ;
+ foaf:hasInterest "Cooking", "Hiking" ;
+ foaf:knows ns0:person3, [
+ foaf:age 25 ;
+ foaf:email <mailto:jane@example.com> ;
+ foaf:gender "female" ;
+ foaf:name "Jane Smith"
+ ], [
+ foaf:name "Jane Smith 2"
+ ] ;
+ foaf:name "John Doe" .
+
+ ns1:person2
+ foaf:mother ns0:person1 .
+
+ ns1:person5
+ foaf:brother ns0:person1 ;
+ foaf:friend ns0:person1 .
+ )
+ a = result.gsub(' ', '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ private
+
+ def sort_nested_hash(hash)
+ sorted_hash = {}
+
+ hash.each do |key, value|
+ if value.is_a?(Hash)
+ sorted_hash[key] = sort_nested_hash(value)
+ elsif value.is_a?(Array)
+ sorted_hash[key] = value.map { |item| item.is_a?(Hash) ? sort_nested_hash(item) : item }.sort_by { |item| item.to_s }
+ else
+ sorted_hash[key] = value
+ end
+ end
+
+ sorted_hash.sort.to_h
+ end
+
+end
\ No newline at end of file
diff --git a/test/models/test_search.rb b/test/models/test_search.rb
new file mode 100644
index 00000000..9a0b280b
--- /dev/null
+++ b/test/models/test_search.rb
@@ -0,0 +1,194 @@
+require_relative '../test_case'
+
+class TestSearch < LinkedData::TestCase
+
+ def self.after_suite
+ backend_4s_delete
+ LinkedData::Models::Ontology.indexClear
+ LinkedData::Models::Agent.indexClear
+ end
+
+ def setup
+ self.class.after_suite
+ end
+
+ def test_search_ontology
+ ont_count, ont_acronyms, created_ontologies = create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: false, run_metrics: true},
+ acronym: 'BROTEST',
+ name: 'ontTEST Bla',
+ file_path: '../../../../test/data/ontology_files/BRO_v3.2.owl',
+ ont_count: 3,
+ submission_count: 3
+ })
+
+ ontologies = LinkedData::Models::Ontology.search('*:*', { fq: 'resource_model: "ontology"' })['response']['docs']
+
+ assert_equal 3, ontologies.size
+ ontologies.each do |ont|
+ select_ont = created_ontologies.select { |ont_created| ont_created.id.to_s.eql?(ont['id']) }.first
+ refute_nil select_ont
+ select_ont.bring_remaining
+ assert_equal ont['name_text'], select_ont.name
+ assert_equal ont['acronym_text'], select_ont.acronym
+ assert_equal ont['viewingRestriction_t'], select_ont.viewingRestriction
+ assert_equal ont['ontologyType_t'], select_ont.ontologyType.id
+ end
+
+ submissions = LinkedData::Models::Ontology.search('*:*', { fq: 'resource_model: "ontology_submission"' })['response']['docs']
+ assert_equal 9, submissions.size
+ submissions.each do |sub|
+ created_sub = LinkedData::Models::OntologySubmission.find(RDF::URI.new(sub['id'])).first&.bring_remaining
+ refute_nil created_sub
+ assert_equal sub['description_text'], created_sub.description
+ assert_equal sub['submissionId_i'], created_sub.submissionId
+ assert_equal sub['URI_text'], created_sub.URI
+ assert_equal sub['status_t'], created_sub.status
+ assert_equal sub['deprecated_b'], created_sub.deprecated
+ assert_equal sub['hasOntologyLanguage_t'], created_sub.hasOntologyLanguage.id.to_s
+ assert_equal sub['released_dt'], created_sub.released.utc.strftime('%Y-%m-%dT%H:%M:%SZ')
+ assert_equal sub['creationDate_dt'], created_sub.creationDate.utc.strftime('%Y-%m-%dT%H:%M:%SZ')
+ assert_equal(sub['contact_txt'], created_sub.contact.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal sub['dataDump_t'], created_sub.dataDump
+ assert_equal sub['csvDump_t'], created_sub.csvDump
+ assert_equal sub['uriLookupEndpoint_t'], created_sub.uriLookupEndpoint
+ assert_equal sub['openSearchDescription_t'], created_sub.openSearchDescription
+ assert_equal sub['endpoint_txt'], created_sub.endpoint
+ assert_equal sub['uploadFilePath_t'], created_sub.uploadFilePath
+ assert_equal sub['submissionStatus_txt'].sort, created_sub.submissionStatus.map{|x| x.id.to_s}.sort
+
+ created_sub.metrics.bring_remaining
+
+ assert_equal sub['metrics_classes_i'], created_sub.metrics.classes
+ assert_equal sub['metrics_individuals_i'], created_sub.metrics.individuals
+ assert_equal sub['metrics_properties_i'], created_sub.metrics.properties
+ assert_equal sub['metrics_maxDepth_i'], created_sub.metrics.maxDepth
+ assert_equal sub['metrics_maxChildCount_i'], created_sub.metrics.maxChildCount
+ assert_equal sub['metrics_averageChildCount_i'], created_sub.metrics.averageChildCount
+ assert_equal sub['metrics_classesWithOneChild_i'], created_sub.metrics.classesWithOneChild
+ assert_equal sub['metrics_classesWithMoreThan25Children_i'], created_sub.metrics.classesWithMoreThan25Children
+ assert_equal sub['metrics_classesWithNoDefinition_i'], created_sub.metrics.classesWithNoDefinition
+
+ embed_doc = created_sub.ontology.bring_remaining.embedded_doc
+ embed_doc.each do |k, v|
+ if v.is_a?(Array)
+ assert_equal v, Array(sub["ontology_#{k}"])
+ else
+ assert_equal v, sub["ontology_#{k}"]
+ end
+ end
+ end
+ end
+
+ def test_search_agents
+ @@user1 = LinkedData::Models::User.new(:username => 'user111221', :email => 'some111221@email.org')
+ @@user1.passwordHash = 'some random pass hash'
+ @@user1.save
+
+ @agents = [
+ LinkedData::Models::Agent.new(name: 'name 0', email: 'test_0@test.com', agentType: 'organization', creator: @@user1),
+ LinkedData::Models::Agent.new(name: 'name 1', email: 'test_1@test.com', agentType: 'organization', creator: @@user1),
+ LinkedData::Models::Agent.new(name: 'name 2', email: 'test_2@test.com', agentType: 'person', creator: @@user1)
+ ]
+ @identifiers = [
+ LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ROR', creator: @@user1),
+ LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ORCID', creator: @@user1),
+ ]
+
+ @identifiers.each { |i| i.save }
+ affiliations = @agents[0..1].map { |a| a.save }
+ agent = @agents.last
+ agent.affiliations = affiliations
+
+ agent.identifiers = @identifiers
+ agent.save
+
+ agents = LinkedData::Models::Agent.search('*:*')['response']['docs']
+
+ assert_equal 3, agents.size
+ agents.each do |a|
+ select_agent = @agents.select { |agent_created| agent_created.id.to_s.eql?(a['id']) }.first
+ refute_nil select_agent
+ select_agent.bring_remaining
+
+ assert_equal a['name_text'], select_agent.name
+ assert_equal a['email_text'], select_agent.email
+ assert_equal a['agentType_t'], select_agent.agentType
+ assert_equal(a['affiliations_txt'], select_agent.affiliations&.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal(a['identifiers_texts'], select_agent.identifiers&.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal a['creator_t'], select_agent.creator.bring_remaining.embedded_doc
+ end
+
+ @identifiers.each { |i| i.delete }
+ @agents.each { |a| a.delete }
+ @@user1.delete
+ end
+
+ def test_search_ontology_data
+ ont_count, ont_acronyms, created_ontologies = create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: {
+ process_rdf: true, extract_metadata: false,
+ generate_missing_labels: false,
+ index_search: false,
+ },
+ acronym: 'BROTEST',
+ name: 'ontTEST Bla',
+ file_path: 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
+ ont_count: 1,
+ submission_count: 1,
+ ontology_format: 'SKOS'
+ })
+ ont_sub = LinkedData::Models::Ontology.find('BROTEST-0').first
+ ont_sub = ont_sub.latest_submission
+ time = Benchmark.realtime do
+ ont_sub.index_all_data(Logger.new($stdout))
+ end
+ puts time
+ conn = Goo.search_client(:ontology_data)
+ response = conn.search('*')
+
+ count = Goo.sparql_query_client.query("SELECT (COUNT( DISTINCT ?id) as ?c) FROM <#{ont_sub.id}> WHERE {?id ?p ?v}")
+ .first[:c]
+ .to_i
+
+ assert_equal count, response['response']['numFound']
+
+ response = conn.search('*', fq: ' resource_id:"http://opendata.inrae.fr/thesaurusINRAE/c_10065"')
+
+ assert_equal 1, response['response']['numFound']
+ doc = response['response']['docs'].first
+
+ expected_doc = {
+ 'id' => 'http://opendata.inrae.fr/thesaurusINRAE/c_10065_BROTEST-0',
+ 'submission_id_t' => 'http://data.bioontology.org/ontologies/BROTEST-0/submissions/1',
+ 'ontology_t' => 'BROTEST-0',
+ 'resource_id' => 'http://opendata.inrae.fr/thesaurusINRAE/c_10065',
+ 'type_txt' => %w[http://www.w3.org/2004/02/skos/core#Concept http://www.w3.org/2002/07/owl#NamedIndividual],
+ 'http___www.w3.org_2004_02_skos_core_inScheme_txt' => %w[http://opendata.inrae.fr/thesaurusINRAE/thesaurusINRAE http://opendata.inrae.fr/thesaurusINRAE/mt_53],
+ 'http___www.w3.org_2004_02_skos_core_broader_t' => 'http://opendata.inrae.fr/thesaurusINRAE/c_9937',
+ 'http___www.w3.org_2004_02_skos_core_altLabel_txt' => ['GMO food',
+ 'aliment transgénique',
+ 'aliment OGM',
+ 'transgenic food'],
+ 'http___www.w3.org_2004_02_skos_core_prefLabel_txt' => ['genetically modified food',
+ 'aliment génétiquement modifié'],
+ 'resource_model' => 'ontology_submission'
+ }
+
+ doc.delete('_version_')
+
+ assert_equal expected_doc['id'], doc['id']
+ assert_equal expected_doc['submission_id_t'], doc['submission_id_t']
+ assert_equal expected_doc['ontology_t'], doc['ontology_t']
+ assert_equal expected_doc['resource_id'], doc['resource_id']
+ assert_equal expected_doc['type_txt'].sort, doc['type_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_inScheme_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_inScheme_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_broader_t'], doc['http___www.w3.org_2004_02_skos_core_broader_t']
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_altLabel_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_altLabel_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_prefLabel_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_prefLabel_txt'].sort
+ assert_equal expected_doc['resource_model'], doc['resource_model']
+
+ end
+end
diff --git a/test/test_case.rb b/test/test_case.rb
index 75cc02b4..af9cf9a4 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -18,6 +18,7 @@
require_relative 'test_log_file'
require_relative '../lib/ontologies_linked_data'
+require_relative '../config/config'
if ENV['OVERRIDE_CONFIG'] == 'true'
SOLR_HOST = ENV.include?('SOLR_HOST') ? ENV['SOLR_HOST'] : 'localhost'
@@ -38,7 +39,7 @@
end
end
-require_relative '../config/config'
+
require 'minitest/unit'
require 'webmock/minitest'
WebMock.allow_net_connect!
diff --git a/test/util/test_notifications.rb b/test/util/test_notifications.rb
index 5417aec8..033cb2c3 100644
--- a/test/util/test_notifications.rb
+++ b/test/util/test_notifications.rb
@@ -151,6 +151,9 @@ def test_remote_ontology_pull_notification
end
def test_mail_options
+ current_auth_type = LinkedData.settings.smtp_auth_type
+
+ LinkedData.settings.smtp_auth_type = :none
options = LinkedData::Utils::Notifier.mail_options
expected_options = {
address: LinkedData.settings.smtp_host,
@@ -160,7 +163,7 @@ def test_mail_options
assert_equal options, expected_options
# testing SMTP authentication-based login
- current_auth_type = LinkedData.settings.smtp_auth_type
+
LinkedData.settings.smtp_auth_type = :plain
options = LinkedData::Utils::Notifier.mail_options
expected_options = {