diff --git a/.github/workflows/ruby-unit-tests.yml b/.github/workflows/ruby-unit-tests.yml
index 99b26fde..1ade7407 100644
--- a/.github/workflows/ruby-unit-tests.yml
+++ b/.github/workflows/ruby-unit-tests.yml
@@ -7,26 +7,37 @@ on:
jobs:
test:
strategy:
+ fail-fast: false
matrix:
- backend: ['ruby'] # ruby runs tests with 4store backend and ruby-agraph runs with AllegroGraph backend
+ goo-slice: [ '20', '100', '500' ]
+ ruby-version: [ '2.7' ]
+ triplestore: [ 'fs', 'ag', 'vo', 'gb' ]
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - name: Set up solr configsets
- run: ./test/solr/generate_ncbo_configsets.sh
- - name: create config.rb file
- run: cp config/config.test.rb config/config.rb
- - name: Build docker-compose
- run: docker-compose --profile 4store build #profile flag is set in order to build all containers in this step
- - name: Run unit tests
- # unit tests are run inside a container
- # http://docs.codecov.io/docs/testing-with-docker
- run: |
- ci_env=`bash <(curl -s https://codecov.io/env)`
- docker-compose run $ci_env -e CI --rm ${{ matrix.backend }} wait-for-it solr-ut:8983 -- bundle exec rake test TESTOPTS='-v'
- - name: Upload coverage reports to Codecov
- uses: codecov/codecov-action@v3
- with:
- flags: unittests
- verbose: true
- fail_ci_if_error: false # optional (default = false)
+ - uses: actions/checkout@v3
+ - name: create config.rb file
+ run: cp config/config.rb.sample config/config.rb
+ - name: Install Dependencies
+ run: sudo apt-get update && sudo apt-get -y install raptor2-utils
+ - name: Set up JDK 11
+ uses: actions/setup-java@v2
+ with:
+ java-version: '11'
+ distribution: 'adopt'
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+ - name: Run unit tests
+ # unit tests are run inside a container
+ # http://docs.codecov.io/docs/testing-with-docker
+ run: |
+ ci_env=`bash <(curl -s https://codecov.io/env)`
+ GOO_SLICES=${{ matrix.goo-slice }} bundle exec rake test:docker:${{ matrix.triplestore }} TESTOPTS="-v"
+ - name: Upload coverage reports to Codecov
+ uses: codecov/codecov-action@v3
+ with:
+ flags: unittests
+ verbose: true
+ fail_ci_if_error: false # optional (default = false)
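
The rewritten job fans out over a matrix of GOO slice sizes ('20', '100', '500') and triplestore backends ('fs', 'ag', 'vo', 'gb'), and `fail-fast: false` lets the remaining combinations finish even when one cell fails. Each cell delegates to a `rake test:docker:<backend>` task. A minimal sketch of how such a task family could look on the Rakefile side (the task names come from the workflow above; the docker-compose service names are assumptions):

```ruby
# Rakefile sketch: one unit-test task per triplestore backend.
# The fs/ag/vo/gb keys mirror the workflow matrix; the compose
# service names used here are hypothetical.
namespace :test do
  namespace :docker do
    { fs: '4store', ag: 'agraph', vo: 'virtuoso', gb: 'graphdb' }.each do |key, service|
      desc "Run unit tests against the #{service} backend"
      task key do
        slices = ENV.fetch('GOO_SLICES', '500')
        sh "docker compose run --rm -e GOO_SLICES=#{slices} #{service} " \
           'bundle exec rake test'
      end
    end
  end
end
```
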
diff --git a/Dockerfile b/Dockerfile
index f78fd9c6..42760153 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends \
openjdk-11-jre-headless \
raptor2-utils \
wait-for-it \
+ libraptor2-dev \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /srv/ontoportal/ontologies_linked_data
@@ -15,7 +16,7 @@ COPY Gemfile* /srv/ontoportal/ontologies_linked_data/
WORKDIR /srv/ontoportal/ontologies_linked_data
-RUN gem update --system
+RUN gem update --system 3.4.22 # the 3.4.22 pin can be removed once we support Ruby versions > 3.0
RUN gem install bundler
ENV BUNDLE_PATH=/srv/ontoportal/bundle
RUN bundle install
diff --git a/Gemfile b/Gemfile
index 12b56c2b..157ad290 100644
--- a/Gemfile
+++ b/Gemfile
@@ -21,6 +21,10 @@ gem 'rubyzip', '~> 1.0'
gem 'thin'
gem "oauth2", "~> 2.0"
gem 'request_store'
+gem 'jwt'
+gem 'json-ld', '~> 3.0.2'
+gem "parallel", "~> 1.24"
+
# Testing
group :test do
@@ -30,12 +34,13 @@ group :test do
gem 'simplecov'
gem 'simplecov-cobertura' # for codecov.io
gem 'test-unit-minitest'
+ gem 'webmock'
end
group :development do
gem 'rubocop', require: false
end
-
# NCBO gems (can be from a local dev path or from rubygems/git)
-gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'ecoportal'
-gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'master'
+gem 'goo', github: 'ontoportal-lirmm/goo', branch: 'development'
+gem 'sparql-client', github: 'ontoportal-lirmm/sparql-client', branch: 'development'
+
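
Among the added gems, `webmock` lets the unit tests stub outbound HTTP instead of hitting live services. A minimal usage sketch with Minitest (the stubbed endpoint is illustrative, borrowed from the OAuth provider config further down):

```ruby
require 'minitest/autorun'
require 'net/http'
require 'webmock/minitest' # after this require, any unstubbed HTTP request raises

class RemoteLookupTest < Minitest::Test
  def test_stubbed_orcid_endpoint
    # Register a canned response for the ORCID userinfo URL.
    stub_request(:get, 'https://pub.orcid.org/v3.0/me')
      .to_return(status: 200,
                 body: '{"name":"test"}',
                 headers: { 'Content-Type' => 'application/json' })

    body = Net::HTTP.get(URI('https://pub.orcid.org/v3.0/me'))
    assert_includes body, 'test'
  end
end
```
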
diff --git a/Gemfile.lock b/Gemfile.lock
index 7f8e9cb5..b6ca2179 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,12 +1,15 @@
GIT
remote: https://github.com/ontoportal-lirmm/goo.git
- revision: c310e3854705b241a6259faad14cf6cd4eb97053
- branch: ecoportal
+ revision: 8f0a9a5bddba03d9d660a363c4e6618da479db9f
+ branch: development
specs:
goo (0.0.2)
addressable (~> 2.8)
pry
- rdf (= 1.0.8)
+ rdf (= 3.2.11)
+ rdf-raptor
+ rdf-rdfxml
+ rdf-vocab
redis
rest-client
rsolr
@@ -15,13 +18,12 @@ GIT
GIT
remote: https://github.com/ontoportal-lirmm/sparql-client.git
- revision: aed51baf4106fd0f3d0e3f9238f0aad9406aa3f0
- branch: master
+ revision: c96da3ad479724a31ccd6217ab9939dddfaca40e
+ branch: development
specs:
- sparql-client (1.0.1)
- json_pure (>= 1.4)
- net-http-persistent (= 2.9.4)
- rdf (>= 1.0)
+ sparql-client (3.2.2)
+ net-http-persistent (~> 4.0, >= 4.0.2)
+ rdf (~> 3.2, >= 3.2.11)
GEM
remote: https://rubygems.org/
@@ -32,22 +34,25 @@ GEM
multi_json (~> 1.3)
thread_safe (~> 0.1)
tzinfo (~> 0.3.37)
- addressable (2.8.5)
+ addressable (2.8.6)
public_suffix (>= 2.0.2, < 6.0)
ansi (1.5.0)
ast (2.4.2)
- base64 (0.1.1)
- bcrypt (3.1.19)
+ base64 (0.2.0)
+ bcrypt (3.1.20)
+ bigdecimal (3.1.7)
builder (3.2.4)
coderay (1.1.3)
- concurrent-ruby (1.2.2)
+ concurrent-ruby (1.2.3)
connection_pool (2.4.1)
+ crack (1.0.0)
+ bigdecimal
+ rexml
cube-ruby (0.0.3)
daemons (1.4.1)
- date (3.3.3)
+ date (3.3.4)
docile (1.4.0)
- domain_name (0.5.20190701)
- unf (>= 0.0.5, < 1.0.0)
+ domain_name (0.6.20240107)
email_spec (2.2.2)
htmlentities (~> 4.3.3)
launchy (~> 2.1)
@@ -76,7 +81,8 @@ GEM
faraday-patron (1.0.0)
faraday-rack (1.0.0)
faraday-retry (1.0.3)
- ffi (1.15.5)
+ ffi (1.16.3)
+ hashdiff (1.1.0)
hashie (5.0.0)
htmlentities (4.3.4)
http-accept (1.7.0)
@@ -84,14 +90,18 @@ GEM
domain_name (~> 0.5)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
- json (2.6.3)
- json_pure (2.6.3)
- jwt (2.7.1)
+ json (2.7.2)
+ json-ld (3.0.2)
+ multi_json (~> 1.12)
+ rdf (>= 2.2.8, < 4.0)
+ jwt (2.8.1)
+ base64
language_server-protocol (3.17.0.3)
launchy (2.5.2)
addressable (~> 2.8)
libxml-ruby (2.9.0)
- logger (1.5.3)
+ link_header (0.0.8)
+ logger (1.6.0)
macaddr (1.7.2)
systemu (~> 2.6.5)
mail (2.8.1)
@@ -99,10 +109,10 @@ GEM
net-imap
net-pop
net-smtp
- method_source (1.0.0)
- mime-types (3.5.1)
+ method_source (1.1.0)
+ mime-types (3.5.2)
mime-types-data (~> 3.2015)
- mime-types-data (3.2023.0808)
+ mime-types-data (3.2024.0305)
mini_mime (1.1.5)
minitest (4.7.5)
minitest-reporters (0.14.24)
@@ -112,16 +122,17 @@ GEM
powerbar
multi_json (1.15.0)
multi_xml (0.6.0)
- multipart-post (2.3.0)
- net-http-persistent (2.9.4)
- net-imap (0.3.7)
+ multipart-post (2.4.0)
+ net-http-persistent (4.0.2)
+ connection_pool (~> 2.2)
+ net-imap (0.4.10)
date
net-protocol
net-pop (0.1.2)
net-protocol
- net-protocol (0.2.1)
+ net-protocol (0.2.2)
timeout
- net-smtp (0.3.3)
+ net-smtp (0.5.0)
net-protocol
netrc (0.11.0)
oauth2 (2.0.9)
@@ -134,8 +145,8 @@ GEM
oj (2.18.5)
omni_logger (0.1.4)
logger
- parallel (1.23.0)
- parser (3.2.2.3)
+ parallel (1.24.0)
+ parser (3.3.1.0)
ast (~> 2.4.1)
racc
pony (1.13.1)
@@ -145,21 +156,34 @@ GEM
pry (0.14.2)
coderay (~> 1.1)
method_source (~> 1.0)
- public_suffix (5.0.3)
- racc (1.7.1)
+ public_suffix (5.0.5)
+ racc (1.7.3)
rack (1.6.13)
rack-test (0.8.3)
rack (>= 1.0, < 3)
rainbow (3.1.1)
rake (10.5.0)
- rdf (1.0.8)
- addressable (>= 2.2)
- redis (5.0.7)
- redis-client (>= 0.9.0)
- redis-client (0.17.0)
+ rdf (3.2.11)
+ link_header (~> 0.0, >= 0.0.8)
+ rdf-raptor (3.2.0)
+ ffi (~> 1.15)
+ rdf (~> 3.2)
+ rdf-rdfxml (3.2.2)
+ builder (~> 3.2)
+ htmlentities (~> 4.3)
+ rdf (~> 3.2)
+ rdf-xsd (~> 3.2)
+ rdf-vocab (3.2.7)
+ rdf (~> 3.2, >= 3.2.4)
+ rdf-xsd (3.2.1)
+ rdf (~> 3.2)
+ rexml (~> 3.2)
+ redis (5.2.0)
+ redis-client (>= 0.22.0)
+ redis-client (0.22.1)
connection_pool
- regexp_parser (2.8.1)
- request_store (1.5.1)
+ regexp_parser (2.9.0)
+ request_store (1.6.0)
rack (>= 1.4)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
@@ -169,20 +193,19 @@ GEM
rexml (3.2.6)
rsolr (1.1.2)
builder (>= 2.1.2)
- rubocop (1.56.3)
- base64 (~> 0.1.1)
+ rubocop (1.63.4)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
- parser (>= 3.2.2.3)
+ parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
- rubocop-ast (>= 1.28.1, < 2.0)
+ rubocop-ast (>= 1.31.1, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.29.0)
- parser (>= 3.2.1.0)
+ rubocop-ast (1.31.3)
+ parser (>= 3.3.1.0)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
rubyzip (1.3.0)
@@ -206,18 +229,19 @@ GEM
eventmachine (~> 1.0, >= 1.0.4)
rack (>= 1, < 3)
thread_safe (0.3.6)
- timeout (0.4.0)
+ timeout (0.4.1)
tzinfo (0.3.62)
- unf (0.1.4)
- unf_ext
- unf_ext (0.0.8.2)
- unicode-display_width (2.4.2)
+ unicode-display_width (2.5.0)
uuid (2.3.9)
macaddr (~> 1.0)
- version_gem (1.1.3)
+ version_gem (1.1.4)
+ webmock (3.23.0)
+ addressable (>= 2.8.0)
+ crack (>= 0.3.2)
+ hashdiff (>= 0.4.0, < 2.0.0)
PLATFORMS
- x86_64-darwin-21
+ x86_64-darwin-23
x86_64-linux
DEPENDENCIES
@@ -229,6 +253,8 @@ DEPENDENCIES
faraday (~> 1.9)
ffi
goo!
+ json-ld (~> 3.0.2)
+ jwt
libxml-ruby (~> 2.0)
minitest
minitest-reporters (>= 0.5.0)
@@ -236,6 +262,7 @@ DEPENDENCIES
oauth2 (~> 2.0)
oj (~> 2.0)
omni_logger
+ parallel (~> 1.24)
pony
pry
rack (~> 1.0)
@@ -251,6 +278,7 @@ DEPENDENCIES
sparql-client!
test-unit-minitest
thin
+ webmock
BUNDLED WITH
- 2.3.23
+ 2.4.22
diff --git a/bin/bubastis-1.4.0.jar b/bin/bubastis-1.4.0.jar
new file mode 100644
index 00000000..bc8dfeae
Binary files /dev/null and b/bin/bubastis-1.4.0.jar differ
diff --git a/bin/bubastis.jar b/bin/bubastis.jar
index 20eb80b8..6d416f57 120000
--- a/bin/bubastis.jar
+++ b/bin/bubastis.jar
@@ -1 +1 @@
-bubastis_1_3.jar
\ No newline at end of file
+./bubastis-1.4.0.jar
\ No newline at end of file
diff --git a/config/config.rb.sample b/config/config.rb.sample
index 31410611..7539c8d8 100644
--- a/config/config.rb.sample
+++ b/config/config.rb.sample
@@ -1,25 +1,102 @@
-LinkedData.config do |config|
- config.goo_port = 9000
- config.goo_host = "localhost"
- config.search_server_url = "http://localhost:8983/solr/term_search_core1"
- config.property_search_server_url = "http://localhost:8983/solr/prop_search_core1"
- config.repository_folder = "./test/data/ontology_files/repo"
- config.rest_url_prefix = "http://data.bioontology.org/"
- config.enable_security = false
- config.java_max_heap_size = '10240M'
- #PURL server config parameters
- config.enable_purl = false
- config.purl_host = "purl.bioontology.org"
- config.purl_port = 80
- config.purl_username = ""
- config.purl_password = ""
- config.purl_maintainers = ""
- config.purl_target_url_prefix = "http://bioportal.bioontology.org"
- config.sparql_endpoint_url = "http:://sparql_endpoint.com"
- Goo.configure do |conf|
- conf.main_languages = ['en']
+GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
+GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
+GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
+GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
+GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
+GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
+REDIS_HOST = ENV.include?("REDIS_HOST") ? ENV["REDIS_HOST"] : "localhost"
+REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
+SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr"
+SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr"
+GOO_SLICES = ENV["GOO_SLICES"] || 500
+begin
+ LinkedData.config do |config|
+ Goo.slice_loading_size = GOO_SLICES.to_i
+ config.goo_backend_name = GOO_BACKEND_NAME.to_s
+ config.goo_host = GOO_HOST.to_s
+ config.goo_port = GOO_PORT.to_i
+ config.goo_path_query = GOO_PATH_QUERY.to_s
+ config.goo_path_data = GOO_PATH_DATA.to_s
+ config.goo_path_update = GOO_PATH_UPDATE.to_s
+ config.goo_redis_host = REDIS_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.http_redis_host = REDIS_HOST.to_s
+ config.http_redis_port = REDIS_PORT.to_i
+ config.ontology_analytics_redis_host = REDIS_HOST.to_s
+ config.ontology_analytics_redis_port = REDIS_PORT.to_i
+ config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
+ config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
+    config.sparql_endpoint_url = "http://sparql_endpoint.com"
+    # config.enable_notifications = false
+ config.java_max_heap_size = '20480M'
+ config.main_languages = ['en']
+
+ # Caches
+ config.goo_redis_host = REDIS_HOST.to_s
+ config.goo_redis_port = REDIS_PORT.to_i
+ config.enable_http_cache = false
+
+ # Email notifications
+ config.enable_notifications = false
+ config.email_sender = 'notifications@bioportal.lirmm.fr' # Default sender for emails
+    config.email_override = 'syphax.bouazzouni@lirmm.fr' # all email gets sent here; disable with email_disable_override
+ config.email_disable_override = true
+ config.smtp_host = 'localhost'
+ config.smtp_port = 1025
+ config.smtp_auth_type = :plain # :none, :plain, :login, :cram_md5
+ config.smtp_domain = 'lirmm.fr'
+ config.smtp_user = 'test'
+ config.smtp_password = 'test'
+    # Emails of the instance administrators, notified when a new user or a new ontology is created
+ # config.admin_emails = ['syphax.bouazzouni@lirmm.fr']
+
+    # Used to define other BioPortal instances that ontologies can be mapped to
+    # Example, to map to the NCBO BioPortal: {"ncbo" => {"api" => "http://data.bioontology.org", "ui" => "http://bioportal.bioontology.org", "apikey" => ""}}
+    # Then create the mapping with a class given in JSON, e.g.: "http://purl.bioontology.org/ontology/MESH/C585345": "ncbo:MESH"
+    # where "ncbo" is the namespace used as key in the interportal_hash
+ config.interportal_hash = {
+ 'agroportal' => {
+ 'api' => 'http://data.agroportal.lirmm.fr',
+ 'ui' => 'http://agroportal.lirmm.fr',
+ 'apikey' => '1cfae05f-9e67-486f-820b-b393dec5764b'
+ },
+ 'ncbo' => {
+ 'api' => 'http://data.bioontology.org',
+ 'ui' => 'http://bioportal.bioontology.org',
+ 'apikey' => '4a5011ea-75fa-4be6-8e89-f45c8c84844e'
+ },
+ 'sifr' => {
+ 'api' => 'http://data.bioportal.lirmm.fr',
+ 'ui' => 'http://bioportal.lirmm.fr',
+ 'apikey' => '1cfae05f-9e67-486f-820b-b393dec5764b'
+ }
+ }
+
+ # oauth
+ config.oauth_providers = {
+ github: {
+ check: :access_token,
+ link: 'https://api.github.com/user'
+ },
+ keycloak: {
+ check: :jwt_token,
+ cert: 'KEYCLOAK_SECRET_KEY'
+ },
+ orcid: {
+ check: :access_token,
+ link: 'https://pub.orcid.org/v3.0/me'
+ },
+ google: {
+ check: :access_token,
+ link: 'https://www.googleapis.com/oauth2/v3/userinfo'
+ }
+ }
end
+rescue NameError
+  puts '(CNFG) >> LinkedData not available, cannot load config'
end
-#sometimes tmp by default cannot allocate large files
-$TMP_SORT_FOLDER = "SOME TMP FOLDER"
+# sometimes tmp by default cannot allocate large files
+$TMP_SORT_FOLDER = 'SOME TMP FOLDER'
\ No newline at end of file
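
Every setting in the new sample follows the same `ENV.include?(key) ? ENV[key] : default` ternary. For reference, `ENV.fetch` expresses the identical unset-or-default fallback more tersely (a sketch, not part of the diff):

```ruby
# Equivalent ENV-or-default lookups via ENV.fetch:
# the second argument is returned when the variable is unset.
GOO_HOST   = ENV.fetch('GOO_HOST', 'localhost')
GOO_PORT   = ENV.fetch('GOO_PORT', 9000)
REDIS_HOST = ENV.fetch('REDIS_HOST', 'localhost')
REDIS_PORT = ENV.fetch('REDIS_PORT', 6379)
GOO_SLICES = ENV.fetch('GOO_SLICES', 500)
```
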
diff --git a/config/config.test.rb b/config/config.test.rb
deleted file mode 100644
index e4a628ac..00000000
--- a/config/config.test.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-###
-# This file is designed for use in docker based unit testing
-#
-# All the defaults are set in
-# https://github.com/ncbo/ontologies_linked_data/blob/master/lib/ontologies_linked_data/config/config.rb
-###
-
-GOO_BACKEND_NAME = ENV.include?("GOO_BACKEND_NAME") ? ENV["GOO_BACKEND_NAME"] : "4store"
-GOO_PATH_QUERY = ENV.include?("GOO_PATH_QUERY") ? ENV["GOO_PATH_QUERY"] : "/sparql/"
-GOO_PATH_DATA = ENV.include?("GOO_PATH_DATA") ? ENV["GOO_PATH_DATA"] : "/data/"
-GOO_PATH_UPDATE = ENV.include?("GOO_PATH_UPDATE") ? ENV["GOO_PATH_UPDATE"] : "/update/"
-GOO_PORT = ENV.include?("GOO_PORT") ? ENV["GOO_PORT"] : 9000
-GOO_HOST = ENV.include?("GOO_HOST") ? ENV["GOO_HOST"] : "localhost"
-REDIS_HOST = ENV.include?("REDIS_HOST") ? ENV["REDIS_HOST"] : "localhost"
-REDIS_PORT = ENV.include?("REDIS_PORT") ? ENV["REDIS_PORT"] : 6379
-SOLR_TERM_SEARCH_URL = ENV.include?("SOLR_TERM_SEARCH_URL") ? ENV["SOLR_TERM_SEARCH_URL"] : "http://localhost:8983/solr/term_search_core1"
-SOLR_PROP_SEARCH_URL = ENV.include?("SOLR_PROP_SEARCH_URL") ? ENV["SOLR_PROP_SEARCH_URL"] : "http://localhost:8983/solr/prop_search_core1"
-
-LinkedData.config do |config|
- config.goo_backend_name = GOO_BACKEND_NAME.to_s
- config.goo_host = GOO_HOST.to_s
- config.goo_port = GOO_PORT.to_i
- config.goo_path_query = GOO_PATH_QUERY.to_s
- config.goo_path_data = GOO_PATH_DATA.to_s
- config.goo_path_update = GOO_PATH_UPDATE.to_s
- config.goo_redis_host = REDIS_HOST.to_s
- config.goo_redis_port = REDIS_PORT.to_i
- config.http_redis_host = REDIS_HOST.to_s
- config.http_redis_port = REDIS_PORT.to_i
- config.ontology_analytics_redis_host = REDIS_HOST.to_s
- config.ontology_analytics_redis_port = REDIS_PORT.to_i
- config.search_server_url = SOLR_TERM_SEARCH_URL.to_s
- config.property_search_server_url = SOLR_PROP_SEARCH_URL.to_s
- config.sparql_endpoint_url = "http:://sparql_endpoint.com"
- # config.enable_notifications = false
-end
\ No newline at end of file
diff --git a/config/schemes/ontology_submission.yml b/config/schemes/ontology_submission.yml
index 473e2873..a9a362f1 100644
--- a/config/schemes/ontology_submission.yml
+++ b/config/schemes/ontology_submission.yml
@@ -1,323 +1,127 @@
-URI:
- extractedMetadata: true
- label: "URI identifier"
- helpText: "The URI of the ontology which is described by this metadata."
- description: [ "OMV: The URI of the ontology which is described by these metadata." ]
- display: "general"
- example: 'https://w3id.org/myontto'
-homepage:
- extractedMetadata: true
- metadataMappings: [ "cc:attributionURL", "mod:homepage", "doap:blog", "schema:mainEntityOfPage" ]
- helpText: "The URL of the homepage for the ontology."
- label: "Homepage"
+### Template
-hasOntologyLanguage:
- extractedMetadata: false
- display: 'general'
- description: [ "SCHEMA: Media type, typically MIME format (see IANA site) of the content, e.g. application/zip of a SoftwareApplication binary. In cases where a CreativeWork has several media type representations, 'encoding' can be used to indicate each MediaObject alongside particular fileFormat information. Unregistered or niche file formats can be indicated instead via the most appropriate URL, e.g. defining Web page or a Wikipedia entry.", "MOD: A language that is used to create an ontology." ]
- label: "Language"
+#propname:
+# display: "general" /
+# label: ""
+# helpText: ""
+# example: ''
+# description: [
+# "ACRO: description.",
+# "ACRO: description." ]
+# extractedMetadata: true / false
+# enforcedValues: {
+# "..",
+# ".." }
+# metadataMappings: [ "ns:propname", ".." ]
-publication:
- extractedMetadata: true
- description: [ "SCHEMA: A citation or reference to another creative work, such as another publication, web page, scholarly article, etc.","DCTERMS: A bibliographic reference for the resource.","OMV: List of bibliographic references describing the ontology and its applications.","FOAF: A document that this thing is the primary topic of" ]
- helpText: "The URL of bibliographic reference for the ontology."
- metadataMappings: [ "omv:reference", "dct:bibliographicCitation", "foaf:isPrimaryTopicOf", "schema:citation", "cito:citesAsAuthority", "schema:citation" ]
- label: "Publication"
+# AgroPortal properties, ordered as in the MOD file
-naturalLanguage:
+### General
+
+#Acronym => Ontology object (omv:acronym)
+#Name => Ontology object (omv:name)
+
+#URI
+URI:
+ display: "general"
+ label: "URI"
+ helpText: "The URI of the ontology which is described by these metadata."
+ example: 'https://w3id.org/myontology'
+ description: [
+ "OMV: The URI of the ontology which is described by these metadata.",
+ "MOD: The Unique Resoource Identifier of this ontology, assigned by responsible authority."]
extractedMetadata: true
- description: [ "DUBLIN CORE: A language of the resource.","SCHEMA: The language of the content or performance or used in an action. Please use one of the language codes from the IETF BCP 47 standard. See also availableLanguage.","DOAP: ISO language code a project has been translated into." ]
- metadataMappings: [ "dc:language", "dct:language", "doap:language", "schema:inLanguage" ]
- helpText: "The language of the content of the ontology.<br>Consider using a <a target="_blank" href="http://www.lexvo.org/">Lexvo URI</a> with ISO639-3 code.<br>e.g.: http://lexvo.org/id/iso639-3/eng"
- enforcedValues: {
- "http://lexvo.org/id/iso639-3/eng": "English",
- "http://lexvo.org/id/iso639-3/fra": "French",
- "http://lexvo.org/id/iso639-3/spa": "Spanish",
- "http://lexvo.org/id/iso639-3/por": "Portuguese",
- "http://lexvo.org/id/iso639-3/ita": "Italian",
- "http://lexvo.org/id/iso639-3/deu": "German" }
- label: "Natural language"
+ metadataMappings: [ "mod:URI", "omv:URI" ]
-documentation:
+#Version IRI
+versionIRI:
+ display: "general"
+ label: "Version IRI"
+ helpText: "The property that identifies the version IRI of an ontology."
+ example: 'https://w3id.org/myontology/3.2.0'
+ description: [
+ "OWL: The property that identifies the version IRI of an ontology." ]
extractedMetadata: true
- description: [ "DCTERMS: A link to the documentation page on a thing","DCAT: A Web page that can be navigated to in a Web browser to gain access to the dataset, its distributions and/or additional information.", "OMV: URL for further documentation.","RDFS: Further information about the subject resource.", "DOAP: URL of Wiki for collaborative discussion of project. ","VANN: A reference to a resource that provides information on how this resource is to be used","MOD: A link to the documentation page on a thing." ]
- metadataMappings: [ "rdfs:seeAlso", "foaf:page", "vann:usageNote", "mod:document", "dcat:landingPage", "doap:wiki" ]
- helpText: "URL for further documentation."
- label: "Documentation"
+
+#Version information
version:
- extractedMetadata: true
- helpText: "The version of the released ontology"
- metadataMappings: [ "owl:versionInfo", "mod:version", "doap:release", "pav:version", "schema:version", "oboInOwl:data-version", "oboInOwl:version", "adms:last" ]
- description: [ "MOD: The version of the released ontology.",
- "SCHEMA: The version of the CreativeWork embodied by a specified resource.",
- "OMV: The version information of the ontology.",
- "OWL: The annotation property that provides version information for an ontology or another OWL construct. ",
- "PAV: The version number of a resource.",
- "DOAP: A project release",
- "ADMS : A link to the current or latest version of the Asset" ]
+ display: 'general'
+ label: "Version information"
+ helpText: "The version information of the ontology."
example: "v.3.2.0"
- label: "Version"
-description:
- label: "Description"
- extractedMetadata: true
- description: [ "DCTERMS: An account of the resource","SCHEMA: A description of the item.","OMV: Free text description of an ontology.","DOAP: Plain text description of a project, of 2-4 sentences in length.","RDFS: A human-readable description of a resource." ]
- helpText: "Free text description of the ontology."
- metadataMappings: [ "dc:description", "dct:description", "doap:description", "schema:description", "oboInOwl:remark" ]
-
+ description: [
+ "OMV: The version information of the ontology.",
+ "MOD: The current version of the ontology. Possibly using Semantic versioning.",
+ "OWL: The annotation property that provides version information for an ontology or another OWL construct. ",
+ "PAV: The version number of a resource.",
+ "DOAP: A project release",
+ "SCHEMA: The version of the CreativeWork embodied by a specified resource."]
+ extractedMetadata: true
+ metadataMappings: [ "omv:version", "mod:version", "owl:versionInfo", "pav:version", "doap:release", "schema:version", "oboInOwl:data-version", "oboInOwl:version" ]
+
+#Status
status:
- extractedMetadata: true
+ display: "general"
label: "Status"
- metadataMappings: [ "adms:status", "idot:state" ]
- helpText: "Information about the ontology status (alpha, beta, production, retired)."
- enforcedValues: [ "alpha", "beta", "production", "retired" ]
+ helpText: "The status of the current version of the ontology (alpha, beta, production, retired)."
+ example: 'production'
+ description: [
+ "MOD: The status of the current version of the ontology (alpha, beta, production, retired).",
+ "OMV: It specifies the tracking information for the contents of the ontology. Pre-defined values.",
+ "IDOT: State of a resource (physical location providing access to data or information about the identified entity). This should be based on a recent manual or automatic check of the resource. Possible values are: 'up', 'down', 'probably up', 'obsolete resource', 'restricted access' and 'unknown'.",
+ "ADMS : Links to the status of the Asset or Asset Distribution in the context of a particular workflow process. Since Status is defined using a skos:Concept, that is the defined range for this property." ]
+ extractedMetadata: true
+ enforcedValues: [
+ "alpha",
+ "beta",
+ "production",
+ "retired" ]
+ metadataMappings: [ "omv:status", "mod:status", "adms:status", "idot:state" ]
+
+#Deprecated
+deprecated:
display: "general"
- description: [ "OMV: It specifies the tracking information for the contents of the ontology. Pre-defined values.",
- "IDOT: State of a resource (physical location providing access to data or information about the identified entity). This should be based on a recent manual or automatic check of the resource. Possible values are: 'up', 'down', 'probably up', 'obsolete resource', 'restricted access' and 'unknown'.",
- "ADMS : Links to the status of the Asset or Asset Distribution in the context of a particular workflow process. Since Status is defined using a skos:Concept, that is the defined range for this property"
- ]
-
-contact:
- helpText: "The people to contact when questions about the ontology. Composed of the contacts name and email."
- label: "Contact"
- description: "DCAT: Relevant contact information for the cataloged resource. Use of vCard is recommended"
-
-creationDate:
- extractedMetadata: true
- description: [ "DCTERMS: Date of submission of the resource.", "SCHEMA: Date of first broadcast/publication." ]
- metadataMappings: [ "dct:dateSubmitted", "schema:datePublished" ]
- label: "Creation date"
-
-released:
- extractedMetadata: true
- description: [ "DCTERMS: Date of submission of the resource.",
- "SCHEMA: Date of first broadcast/publication." ]
- label: "Release date"
- helpText: "Date of the ontology release."
- metadataMappings: [ "omv:creationDate", "dc:date", "dct:date", "dct:issued", "mod:creationDate", "doap:created", "schema:dateCreated","prov:generatedAtTime", "pav:createdOn", "pav:authoredOn", "pav:contributedOn", "oboInOwl:date", "oboInOwl:hasDate" ]
-
-
-numberOfClasses:
- metadataMappings: [ "void:classes", "voaf:classNumber", "mod:noOfClasses" ]
- description: [ "MOD: The total number of classes in an ontology.",
- "OMV: Number of classes in ontology.",
- "VOAF: The number of classes defined in the vocabulary namespace. Classes imported from other namespaces are not taken into account.",
- "VOID: The total number of distinct classes in a void:Dataset. In other words, the number of distinct resources occurring as objects of rdf:type." ]
- display: "metrics"
- helpText: "Number of classes in this ontology. Automatically computed by OWLAPI."
- label: "Number of classes"
-
-numberOfIndividuals:
- metadataMappings: [ "mod:noOfIndividuals" ]
- description: [ "MOD: The total number of individuals in an ontology.",
- "OMV: Number of individuals in the ontology.",
- "VOID: The total number of entities that are described in a void:Dataset." ]
- display: "metrics"
- helpText: "Number of individuals in this ontology. Automatically computed by OWLAPI."
- label: "Number of individuals"
-
-numberOfProperties:
- metadataMappings: [ "void:properties", "voaf:propertyNumber", "mod:noOfProperties" ]
- description: [ "MOD: The total number of properties in an ontology.",
- "OMV: Number of properties in the ontology.",
- "VOAF: The number of properties defined in the vocabulary namespace. Properties imported from other namespaces are not taken into account.",
- "VOID: The total number of distinct properties in a void:Dataset. In other words, the number of distinct resources that occur in the predicate position of triples in the dataset." ]
- display: "metrics"
- helpText: "Number of properties in this ontology. Automatically computed by OWLAPI."
- label: "Number of properties"
-
-
-modificationDate:
- extractedMetadata: true
- description: [ "DCTERMS: Date on which the resource was changed",
- "SCHEMA: The date on which the CreativeWork was most recently modified or when the item's entry was modified within a DataFeed.",
- "OMV: Date of the last modification made to the ontology",
- "PAV: The date of the last update of the resource" ]
- metadataMappings: [ "dct:modified", "schema:dateModified", "pav:lastUpdateOn", "mod:updated" ]
- helpText: "Date of the last modification made to the ontology"
- label: "Modification date"
-
-entities:
- extractedMetadata: true
- description: [ "VOID: The total number of entities that are described in a void:Dataset." ]
- label: "Number of entities"
- display: "metrics"
- helpText: "Number of entities in this ontology."
-
-numberOfAxioms:
- extractedMetadata: true
- description: [ "MOD: The total number of axioms in an ontology.",
- "OMV: Number of axioms in ontology." ]
- metadataMappings: [ "mod:noOfAxioms", "void:triples" ]
- display: "metrics"
- helpText: "Number of axioms in this ontology."
- label: "Number of axioms or triples"
-
-keyClasses:
- extractedMetadata: false
- description: [ "OMV: Representative classes in the ontology.",
- "FOAF: The primary topic of some page or document.",
- "SCHEMA: Indicates the primary entity described in some page or other CreativeWork." ]
- display: "content"
- metadataMappings: [ "foaf:primaryTopic", "void:exampleResource", "schema:mainEntity" ]
- helpText: "Representative classes in the ontology."
- label: "Key classes"
-
-keywords:
- extractedMetadata: true
- description: [ "DCTERMS: A keyword(s) is used to describe the content of an ontology",
- "SCHEMA: Keywords or tags used to describe some item. Multiple textual entries in a keywords list are typically delimited by commas, or by repeating the property.",
- "OMV: List of keywords related to an ontology.",
- "DCAT: A keyword or tag describing a resource.",
- "MOD: A keyword(s) is used to describe the content of an ontology." ]
- helpText: "List of keywords related to the ontology."
- metadataMappings: [ "mod:keyword", "dcat:keyword", "schema:keywords" ]
- label: "Keywords"
-
-knownUsage:
- extractedMetadata: true
- description: [ "OMV: The applications where the ontology is being used." ]
- display: "usage"
- helpText: "The applications where the ontology is being used."
- example: "Used to annotate phenotypes and patterns of gene expression"
- label: "Known usage"
-
-notes:
- extractedMetadata: true
- description: [ "RDFS: A description of the subject resource.",
- "OMV: Additional information about the ontology that is not included somewhere else (e.g. information that you do not want to include in the documentation)." ]
- metadataMappings: [ "rdfs:comment", "adms:versionNotes" ]
- helpText: "Additional information about the ontology that is not included somewhere else (e.g. information that you do not want to include in the documentation)."
- label: "Notes"
-
-conformsToKnowledgeRepresentationParadigm:
- extractedMetadata: true
- description: [ "MOD: A representation formalism that is followed to describe knowledge in an ontology. Example includes description logics, first order logic, etc.",
- "OMV: Information about the paradigm model used to create the ontology.",
- "DCTERMS: An established standard to which the described resource conforms." ]
- metadataMappings: [ "mod:KnowledgeRepresentationFormalism", "dct:conformsTo" ]
- display: "methodology"
- helpText: "A representation formalism that is followed to describe knowledge in an ontology. Example includes description logics, first order logic, etc."
- label: "Knowledge representation paradigm"
-
-hasContributor:
- extractedMetadata: false
- description: [ "DCTERMS: An entity responsible for making contributions to the resource",
- "SCHEMA: A secondary contributor to the CreativeWork or Event.",
- "OMV: Contributors to the creation of the ontology.",
- "PAV: The resource was contributed to by the given agent.",
- "SCHEMA: A secondary contributor to the CreativeWork or Event.",
- "DOAP: Project contributor",
- "OMV: Contributors to the creation of the ontology" ]
- label: "Contributors"
- metadataMappings: [ "dc:contributor", "dct:contributor", "doap:helper", "schema:contributor", "pav:contributedBy" ]
- helpText: "Contributors to the creation of the ontology."
-
-hasCreator:
- extractedMetadata: false
- description: [ "OMV: Main responsible for the creation of the ontology",
- "DCTERMS: An entity primarily responsible for making the resource",
- "FOAF: An agent that made this thing",
- "PROV: Attribution is the ascribing of an entity to an agent.",
- "PAV: An agent that originated or gave existence to the work that is expressed by the digital resource.",
- "PAV: An agent primary responsible for making the digital artifact or resource representation.",
- "DOAP: Maintainer of a project, a project leader.",
- "SCHEMA: The author of this content or rating." ]
- label: "Creators"
- metadataMappings: [ "dc:creator", "dct:creator", "foaf:maker", "prov:wasAttributedTo", "doap:maintainer", "pav:authoredBy", "pav:createdBy", "schema:author", "schema:creator" ]
- helpText: "Main responsible for the creation of the ontology."
-
-designedForOntologyTask:
- extractedMetadata: true
- description: [ "DCTERMS: The purpose for which the ontology was originally designed",
- "OMV: The purpose for which the ontology was originally designed." ]
- display: "usage"
- label: "Designed for task"
- helpText: "The purpose for which the ontology was originally designed."
- enforcedValues: {
- "http://omv.ontoware.org/2005/05/ontology#AnnotationTask": "Annotation Task",
- "http://omv.ontoware.org/2005/05/ontology#ConfigurationTask": "Configuration Task",
- "http://omv.ontoware.org/2005/05/ontology#FilteringTask": "Filtering Task",
- "http://omv.ontoware.org/2005/05/ontology#IndexingTask": "Indexing Task",
- "http://omv.ontoware.org/2005/05/ontology#IntegrationTask": "Integration Task",
- "http://omv.ontoware.org/2005/05/ontology#MatchingTask": "Matching Task",
- "http://omv.ontoware.org/2005/05/ontology#MediationTask": "Mediation Task",
- "http://omv.ontoware.org/2005/05/ontology#PersonalizationTask": "Personalization Task",
- "http://omv.ontoware.org/2005/05/ontology#QueryFormulationTask": "Query Formulation Task",
- "http://omv.ontoware.org/2005/05/ontology#QueryRewritingTask": "Query Rewriting Task",
- "http://omv.ontoware.org/2005/05/ontology#SearchTask": "Search Task"
- }
-
-wasGeneratedBy:
- extractedMetadata: true
- description: [ "PROV: Generation is the completion of production of a new entity by an activity. This entity did not exist before generation and becomes available for usage after this generation" ]
- display: "people"
- label: "Was generated by"
- helpText: "People who generated the ontology."
-
-wasInvalidatedBy:
- extractedMetadata: true
- label: "Was invalidated by"
- description: [ "PROV: Invalidation is the start of the destruction, cessation, or expiry of an existing entity by an activity" ]
- display: "people"
- helpText: "People who invalidated the ontology."
-
-curatedBy:
- extractedMetadata: false
- description: [ "PAV: Specifies an agent specialist responsible for shaping the expression in an appropriate format. Often the primary agent responsible for ensuring the quality of the representation.",
- "MOD: A curator who restructure the previously authored content and shape it to be appropriate for the intended representation (e.g. by normalizing the fields for being represented in a spreadsheet)." ]
- display: "people"
- label: "Curator"
- metadataMappings: [ "mod:evaluatedBy" ]
- helpText: "People who curated the ontology."
- example: 'Yvonne M. Bradford (0000-0002-9900-7880)'
-
-endorsedBy:
- extractedMetadata: false
- label: "Endorsed by"
- description: [ "MOD: An ontology endorsed by an agent.",
- "OMV: The parties that have expressed support or approval to this ontology." ]
- metadataMappings: [ "mod:endorsedBy" ]
- helpText: "The parties that have expressed support or approval to this ontology"
- display: "people"
- example: 'INRAE (003vg9w96)'
-
-fundedBy:
- extractedMetadata: false
- label: "Funded or sponsored by"
- description: [ "MOD: An ontology that is sponsored by and developed under a project.",
- "FOAF: An organization funding a project or person.",
- "SCHEMA: The Organization on whose behalf the creator was working." ]
- metadataMappings: [ "mod:sponsoredBy", "schema:sourceOrganization" ]
- display: "people"
- helpText: "The organization funding the ontology development."
- example: [ 'Yvonne M. Bradford (0000-0002-9900-7880','INRAE (003vg9w96)' ]
-
-translator:
+ label: "Deprecated"
+ helpText: "The annotation property that indicates that a given entity has been deprecated."
+ example: 'false'
+ description: [
+ "OWL: The annotation property that indicates that a given entity has been deprecated.",
+ "MOD: The classes and properties of an ontology that are no longer in use.",
+ "IDOT: Indicates if the current dataset is obsolete (not provided any more to the public community). Value can either be 'true' or 'false' (xsd:boolean). The statement is usually omitted if 'false'.",
+ "DCAT: An annotation with the owl:deprecated annotation property and the value equal to \"true\"^^xsd:boolean can be used to specify that an IRI is deprecated" ]
+ extractedMetadata: true
+ enforcedValues: [
+ "true",
+ "false" ]
+ metadataMappings: [ "owl:deprecated", "mod:obsolete", "idot:obsolete" ]
+
+#Representation language
+hasOntologyLanguage:
+ display: "general"
+ label: "Representation language"
+ helpText: "The ontology language."
+ description: [
+ "MOD: A representation language that is used to create an ontology (e.g., OWL, RDF-S, SKOS).",
+ "OMV: The ontology language.",
+ "SCHEMA : Media type, typically MIME format (see IANA site) of the content." ]
extractedMetadata: false
- label: "Translator"
- description: "SCHEMA: Organization or person who adapts a creative work to different languages, regional differences and technical requirements of a target market, or that translates during some event."
- metadataMappings: [ "doap:translator" ]
- display: "people"
- helpText: "Organization or person who adapted the ontology to different languages, regional differences and technical requirements"
-
-hasDomain:
- extractedMetadata: true
- label: "Subject"
- description: [ "DCTERMS: The topic of the resource.",
- "SCHEMA: The subject matter of the content.",
- "FOAF: A topic of some page or document.",
- "OMV: Typically, the domain can refer to established topic hierarchies such as the general purpose topic hierarchy DMOZ or the domain specific topic hierarchy ACM for the computer science domain",
- "DCAT: A main category of the resource." ]
- helpText: "Typically, the domain can refer to established topic hierarchies such as the general purpose topic hierarchy DMOZ or the domain specific topic hierarchy ACM for the computer science domain"
- metadataMappings: [ "dc:subject", "dct:subject", "foaf:topic", "dcat:theme", "schema:about" ]
- display: "usage"
-
+ enforcedValues: [
+ "OBO",
+ "OWL",
+ "SKOS",
+ "UMLS" ]
+ metadataMappings: [ "mod:hasRepresentationLanguage", "omv:hasOntologyLanguage", "schema:fileFormat" ]
+
+#Formality level
hasFormalityLevel:
- extractedMetadata: true
+ display: "general"
label: "Formality level"
- metadataMappings: [ "mod:ontologyFormalityLevel" ]
helpText: "Level of formality of the ontology."
+ description: [
+ "MOD: The level of formality of an ontology (as defined by the NKOS KOS Types Vocabulary).",
+ "OMV: Level of formality of the ontology." ]
+ extractedMetadata: true
enforcedValues: {
"http://w3id.org/nkos/nkostype#classification_schema": "Classification scheme",
"http://w3id.org/nkos/nkostype#dictionary": "Dictionary",
@@ -333,38 +137,18 @@ hasFormalityLevel:
"http://w3id.org/nkos/nkostype#terminology": "Terminology",
"http://w3id.org/nkos/nkostype#thesaurus": "Thesaurus"
}
- display: "general"
- description: "OMV: The level of formality of an ontology."
-
-
-hasLicense:
- extractedMetadata: true
- label: "License"
- description: [ "CC: A Work has a License.",
- "DC: Information about rights held in and over the resource.",
- "SCHEMA: A license document that applies to this content, typically indicated by URL.",
- "DCTERMS: A legal document giving official permission to do something with the resource. Recommended practice is to identify the license document with a URI. If this is not possible or feasible, a literal value that identifies the license may be provided.",
- "OMV: Underlying license model." ]
- metadataMappings: [ "dc:rights", "dct:rights", "dct:license", "cc:license", "schema:license" ]
- helpText: "Underlying license model.<br>Consider using a <a target="_blank" href="http://rdflicense.appspot.com/">URI to describe your License</a><br>Consider using a <a target="_blank" href="http://licentia.inria.fr/">INRIA licentia</a> to choose your license"
- enforcedValues: {
- "https://creativecommons.org/licenses/by/4.0/": "CC Attribution 4.0 International",
- "https://creativecommons.org/licenses/by/3.0/": "CC Attribution 3.0",
- "https://creativecommons.org/publicdomain/zero/1.0/": "CC Public Domain Dedication",
- "http://www.gnu.org/licenses/gpl-3.0": "GNU General Public License 3.0",
- "http://www.gnu.org/licenses/gpl-2.0": "GNU General Public License 2.0",
- "https://opensource.org/licenses/Artistic-2.0": "Open Source Artistic license 2.0",
- "https://opensource.org/licenses/MIT": "MIT License",
- "https://opensource.org/licenses/BSD-3-Clause": "BSD 3-Clause License",
- "http://www.apache.org/licenses/LICENSE-2.0": "Apache License 2.0"
- }
-
+ metadataMappings: [ "omv:hasFormalityLevel", "mod:formalityLevel" ]
+#Syntax
hasOntologySyntax:
+ display: "general"
+ label: "Syntax"
+ helpText: "The presentation syntax for the ontology langage."
+ description: [
+ "MOD: The syntax of this current ontology distribution (as defined by W3C formats).",
+ "OMV: The presentation syntax for the ontology langage.",
+ "DCTERMS : The file format, physical medium, or dimensions of the resource." ]
extractedMetadata: true
- metadataMappings: [ "mod:syntax", "dc:format", "dct:format" ]
- label: "Ontology Syntax"
- helpText: "The presentation syntax for the ontology langage.<br>Properties taken from <a target="_blank" href="https://www.w3.org/ns/formats/">W3C URIs for file format</a>"
enforcedValues: {
"http://www.w3.org/ns/formats/JSON-LD": "JSON-LD",
"http://www.w3.org/ns/formats/N3": "N3",
@@ -386,15 +170,45 @@ hasOntologySyntax:
"http://www.w3.org/ns/formats/TriG": "TriG",
"http://purl.obolibrary.org/obo/oboformat/spec.html": "OBO"
}
- description: [ "DUBLIN CORE: The file format, physical medium, or dimensions of the resource.", "MOD: The syntax followed in the creation of an ontology." ]
+ metadataMappings: [ "omv:hasOntologySyntax", "mod:hasSyntax", "dc:format", "dcterms:format" ]
-isOfType:
+#Natural language
+naturalLanguage:
+ display: "general"
+ label: "Natural language"
+ helpText: "The language of the content of the ontology (with values in Lexvo/iso639-1)."
+ description: [
+ "DCTERMS: A language of the resource. Recommended practice is to use either a non-literal value representing a language from a controlled vocabulary such as ISO 639-2 or ISO 639-3, or a literal value consisting of an IETF Best Current Practice 47 language tag.",
+ "OMV: The language of the content of the ontology, i.e. English, French, etc.",
+ "DOAP: ISO language code a project has been translated into.",
+ "SCHEMA: The language of the content or performance or used in an action. Please use one of the language codes from the IETF BCP 47 standard." ]
extractedMetadata: true
- description: [ "OMV: The nature of the content of the ontology ",
+ enforcedValues: {
+ "http://lexvo.org/id/iso639-1/en": "English",
+ "http://lexvo.org/id/iso639-1/fr": "French",
+ "http://lexvo.org/id/iso639-1/es": "Spanish",
+ "http://lexvo.org/id/iso639-1/pt": "Portuguese",
+ "http://lexvo.org/id/iso639-1/it": "Italian",
+ "http://lexvo.org/id/iso639-1/de": "German",
+ "http://lexvo.org/id/iso639-1/ar": "Arabic",
+ "http://lexvo.org/id/iso639-1/zh": "Chinese",
+ "http://lexvo.org/id/iso639-1/hi": "Hindi",
+ "http://lexvo.org/id/iso639-1/nl": "Dutch",
+ "http://lexvo.org/id/iso639-1/fi": "Finnish",
+ "http://lexvo.org/id/iso639-1/el": "Greek",
+ "http://lexvo.org/id/iso639-1/ja": "Japanese"
+ }
+ metadataMappings: [ "omv:naturalLanguage", "dc:language", "dcterms:language", "doap:language", "schema:inLanguage" ]
+
+#Generic type
+isOfType:
+ display: "general"
+ label: "Generic type"
+ helpText: "The nature of the content of the ontology."
+ description: [
+ "OMV: The nature of the content of the ontology.",
"DCTERMS: The nature or genre of the resource." ]
- metadataMappings: [ "dc:type", "dct:type" ]
- label: "Generic Type"
- helpText: "The nature of the content of the ontology.<br>Properties taken from <a target="_blank" href="http://wiki.dublincore.org/index.php/NKOS_Vocabularies#KOS_Types_Vocabulary">DCMI KOS type vocabularies</a>"
+ extractedMetadata: true
enforcedValues: {
"http://omv.ontoware.org/2005/05/ontology#ApplicationOntology": "Application Ontology",
"http://omv.ontoware.org/2005/05/ontology#CoreOntology": "Core Ontology",
@@ -403,289 +217,589 @@ isOfType:
"http://omv.ontoware.org/2005/05/ontology#UpperLevelOntology": "Upper Level Ontology",
"http://omv.ontoware.org/2005/05/ontology#Vocabulary": "Vocabulary"
}
-
-usedOntologyEngineeringMethodology:
+ metadataMappings: [ "omv:isOfType", "dc:type", "dcterms:type" ]
+
+#Other identifier
+identifier:
+ display: "general"
+ label: "Other identifier"
+ helpText: "An unambiguous reference to the resource within a given context. Recommended practice is to identify the resource by means of a string conforming to an identification system."
+ example: 'https://doi.org/10.15454/1.4690062322351956E12'
+ description: [
+ "DCTERMS: An unambiguous reference to the resource within a given context. Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs.",
+ "SKOS: A notation is a string of characters such as\"T58.5\"or\"303.4833\"used to uniquely identify a concept within the scope of a given concept scheme.",
+ "ADMS: adms:identifier is used to link any resource to an instance of adms:Identifier which is its range. N.B. it is not appropriate to use dcterms:identifer to link to the Identifier class as its range is rdfs:Literal. ADMS uses this to provide any identifier for the Asset.",
+ "SCHEMA: The identifier property represents any kind of identifier for any kind of Thing, such as ISBNs, GTIN codes, UUIDs etc. Schema.org provides dedicated properties for representing many of these, either as textual strings or as URL (URI) links. " ]
extractedMetadata: true
- label: "Engineering methodology"
- description: [ "MOD: A methodology following which an ontology is created",
- "SCHEMA: The publishingPrinciples property indicates (typically via URL) a document describing the editorial principles of an Organization (or individual, e.g. a Person writing a blog) that relate to their activities as a publisher, e.g. ethics or diversity policies.",
- "ADMS: More information about the format in which an Asset Distribution is released. This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)." ]
- metadataMappings: [ "mod:methodologyUsed", "adms:representationTechnique", "schema:publishingPrinciples" ]
- display: "methodology"
- helpText: "Information about the method model used to create the ontology"
+ metadataMappings: [ "dc:identifier", "dcterms:identifier", "skos:notation", "adms:identifier" ]
+
-usedOntologyEngineeringTool:
+### Licensing
+
+#Access rights => Ontology object (bpm:viewingRestriction)
+
+#License
+hasLicense:
+ display: "licensing"
+ label: "License"
+ helpText: "A legal document giving official permission to do something with the resource. Recommended practice is to identify the license document with a URI. "
+ description: [
+ "DCTERMS: A legal document giving official permission to do something with the resource. Recommended practice is to identify the license document with a URI. If this is not possible or feasible, a literal value that identifies the license may be provided.",
+ "OMV: Underlying license model.",
+ "SCHEMA: A license document that applies to this content, typically indicated by URL.",
+ "CC: A Work has a License.",
+ "DC: Information about rights held in and over the resource." ]
extractedMetadata: true
- label: "Created With"
- description: [ "PAV: The software/tool used by the creator (pav:createdBy) when making the digital resource, for instance a word processor or an annotation tool.",
- "MOD: The tool used for the creation of an ontology.",
- "OMV: Information about the tool used to create the ontology." ]
- metadataMappings: [ "mod:toolUsed", "pav:createdWith", "oboInOwl:auto-generated-by" ]
- helpText: "Information about the tool used to create the ontology"
enforcedValues: {
- "http://protege.stanford.edu": "Protégé",
- "OWL API": "OWL API",
- "http://oboedit.org/": "OBO-Edit",
- "SWOOP": "SWOOP",
- "OntoStudio": "OntoStudio",
- "Altova": "Altova",
- "SemanticWorks": "SemanticWorks",
- "OilEd": "OilEd",
- "IsaViz": "IsaViz",
- "WebODE": "WebODE",
- "OntoBuilder": "OntoBuilder",
- "WSMO Studio": "WSMO Studio",
- "VocBench": "VocBench",
- "TopBraid": "TopBraid",
- "NeOn-Toolkit": "NeOn-Toolkit"
+ "https://creativecommons.org/licenses/by/4.0/": "CC Attribution 4.0 International",
+ "https://creativecommons.org/licenses/by/3.0/": "CC Attribution 3.0",
+ "https://creativecommons.org/publicdomain/zero/1.0/": "CC Public Domain Dedication",
+ "http://www.gnu.org/licenses/gpl-3.0": "GNU General Public License 3.0",
+ "http://www.gnu.org/licenses/gpl-2.0": "GNU General Public License 2.0",
+ "https://opensource.org/licenses/Artistic-2.0": "Open Source Artistic license 2.0",
+ "https://opensource.org/licenses/MIT": "MIT License",
+ "https://opensource.org/licenses/BSD-3-Clause": "BSD 3-Clause License",
+ "http://www.apache.org/licenses/LICENSE-2.0": "Apache License 2.0"
}
+ metadataMappings: [ "dc:rights", "dcterms:rights", "dcterms:license", "cc:license", "schema:license" ]
-useImports:
+#Use guidelines
+useGuidelines:
+ display: "licensing"
+ label: "Use guidelines"
+ helpText: "A related resource which defines how the ontology should be used."
+ description: [
+ "CC: A related resource which defines non-binding use guidelines for the work." ]
extractedMetadata: true
- label: "Imports"
- description: [ "OWL: References another OWL ontology containing definitions, whose meaning is considered to be part of the meaning of the importing ontology.",
- "OMV: References another ontology metadata instance that describes an ontology containing definitions, whose meaning is considered to be part of the meaning of the ontology described by this ontology metadata instance.",
- "DCTERMS: A related resource that is required by the described resource to support its function, delivery, or coherence.",
- "VOAF: Indicates that the subject vocabulary extends the expressivity of the object vocabulary by declaring subsumption relationships, using object vocabulary class as domain or range of a subject vocabulary property, defining local restrictions etc ...." ]
- metadataMappings: [ "owl:imports", "door:imports", "void:vocabulary", "voaf:extends", "dct:requires", "oboInOwl:import" ]
- helpText: "References another ontology metadata instance that describes an ontology containing definitions, whose meaning is considered to be part of the meaning of the ontology described by this ontology metadata instance"
-hasPriorVersion:
+#More permissions
+morePermissions:
+ display: "licensing"
+ label: "More permissions"
+ helpText: "A related resource which describes additional permissions or alternative licenses."
+ description: [
+ "CC: A related resource which describes additional permissions or alternative licenses for a Work which may be available." ]
extractedMetadata: true
- label: "Prior version"
- description: [ "OWL: This identifies the specified ontology as a prior version of the containing ontology.",
- "OMV: Contains a reference to another ontology metadata instance.",
- "DCTERMS: A related resource of which the described resource is a version, edition, or adaptation.",
- "PROV: A revision is a derivation for which the resulting entity is a revised version of some original. The implication here is that the resulting entity contains substantial content from the original.",
- "DOOR: Prior version relation from OWL.",
- "ADMS: A link to the previous version of the Asset" ]
- metadataMappings: [ "owl:priorVersion", "dct:isVersionOf", "door:priorVersion", "prov:wasRevisionOf", "adms:prev", "pav:previousVersion", "pav:hasEarlierVersion" ]
- helpText: "An URI to the prior version of the ontology"
- example: 'http://data.agroportal.lirmm.fr/ontologies/GO/submissions/208'
+
+#Rights holder
+copyrightHolder:
+ display: "licensing"
+ label: "Rights holder"
+ helpText: "The party holding the legal copyright to the ontology."
+ description: [
+ "SCHEMA: The party holding the legal copyright to the CreativeWork.",
+ "DCTERMS: A person or organization owning or managing rights over the resource." ]
+ extractedMetadata: false
+
+### Description
-isBackwardCompatibleWith:
+#Description
+description:
+ display: "description"
+ label: "Description"
+ helpText: "Free text description of an ontology."
+ example: ''
+ description: [
+ "DCTERMS: An account of the resource.",
+ "SCHEMA: A description of the item.",
+ "OMV: Free text description of an ontology.",
+ "DOAP: Plain text description of a project, of 2-4 sentences in length.","RDFS: A human-readable description of a resource." ]
extractedMetadata: true
- label: "Backward compatible"
- description: [ "OWL: This identifies the specified ontology as a prior version of the containing ontology, and further indicates that it is backward compatible with it.",
- "OMV: The ontology metadata instance which describes an ontology that is a compatible prior version of the ontology described by this ontology metadata Instance.",
- "DOOR: The relation of being a compatible new version from owl" ]
- metadataMappings: [ "owl:backwardCompatibleWith", "door:backwardCompatibleWith" ]
- display: "relations"
- helpText: "URI of an ontology that has its prior version compatible with the described ontology"
+ metadataMappings: [ "omv:description", "dc:description", "dcterms:description", "doap:description", "schema:description", "oboInOwl:remark" ]
-isIncompatibleWith:
+#Homepage
+homepage:
+ display: "description"
+ label: "Homepage"
+ helpText: "Ontology homepage."
+ description: [
+ "FOAF: A homepage for some thing.",
+ "MOD: An unambiguous reference to the resource within a given context.",
+ "DOAP: URI of a blog related to a project.",
+ "CC: The URL the creator of a Work would like used when attributing re-use.",
+ "SCHEMA: Indicates a page (or other CreativeWork) for which this thing is the main entity being described." ]
extractedMetadata: true
- label: "Incompatible"
- description: [ "OWL: This indicates that the containing ontology is a later version of the referenced ontology, but is not backward compatible with it.",
- "OMV: The described ontology is a later version of the ontology described by the metadata specified, but is not backward compatible with it. It can be used to explicitly state that ontology cannot upgrade to use the new version without checking whether changes are required." ]
- metadataMappings: [ "owl:incompatibleWith", "door:owlIncompatibleWith" ]
- display: "relations"
- helpText: "URI of an ontology that is a prior version of this ontology, but not compatible"
+ metadataMappings: [ "foaf:homepage", "cc:attributionURL", "mod:homepage", "doap:blog", "schema:mainEntityOfPage" ]
-deprecated:
+#Documentation
+documentation:
+ display: "description"
+ label: "Documentation"
+ helpText: "URL for further documentation."
+ description: [
+ "MOD: A link to the documentation page on a thing.",
+ "DCAT: A Web page that can be navigated to in a Web browser to gain access to the dataset, its distributions and/or additional information.",
+ "OMV: URL for further documentation.",
+ "RDFS: Further information about the subject resource.",
+ "DOAP: URL of Wiki for collaborative discussion of project.",
+ "VANN: A reference to a resource that provides information on how this resource is to be used.",
+ "FOAF: A page or document about this thing." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:documentation", "rdfs:seeAlso", "foaf:page", "vann:usageNote", "mod:document", "dcat:landingPage", "doap:wiki" ]
+
+#Notes or comments
+notes:
+ display: "description"
+ label: "Notes"
+ helpText: "Additional information about the ontology that is not included somewhere else."
+ description: [
+ "RDFS: A description of the subject resource.",
+ "OMV: Additional information about the ontology that is not included somewhere else (e.g. information that you do not want to include in the documentation).",
+ "ADMS: A description of changes between this version and the previous version of the Asset." ]
extractedMetadata: true
- label: "Deprecated"
- metadataMappings: [ "idot:obsolete" ]
- helpText: "To specify if the ontology IRI is deprecated"
- description: [ "DCAT: An annotation with the owl:deprecated annotation property and the value equal to \"true\"^^xsd:boolean can be used to specify that an IRI is deprecated",
- "OWL: The annotation property that indicates that a given entity has been deprecated.",
- "IDOT: Indicates if the current dataset is obsolete (not provided any more to the public community). Value can either be 'true' or 'false' (xsd:boolean). The statement is usually omitted if 'false'."
- ]
+ metadataMappings: [ "omv:notes", "rdfs:comment", "adms:versionNotes" ]
-versionIRI:
+#Keywords
+keywords:
+ display: "description"
+ label: "Keywords"
+ helpText: "List of keywords related to an ontology."
+ description: [
+ "SCHEMA: Keywords or tags used to describe some item. Multiple textual entries in a keywords list are typically delimited by commas, or by repeating the property.",
+ "OMV: List of keywords related to an ontology.",
+ "DCAT: A keyword or tag describing a resource.",
+ "MOD: A keyword(s) is used to describe the content of an ontology." ]
extractedMetadata: true
- display: "general"
- label: "Version IRI"
- helpText: "Identifies the version IRI of an ontology."
- description: [ "OWL: The property that identifies the version IRI of an ontology" ]
+ metadataMappings: [ "omv:keywords", "mod:keyword", "dcat:keyword", "schema:keywords" ]
-ontologyRelatedTo:
+#Hidden label
+hiddenLabel:
+ display: "description"
+ label: "Hidden label"
+ helpText: "A lexical label for a resource that should be hidden when generating visual displays of the resource, but should still be accessible to free text search operations."
+ description: [
+ "SKOS: A lexical label for a resource that should be hidden when generating visual displays of the resource, but should still be accessible to free text search operations." ]
extractedMetadata: true
- label: "Generally related to"
- description: [ "DCTERMS: A related resource",
- "VOAF: Indicates that the subject vocabulary uses or extends some class or property of the object vocabulary" ]
- metadataMappings: [ "dc:relation", "dct:relation", "voaf:reliesOn" ]
- helpText: "An ontology that uses or extends some class or property of the described ontology"
-
-comesFromTheSameDomain:
+ metadataMappings: [ "skos:hiddenLabel" ]
+
+#Alternative name
+alternative:
+ display: "description"
+ label: "Alternative name"
+ helpText: "An alternative name for the resource. The distinction between titles and alternative titles is application-specific."
+ description: [
+ "DCTERMS: An alternative name for the resource. The distinction between titles and alternative titles is application-specific.",
+ "SKOS: The preferred and alternative labels are useful when generating or creating human-readable representations of a knowledge organization system.",
+ "SCHEMA: An alias for the item. A short label that is used by some communities to refer to a dataset",
+ "IDOT: A short label that is used by some communities to refer to a dataset (see 'preferredPrefix')." ]
extractedMetadata: true
- description: "DOOR: If the two ontologies come from the same domain (without any other details)."
- display: "relations"
- helpText: "Ontologies that come from the same domain"
- label: "From the same domain than"
+ metadataMappings: [ "dcterms:alternative", "skos:altLabel", "idot:alternatePrefix", "schema:alternativeHeadline", "schema:alternateName" ]
+#Abstract
+abstract:
+ display: "description"
+ label: "Abstract"
+ helpText: "A summary or abstrct of the ontology."
+ description: [
+ "DCTERMS: A summary of the resource." ]
+ extractedMetadata: true
+ metadataMappings: [ "dcterms:abstract" ]
-similarTo:
+#Bibliographic reference
+publication:
+ display: "description"
+ label: "Bibliographic reference"
+ helpText: "List of bibliographic references describing the ontology and its applications."
+ description: [
+ "SCHEMA: A citation or reference to another creative work, such as another publication, web page, scholarly article, etc.",
+ "DCTERMS: A bibliographic reference for the resource.",
+ "OMV: List of bibliographic references describing the ontology and its applications.",
+ "FOAF: A document that this thing is the primary topic of.",
+ "CITO: The citing entity cites the cited entity as one that provides an authoritative description or definition of the subject under discussion." ]
extractedMetadata: true
- label: "Similar to"
- description: [ "VOAF: Used to assert that two vocabularies are similar in scope and objectives, independently of the fact that they otherwise refer to each other.",
- "DOOR: Represents the meaning of 'how an ontology overlap/cover parts of the same area of interest of another ontology." ]
- metadataMappings: [ "voaf:similar" ]
- display: "relations"
- helpText: "Vocabularies that are similar in scope and objectives, independently of the fact that they otherwise refer to each other."
+ metadataMappings: [ "omv:reference", "dcterms:bibliographicCitation", "foaf:isPrimaryTopicOf", "schema:citation", "cito:citesAsAuthority" ]
-isAlignedTo:
- extractedMetadata: true
- label: "Has equivalences with"
- description: [ "VOAF: Indicates that the subject vocabulary declares some equivalent classes or properties with the object vocabulary.",
- "DOOR: Links two ontologies if there exists an alignment which covers a substantial part of the vocabulary (i.e., a proportion greater than a threshold).",
- "NKOS: A related resource with which the described resource is aligned." ]
- metadataMappings: [ "voaf:hasEquivalencesWith", "nkos:alignedWith" ]
- helpText: "Ontologies that have an alignment which covers a substantial part of the described ontology"
+### Dates
-explanationEvolution:
+#Creation date
+released:
+ display: "dates"
+ label: "Creation date"
+ helpText: "Date of original (or first) creation of the resource."
+ description: [
+ "DCTERMS:date: A point or period of time associated with an event in the lifecycle of the resource.",
+ "DCTERMS:created: Date of creation of the resource.",
+ "DCTERMS:issued: Date of formal issuance (e.g., publication) of the resource.",
+ "PROV: Generation is the completion of production of a new entity by an activity. This entity did not exist before generation and becomes available for usage after this generation.",
+ "PAV:authoredOn: The date this resource was authored.",
+ "PAV:contributedOn: The date this resource was contributed to.",
+ "PAV:createdOn: The date of creation of the resource representation.",
+ "DOAP: Date when something was created, in YYYY-MM-DD form. e.g. 2004-04-05.",
+ "SCHEMA: The date on which the CreativeWork was created or the item was added to a DataFeed.",
+ "DOAP: Date when something was created, in YYYY-MM-DD form. e.g. 2004-04-05." ]
+ extractedMetadata: true
+ metadataMappings: [ "dcterms:created", "dcterms:date", "dcterms:issued", "doap:created", "mod:creationDate", "oboInOwl:hasDate", "oboInOwl:date", "omv:creationDate", "pav:createdOn", "pav:authoredOn", "pav:contributedOn", "prov:generatedAtTime", "schema:dateCreated"]
+
+#Modification date
+modificationDate:
+ display: "dates"
+ label: "Modification date"
+ helpText: "Date of the last modification made to the ontology."
+ description: [
+ "DCTERMS: Date on which the resource was changed.",
+ "SCHEMA: The date on which the CreativeWork was most recently modified or when the item's entry was modified within a DataFeed.",
+ "OMV: Date of the last modification made to the ontology.",
+ "PAV: The date of the last update of the resource." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:modificationDate", "dcterms:modified", "schema:dateModified", "pav:lastUpdateOn" ]
+
+#Validity date
+valid:
+ display: "dates"
+ label: "Validity date"
+ helpText: "Date (often a range) of validity of a resource."
+ description: [
+ "DCTERMS: Date (often a range) of validity of a resource.",
+ "SCHEMA: The end date and time of the item.",
+ "PROV: Invalidation is the start of the destruction, cessation, or expiry of an existing entity by an activity. The entity is no longer available for use (or further invalidation) after invalidation. Any generation or usage of an entity precedes its invalidation." ]
extractedMetadata: true
- description: [ "DOOR: Evolution which involves only at the syntactic level.",
- "PROV: An entity that is a specialization of another shares all aspects of the latter, and additionally presents more specific aspects of the same thing as the latter.",
- "VOAF:Indicates that the subject vocabulary defines some subclasses or subproperties of the object vocabulary, or local restrictions on those" ]
- metadataMappings: [ "voaf:specializes", "prov:specializationOf" ]
- display: "relations"
- label: "Specialization of"
- helpText: "If the ontology is a latter version that is semantically equivalent to another ontology."
-
-generalizes:
+ metadataMappings: [ "dcterms:valid", "prov:invaliatedAtTime", "schema:endDate" ]
+
+#Curation date
+curatedOn:
+ display: "dates"
+ label: "Curation date"
+ helpText: "Specifies the date this resource was curated. pav:curatedBy gives the agents that performed the curation."
+ description: [
+ "PAV: Specifies the date this resource was curated. pav:curatedBy gives the agents that performed the curation." ]
extractedMetadata: true
- description: [ "VOAF: Indicates that the subject vocabulary generalizes by some superclasses or super properties the object vocabulary.",
- "PROV: Inverse property of specializationOf." ]
- display: "relations"
- label: "Generalization of"
- helpText: "Vocabulary that is generalized by some superclasses or superproperties by the described ontology"
+ metadataMappings: [ "pav:curatedOn" ]
-hasDisparateModelling:
+#Submission date
+creationDate:
+ display: "dates"
+ label: "Submission date"
+ helpText: "Date of the submission/release in the portal."
+ description: [
+ "DCTERMS: Date of submission of the resource.",
+ "SCHEMA: Date of first broadcast/publication." ]
extractedMetadata: true
- description: "DOOR: Disagreements related to the conceptualization of the ontologies. Two ontologies are considered to have disparate modeling if they represent corresponding entities in different ways, e.g. as an instance in one case and a class in the other.."
- display: "relations"
- label: "Disparate modelling with"
- helpText: "URI of an ontology that is considered to have a different model, because they represent corresponding entities in different ways.<br>e.g. an instance in one case and a class in the other for the same concept"
+ metadataMappings: [ "dcterms:dateSubmitted", "schema:datePublished" ]
-hiddenLabel:
- extractedMetadata: true
- label: "Hidden or past name"
- description: [ "MOD: Hidden or past name",
- "SKOS: A lexical label for a resource that should be hidden when generating visual displays of the resource, but should still be accessible to free text search operations." ]
- helpText: "The hidden labels are useful when a user is interacting with a knowledge organization system via a text-based search function. The user may, for example, enter mis-spelled words when trying to find a relevant concept. If the mis-spelled query can be matched against a hidden label, the user will be able to find the relevant concept, but the hidden label won't otherwise be visible to the user"
+### Persons and organizations
-coverage:
- extractedMetadata: true
- label: "Coverage"
- description: [ "DCTERMS: The spatial or temporal topic of the resource, the spatial applicability of the resource, or the jurisdiction under which the resource is relevant. Spatial topic and spatial applicability may be a named place or a location specified by its geographic coordinates. Temporal topic may be a named period, date, or date range. A jurisdiction may be a named administrative entity or a geographic place to which the resource applies.",
- "SCHEMA: The 'spatial' property can be used in cases when more specific properties (e.g. locationCreated, spatialCoverage, contentLocation) are not known to be appropriate." ]
- metadataMappings: [ "dc:coverage", "schema:spatial" ]
- display: "usage"
- helpText: "The spatial or temporal topic of the ontology, the spatial applicability of the ontology, or the jurisdiction under which the ontology is relevant."
+#Contact
+contact:
+ display: "persons and organizations"
+ label: "Contact"
+ helpText: "The persons who can be contacted to enquire about an ontology. Composed of the contacts name and email."
+ description: [
+ "DCAT: Relevant contact information for the cataloged resource. Use of vCard is recommended."]
+ extractedMetadata: false
+ metadataMappings: [ "dcat:contactPoint" ]
-publisher:
+#Creator
+hasCreator:
+ display: "persons and organizations"
+ label: "Creator"
+ helpText: "Main responsible for the creation of the ontology."
+ description: [
+ "OMV: Main responsible for the creation of the ontology",
+ "DCTERMS: An entity primarily responsible for making the resource",
+ "FOAF: An agent that made this thing",
+ "PROV: Attribution is the ascribing of an entity to an agent.",
+ "PAV:authoredBy: An agent that originated or gave existence to the work that is expressed by the digital resource.",
+ "PAV:cretaedBy: An agent primary responsible for making the digital artifact or resource representation.",
+ "DOAP: Maintainer of a project, a project leader.",
+ "SCHEMA:author: The author of this content or rating.",
+ "SCHEMA:creator: The creator/author of this CreativeWork." ]
extractedMetadata: false
+ metadataMappings: [ "omv:hasCreator", "dc:creator", "dcterms:creator", "foaf:maker", "prov:wasAttributedTo", "doap:maintainer", "pav:authoredBy", "pav:createdBy", "schema:author", "schema:creator" ]
+
+#Contributor
+hasContributor:
+ display: "persons and organizations"
+ label: "Contributor"
+ helpText: "Contributors to the creation of the ontology."
+ description: [
+ "DCTERMS: An entity responsible for making contributions to the resource.",
+ "SCHEMA: A secondary contributor to the CreativeWork or Event.",
+ "OMV: Contributors to the creation of the ontology.",
+ "PAV: The resource was contributed to by the given agent.",
+ "DOAP: Project contributor" ]
+ extractedMetadata: false
+ metadataMappings: [ "omv:hasContributor", "dc:contributor", "dcterms:contributor", "doap:helper", "schema:contributor", "pav:contributedBy" ]
+
+#Curator
+curatedBy:
+ display: "persons and organizations"
+ label: "Curator"
+ helpText: "Specifies an agent specialist responsible for curating/evaluating the ontology."
+ description: [
+ "PAV: Specifies an agent specialist responsible for shaping the expression in an appropriate format. Often the primary agent responsible for ensuring the quality of the representation.",
+ "MOD: An ontology that is evaluated by an agent." ]
+ extractedMetadata: false
+ metadataMappings: [ "mod:evaluatedBy", "pav:curatedBy" ]
+
+#Translator
+translator:
+ display: "persons and organizations"
+ label: "Translator"
+ helpText: "Organization or person who adapts a creative work to different languages."
+ description: [
+ "SCHEMA: Organization or person who adapts a creative work to different languages, regional differences and technical requirements of a target market, or that translates during some event." ]
+ extractedMetadata: false
+ metadataMappings: [ "schema:translator" ]
+
+#Publisher
+publisher:
+ display: "persons and organizations"
label: "Publisher"
- description: [ "DCTERMS: An entity responsible for making the resource available.",
- "SCHEMA: The publisher of creative work.",
- "ADMS: The name of the agency that issued the identifier." ]
- metadataMappings: [ "dc:publisher", "schema:publisher" ]
- display: "license"
helpText: "An entity responsible for making the ontology available."
+ description: [
+ "DCTERMS: An entity responsible for making the resource available.",
+ "SCHEMA: The publisher of creative work.",
+ "ADMS: The name of the agency that issued the identifier." ]
+ extractedMetadata: false
+ metadataMappings: [ "dc:publisher", "dcterms:publisher", "schema:publisher", "adms:schemaAgency" ]
+#Funded or sponsored by
+fundedBy:
+ display: "persons and organizations"
+ label: "Funded or sponsored by"
+ helpText: "An organization funding a project or person."
+ description: [
+ "MOD: An ontology that is sponsored by and developed under a project.",
+ "FOAF: An organization funding a project or person.",
+ "SCHEMA: The organization on whose behalf the creator was working." ]
+ extractedMetadata: false
+ metadataMappings: [ "foaf:fundedBy", "mod:sponsoredBy", "schema:sourceOrganization" ]
+
+#Endorsed by
+endorsedBy:
+ display: "persons and organizations"
+ label: "Endorsed by"
+ helpText: "The parties that have expressed support or approval to this ontology."
+ description: [
+ "MOD: An ontology endorsed by an agent.",
+ "OMV: The parties that have expressed support or approval to this ontology." ]
+ extractedMetadata: false
+ metadataMappings: [ "omv:endorsedBy", "mod:endorsedBy" ]
+
+### Community
-identifier:
- extractedMetadata: true
- label: "Other identifier"
- description: [ "DCTERMS: An unambiguous reference to the resource within a given context. Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs.",
- "SKOS: A notation is a string of characters such as\"T58.5\"or\"303.4833\"used to uniquely identify a concept within the scope of a given concept scheme.",
- "ADMS: adms:identifier is used to link any resource to an instance of adms:Identifier which is its range. N.B. it is not appropriate to use dcterms:identifer to link to the Identifier class as its range is rdfs:Literal. ADMS uses this to provide any identifier for the Asset.",
- "SCHEMA: The identifier property represents any kind of identifier for any kind of Thing, such as ISBNs, GTIN codes, UUIDs etc. Schema.org provides dedicated properties for representing many of these, either as textual strings or as URL (URI) links. " ]
- metadataMappings: [ "dc:identifier", "skos:notation", "adms:identifier" ]
- helpText: "An unambiguous reference to the ontology. Use the ontology URI if not provided in the ontology metadata."
- example: 'https://doi.org/10.15454/1.4690062322351956E12'
+#User notes or reviews => Ontology object (bpm:notes)
+#Evaluation => Ontology object (bpm:reviews)
+#Group => Ontology object (bpm:group)
+#Used in project => Ontology object (bpm:project)
-source:
+#Target audience
+audience:
+ display: "community"
+ label: "Audience"
+ helpText: "A set of users/agents for whom the ontology is intended or useful."
+ description: [
+ "DCTERMS: a class of entity for whom the resource is intended or useful.",
+ "DOAP: Description of target user base.",
+ "SCHEMA: An intended audience, i.e. a group for whom something was created." ]
extractedMetadata: true
- description: [ "CTERMS: A related resource from which the described resource is derived",
- "SCHEMA: A resource from which this work is derived or from which it is a modification or adaptation.",
- "PROV: A derivation is a transformation of an entity into another, an update of an entity resulting in a new one, or the construction of a new entity based on a pre-existing entity.",
- "PROV: Influence is the capacity of an entity, activity, or agent to have an effect on the character, development, or behavior of another by means of usage, start, end, generation, invalidation, communication, derivation, attribution, association, or delegation.",
- "PAV: Derived from a different resource.",
- "NKOS: A resource used as the source for a derivative resource." ]
- display: "links"
- label: "Source"
- metadataMappings: [ "dc:source", "prov:wasInfluencedBy", "prov:wasDerivedFrom", "pav:derivedFrom", "schema:isBasedOn", "nkos:basedOn", "mod:sourceOntology" ]
- helpText: "A related resource from which the described resource is derived."
+ metadataMappings: [ "dcterms:audience", "doap:audience", "schema:audience" ]
-abstract:
+#Analytics => Ontology object (bpm:analytics)
+
+#Repository
+repository:
+ display: "community"
+ label: "Repository"
+ helpText: "Ontology source code repository."
+ example: 'https://github.com/Planteome/plant-trait-ontology'
+ description: [
+ "DOAP: Source code repository." ]
extractedMetadata: true
- label: "Abstract"
- description: "DCTERMS: A summary of the resource"
- helpText: "A summary of the ontology"
+ metadataMappings: [ "doap:repository" ]
-alternative:
+#Bug database
+bugDatabase:
+ display: "community"
+ label: "Bug database"
+ helpText: "Bug tracker for a project."
+ example: 'https://github.com/Planteome/plant-trait-ontology/issues'
+ description: [
+ "DOAP: Bug tracker for a project." ]
+ extractedMetadata: true
+ metadataMappings: [ "doap:bug-database" ]
+
+#Mailing list
+mailingList:
+ display: "community"
+ label: "Mailing list"
+ helpText: "Ontology support mailing list or email address."
+ description: [
+ "DOAP: Mailing list home page or email address." ]
extractedMetadata: true
- description: [ "DCTERMS: An alternative name for the resource. The distinction between titles and alternative titles is application-specific.",
- "SKOS: The preferred and alternative labels are useful when generating or creating human-readable representations of a knowledge organization system.",
- "SCHEMA: An alias for the item. A short label that is used by some communities to refer to a dataset",
- "IDOT: A short label that is used by some communities to refer to a dataset (see 'preferredPrefix')." ]
- label: "Alternative name"
- metadataMappings: [ "skos:altLabel", "idot:alternatePrefix", "schema:alternativeHeadline", "schema:alternateName" ]
- helpText: "An alternative title for the ontology"
+ metadataMappings: [ "doap:mailing-list" ]
-hasPart:
+#To Do List
+toDoList:
+ display: "community"
+ label: "To do list"
+ helpText: "Describes future tasks planned by a resource curator."
+ description: [
+ "VOAF: Describes future tasks planned by a resource curator. This property is primarily intended to be used for vocabularies or datasets, but the domain is left open, it can be used for any resource. Use iCalendar Vtodo class and its properties to further describe the task calendar, priorities etc." ]
extractedMetadata: true
- label: "Has part (has views)"
- description: [ "DCTERMS: A related resource that is included either physically or logically in the described resource.",
- "SCHEMA: Indicates an item or CreativeWork that is part of this item, or CreativeWork (in some sense).",
- "ADMS: Links to a sample of an Asset (which is itself an Asset)." ]
- metadataMappings: [ "schema:hasPart", "oboInOwl:hasSubset", "adms:includedAsset" ]
- display: "relations"
- helpText: "A related ontology that is included either physically or logically in the described ontology."
+ metadataMappings: [ "mod:toDoList", "voaf:toDoList" ]
-isFormatOf:
+#Award
+award:
+ display: "community"
+ label: "Award"
+ helpText: "An award won by or for this item."
+ description: [
+ "SCHEMA: An award won by or for this item." ]
extractedMetadata: true
- label: "Is format of"
- description: "DCTERMS: A related resource that is substantially the same as the described resource, but in another format"
- display: "links"
- helpText: "URL to the original document that describe this ontology in a not ontological format (i.e.: the OBO original file)"
+ metadataMappings: [ "schema:award" ]
+### Usage
-hasFormat:
+#Known usage
+knownUsage:
+ display: "usage"
+ label: "Known usage"
+ helpText: "The applications where the ontology is being used."
+ example: "Used to annotate phenotypes and patterns of gene expression."
+ description: [
+ "OMV: The applications where the ontology is being used.",
+ "MOD: Type of applications or usage of the ontology." ]
extractedMetadata: true
- label: "Has format"
- description: "DCTERMS: A related resource that is substantially the same as the described resource, but in another format"
- display: "links"
- helpText: "URL to a document that describe this ontology in a not ontological format (i.e.: the OBO original file) generated from this ontology."
+ metadataMappings: [ "mod:knownUsage", "omv:knownUsage" ]
-
-audience:
+#Designed for task
+designedForOntologyTask:
+ display: "usage"
+ label: "Designed for task (as defined by OMV)."
+ helpText: "The purpose or tasks for which the ontology was originally designed."
+ description: [
+ "MOD: The purpose or tasks for which the ontology was originally designed.",
+ "OMV: The purpose for which the ontology was originally designed." ]
extractedMetadata: true
- label: "Audience"
- description: [ "DCTERMS: a class of entity for whom the resource is intended or useful (public visé ou recommandé pour la ressource).",
- "DOAP: Description of target user base.",
- "SCHEMA: An intended audience, i.e. a group for whom something was created" ]
- metadataMappings: [ "doap:audience", "schema:audience" ]
- display: "community"
- helpText: "Description of the target user base of the ontology."
+ enforcedValues: {
+ "http://omv.ontoware.org/2005/05/ontology#AnnotationTask": "Annotation Task",
+ "http://omv.ontoware.org/2005/05/ontology#ConfigurationTask": "Configuration Task",
+ "http://omv.ontoware.org/2005/05/ontology#FilteringTask": "Filtering Task",
+ "http://omv.ontoware.org/2005/05/ontology#IndexingTask": "Indexing Task",
+ "http://omv.ontoware.org/2005/05/ontology#IntegrationTask": "Integration Task",
+ "http://omv.ontoware.org/2005/05/ontology#MatchingTask": "Matching Task",
+ "http://omv.ontoware.org/2005/05/ontology#MediationTask": "Mediation Task",
+ "http://omv.ontoware.org/2005/05/ontology#PersonalizationTask": "Personalization Task",
+ "http://omv.ontoware.org/2005/05/ontology#QueryFormulationTask": "Query Formulation Task",
+ "http://omv.ontoware.org/2005/05/ontology#QueryRewritingTask": "Query Rewriting Task",
+ "http://omv.ontoware.org/2005/05/ontology#SearchTask": "Search Task"
+ }
+ metadataMappings: [ "omv:designedForOntologyTask", "mod:designedForTask" ]
-valid:
+#Subject
+hasDomain:
+ display: "usage"
+ label: "Subject"
+ helpText: "The topics of the ontology."
+ example: ''
+ description: [
+ "DCTERMS: The topic of the resource.",
+ "SCHEMA: The subject matter of the content.",
+ "FOAF: A topic of some page or document.",
+ "OMV: Typically, the domain can refer to established topic hierarchies such as the general purpose topic hierarchy (DMOZ or the domain specific topic hierarchy ACM for the computer science domain.",
+ "DCAT: A main category of the resource." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:hasDomain", "dc:subject", "dcterms:subject", "foaf:topic", "dcat:theme", "schema:about" ]
+
+#Coverage
+coverage:
+ display: "usage"
+ label: "Coverage"
+ helpText: "The spatial or temporal topic of the ontology, the spatial applicability of the ontology, or the jurisdiction under which the ontology is relevant."
+ description: [
+ "DCTERMS: The spatial or temporal topic of the resource, the spatial applicability of the resource, or the jurisdiction under which the resource is relevant. Spatial topic and spatial applicability may be a named place or a location specified by its geographic coordinates. Temporal topic may be a named period, date, or date range. A jurisdiction may be a named administrative entity or a geographic place to which the resource applies.",
+ "SCHEMA: The 'spatial' property can be used in cases when more specific properties (e.g. locationCreated, spatialCoverage, contentLocation) are not known to be appropriate." ]
extractedMetadata: true
- description: [ "DCTERMS: Date (often a range) of validity of a resource.",
- "SCHEMA: The end date and time of the item.",
- "PROV: Invalidation is the start of the destruction, cessation, or expiry of an existing entity by an activity. The entity is no longer available for use (or further invalidation) after invalidation. Any generation or usage of an entity precedes its invalidation." ]
- label: "Valid until"
- metadataMappings: [ "prov:invaliatedAtTime", "schema:endDate" ]
- display: "dates"
- helpText: "Date (often a range) of validity of the ontology."
+ metadataMappings: [ "dc:coverage", "dcterms:coverage", "schema:spatial" ]
+
+#Example of use
+example:
+ display: "usage"
+ label: "Example of use"
+ helpText: "A reference to a resource that provides an example of how this ontology can be used."
+ description: [
+ "VANN: A reference to a resource that provides an example of how this resource can be used.",
+ "SCHEMA: Example/instance/realization/derivation of the concept of this creative work. eg. The paperback edition, first edition, or eBook." ]
+ extractedMetadata: true
+ metadataMappings: [ "vann:example", "schema:workExample" ]
+
+### Methodology and provenance
-accrualMethod:
+#Knowledge representation paradigm
+conformsToKnowledgeRepresentationParadigm:
+ display: "methodology"
+ label: "Knowledge representation paradigm"
+ helpText: "OMV: Information about the paradigm model used to create the ontology."
+ example: ''
+ description: [
+ "MOD: A representation formalism that is followed to describe knowledge in an ontology. Example includes description logics, first order logic, etc.",
+ "OMV: Information about the paradigm model used to create the ontology.",
+ "DCTERMS: An established standard to which the described resource conforms." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:conformsToKnowledgeRepresentationParadigm", "mod:KnowledgeRepresentationFormalism", "dcterms:conformsTo" ]
+
+#Engineering methodology
+usedOntologyEngineeringMethodology:
+ display: "methodology"
+ label: "Engineering methodology"
+ helpText: "Information about the method model used to create the ontology."
+ description: [
+ "MOD: A methodology following which an ontology is created.",
+ "OMV: Information about the method model used to create the ontology.",
+ "SCHEMA: The publishingPrinciples property indicates (typically via URL) a document describing the editorial principles of an Organization (or individual, e.g. a Person writing a blog) that relate to their activities as a publisher, e.g. ethics or diversity policies.",
+ "ADMS: More information about the format in which an Asset Distribution is released. This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:usedOntologyEngineeringMethodology", "mod:methodologyUsed", "adms:representationTechnique", "schema:publishingPrinciples" ]
+
+#Created with
+usedOntologyEngineeringTool:
+ display: "methodology"
+ label: "Created with"
+ helpText: "Information about the tool used to create the ontology."
+ description: [
+ "PAV: The software/tool used by the creator (pav:createdBy) when making the digital resource, for instance a word processor or an annotation tool.",
+ "MOD: The tool used for the creation of an ontology.",
+ "OMV: Information about the tool used to create the ontology." ]
extractedMetadata: true
+ enforcedValues: [
+ "Protégé",
+ "OWL API",
+ "OBO-Edit",
+ "SWOOP",
+ "OntoStudio",
+ "Altova",
+ "OilEd",
+ "IsaViz",
+ "WebODE",
+ "OntoBuilder",
+ "WSMO Studio",
+ "VocBench",
+ "TopBraid",
+ "NeOn-Toolkit",
+ "Another tool" ]
+ metadataMappings: [ "omv:usedOntologyEngineeringTool", "mod:toolUsed", "pav:createdWith", "oboInOwl:auto-generated-by" ]
+
+#Accrual method
+accrualMethod:
+ display: "methodology"
label: "Accrual method"
- description: "DCTERMS: The method by which items are added to a collection. May use a value from the Collection Description Accrual Method Vocabulary"
- display: "methodology"
helpText: "The method by which items are added to the ontology."
+  example: 'We take term requests via GitHub issues.'
+ description: [
+ "DCTERMS: The method by which items are added to a collection. May use a value from the Collection Description Accrual Method Vocabulary." ]
+ extractedMetadata: true
+ metadataMappings: [ "dcterms:accrualMethod" ]
-
+#Accrual periodicity
accrualPeriodicity:
- extractedMetadata: true
+ display: "methodology"
label: "Accrual periodicity"
- description: "DCTERMS: The frequency with which items are added to a collection"
- display: "methodology"
- metadataMappings: [ "nkos:updateFrequency" ]
- helpText: "The frequency with which items are added to the ontology."
+ helpText: "The frequency with which items are added to the ontology (as defined by CLD)."
+ description: [
+ "DCTERMS: The frequency with which items are added to a collection.",
+ "NKOS: The period in which a KOS is typically updated." ]
+ extractedMetadata: true
enforcedValues: {
"http://purl.org/cld/freq/triennial": "Triennial",
"http://purl.org/cld/freq/biennial": "Biennial",
@@ -703,453 +817,728 @@ accrualPeriodicity:
"http://purl.org/cld/freq/daily": "Daily",
"http://purl.org/cld/freq/continuous": "Continuous",
"http://purl.org/cld/freq/irregular": "Irregular",
-
}
+ metadataMappings: [ "dcterms:accrualPeriodicity", "nkos:updateFrequency" ]
-
+#Accrual policy
accrualPolicy:
- extractedMetadata: true
+ display: "methodology"
label: "Accrual policy"
- description: "DCTERMS: The policy governing the addition of items to a collection"
- display: "methodology"
helpText: "The policy governing the addition of items to the ontology."
-
-endpoint:
+  example: 'Term proposals are reviewed by an expert committee.'
+ description: [
+ "DCTERMS: The policy governing the addition of items to a collection." ]
extractedMetadata: true
- label: "SPARQL endpoint"
- description: [ "SD: Relates an instance of sd:Service to a SPARQL endpoint that implements the SPARQL Protocol service for the service. The object of the sd:endpoint property is an IRI.",
- "VOID: A SPARQL protocol endpoint that allows SPARQL query access to a void:Dataset." ]
- metadataMappings: [ "void:sparqlEndpoint" ]
- display: "content"
-
-
-dataDump:
+ metadataMappings: [ "dcterms:accrualPolicy" ]
+
+#Competency question
+competencyQuestion:
+ display: "methodology"
+ label: "Competency question"
+ helpText: "A set of questions made to build an ontology at the design time."
+ description: [
+ "MOD: A set of questions made to build an ontology at the design time." ]
extractedMetadata: true
- label: "Download URL"
- description: [ "DCAT: The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dcterms:format and/or dcat:mediaType.",
- "VOID: An RDF dump, partial or complete, of a void:Dataset.",
- "DOAP: Mirror of software download web page.",
- "SCHEMA: A downloadable form of this dataset, at a specific location, in a specific format." ]
- metadataMappings: [ "schema:distribution", "doap:download-mirror" ]
- display: "content"
+ metadataMappings: [ "mod:competencyQuestion" ]
-csvDump:
- display: "content"
- label: "CSV dump"
+#Sample queries => Not implemented
-openSearchDescription:
+#Was generated by
+wasGeneratedBy:
+ display: "methodology"
+ label: "Was generated by"
+ helpText: "Generation is the completion of production of a new ontology by an activity."
+ example: 'Generated by the workflow described ...'
+ description: [
+ "PROV: Generation is the completion of production of a new entity by an activity. This entity did not exist before generation and becomes available for usage after this generation." ]
extractedMetadata: true
- label: "Free-text search endpoint"
- description: "VOID: An OpenSearch description document for a free-text search service over a void:Dataset. "
- metadataMappings: [ "doap:service-endpoint" ]
- display: "content"
+ metadataMappings: [ "prov:wasGeneratedBy" ]
-uriLookupEndpoint:
+#Was invalidated by
+wasInvalidatedBy:
+ display: "methodology"
+ label: "Was invalidated by"
+ helpText: "Invalidation is the start of the destruction, cessation, or expiry of an existing ontology by an activity."
+ example: 'Invalidated by the production of ...'
+ description: [
+ "PROV: Invalidation is the start of the destruction, cessation, or expiry of an existing entity by an activity. The entity is no longer available for use (or further invalidation) after invalidation. Any generation or usage of an entity precedes its invalidation." ]
extractedMetadata: true
- description: "VOID: A protocol endpoint for simple URI lookups for a void:Dataset."
- display: "content"
- label: "URI Lookup Endpoint"
- helpText: "A protocol endpoint for simple URI lookups for the ontology."
+ metadataMappings: [ "prov:wasInvalidatedBy" ]
-uriRegexPattern:
- extractedMetadata: true
- description: "VOID: A protocol endpoint for simple URI lookups for a void:Dataset."
- metadataMappings: [ "idot:identifierPattern" ]
- display: "content"
- label: "URI Regex Pattern"
- helpText: "A regular expression that matches the URIs of the ontology entities."
+### Object description properties
-depiction:
+#Object preferred label property
+prefLabelProperty:
+ display: "object description properties"
+ label: "Object preferred label property"
+ helpText: "Property used to specify objects preferred label."
+ description: [
+ "MOD: Property used to specify objects preferred label." ]
extractedMetadata: true
- label: "Depiction"
- description: [ "FOAF: A depiction of something.",
- "DOAP: Web page with screenshots of project. An image of the item. SCHEMA: An image of the item. This can be a URL or a fully described ImageObject." ]
- metadataMappings: [ "doap:screenshots", "schema:image" ]
- display: "images"
- helpText: "The URL of an image representing the ontology."
+ enforcedValues: {
+ "http://www.w3.org/2004/02/skos/core#prefLabel": "skos:prefLabel",
+ "http://www.w3.org/2000/01/rdf-schema#label": "rdfs:label",
+ "http://schema.org/name": "schema:name",
+ "http://xmlns.com/foaf/0.1/name": "foaf:name",
+ "http://purl.org/dc/terms/title": "dcterms:title",
+ "http://purl.org/dc/elements/1.1/title": "dc:title"
+ }
+ metadataMappings: [ "mod:prefLabelProperty" ]
-logo:
+#Object synonym property
+synonymProperty:
+ display: "object description properties"
+ label: "Object synonym property"
+ helpText: "Property used to specify objects synonyms."
+ description: [
+ "MOD: Property used to specify objects synonyms." ]
extractedMetadata: true
- label: "Logo"
- description: [ "FOAF: A logo representing something.",
- "SCHEMA: An associated logo." ]
- metadataMappings: [ "schema:logo" ]
- display: "images"
- helpText: "The URL of the ontology logo."
+ enforcedValues: {
+ "http://www.w3.org/2004/02/skos/core#altLabel ": "skos:altLabel",
+ "http://purl.org/dc/terms/alternative ": "dcterms:alternative",
+ "http://schema.org/alternateName": "schema:alternativeName",
+ "http://www.geneontology.org/formats/oboInOwl#hasSynonym": "oboInOwl:hasSynonym",
+ "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym": "oboInOwl:hasExactSynonym",
+ "http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym": "oboInOwl:hasNarrowSynonym",
+ "http://www.geneontology.org/formats/oboInOwl#hasBroadSynonym": "oboInOwl:hasBroadSynonym",
+ "http://www.geneontology.org/formats/oboInOwl#hasRelatedSynonym": "oboInOwl:hasRelatedSynonym"
+ }
+ metadataMappings: [ "mod:synonymProperty" ]
-competencyQuestion:
+#Object definition property
+definitionProperty:
+ display: "object description properties"
+ label: "Object definition property"
+ helpText: "Property used to specify objects definition."
+ description: [
+ "MOD: Property used to specify objects definition." ]
+ extractedMetadata: true
+ enforcedValues: {
+ "http://www.w3.org/2004/02/skos/core#definition": "skos:definition",
+ "http://www.w3.org/2000/01/rdf-schema#comment": "rdfs:comment",
+ "http://purl.org/dc/terms/description ": "dcterms:description",
+ "http://purl.org/dc/elements/1.1/description": "dc:description",
+ "http://schema.org/description ": "schema:decription",
+ "http://www.geneontology.org/formats/oboInOwl#hasDefinition": "oboInOwl:hasDefinition"
+ }
+ metadataMappings: [ "mod:definitionProperty" ]
+
+#Object author property
+authorProperty:
+ display: "object description properties"
+ label: "Object author property"
+ helpText: "Property used to specify object author."
+ description: [
+ "MOD: Property used to specify object author." ]
extractedMetadata: true
- label: "Competency question"
- description: [ "FOAF: A logo representing something.",
- "SCHEMA: An associated logo." ]
- display: "methodology"
- helpText: "A set of questions made to build an ontology at the design time."
-
+ enforcedValues: {
+ "http://purl.org/dc/elements/1.1/creator": "dc:creator",
+ "http://purl.org/dc/terms/creator": "dcterms:creator",
+ "http://schema.org/author": "schema:author",
+ "http://www.w3.org/ns/prov#wasAttributedTo": "prov:wasAttributedTo",
+ "http://purl.org/pav/authoredBy": "pav:authoredBy",
+ "http://purl.org/pav/createdBy": "pav:createdBy",
+ "http://xmlns.com/foaf/0.1/maker": "foaf:maker"
+ }
+ metadataMappings: [ "mod:authorProperty" ]
-usedBy:
+#Object obsolete property
+obsoleteProperty:
+ display: "object description properties"
+ label: "Object obsolete property"
+ helpText: "Property used to specify obsolete objects."
+ description: [
+ "MOD: Property used to specify obsolete objects." ]
extractedMetadata: true
- label: "Used by"
- description: [ "VOAF: Indicates that the subject vocabulary is used by the object vocabulary.",
- "NKOS: Agent using the described KOS" ]
- display: "relations"
- metadataMappings: [ "nkos:usedBy" ]
- helpText: "Ontologies that use the described ontology."
+ enforcedValues: {
+ "http://www.w3.org/2002/07/owl#deprecated": "owl:deprecated",
+ "http://identifiers.org/idot/obsolete": "idot:obsolete"
+ }
+ metadataMappings: [ "mod:obsoleteProperty" ]
-metadataVoc:
+#Object creation date property
+createdProperty:
+ display: "object description properties"
+ label: "Object creation date property"
+ helpText: "Property used to specify the date of creation of a class or another object in the ontology."
+ description: [
+ "MOD: Property used to specify the date of creation of a class or another object in the ontology." ]
extractedMetadata: true
- description: [ "VOAF: Indicates that the subject vocabulary uses the object vocabulary in metadata at vocabulary or element level",
- "SCHEMA: Indicates (by URL or string) a particular version of a schema used in some CreativeWork.",
- "ADMS: A schema according to which the Asset Repository can provide data about its content, e.g. ADMS.",
- "MOD: A vocabulary(ies) that is used and/or referred to create the current ontology." ]
- display: "content"
- label: "Metadata vocabulary used"
- metadataMappings: [ "mod:vocabularyUsed", "adms:supportedSchema", "schema:schemaVersion" ]
- helpText: "Vocabularies that are used and/or referred to create the described ontology."
+ enforcedValues: {
+ "http://purl.org/dc/terms/created ": "dcterms:created",
+ "http://purl.org/dc/terms/issued ": "dcterms:issued",
+ "http://purl.org/dc/terms/date": "dcterms:date",
+ "http://purl.org/dc/elements/1.1/date": "dc:date",
+ "http://purl.org/pav/authoredOn ": "pav:authoredOn",
+ "http://purl.org/pav/contributedOn": "pav:contributedOn",
+ "http://purl.org/pav/createdOn": "pav:createdOn",
+ "http://schema.org/dateCreated": "schema:dateCreated",
+ "http://www.w3.org/ns/prov#generatedAtTime": "prov:generatedAtTime"
+ }
+ metadataMappings: [ "mod:modifiedProperty" ]
-hasDisjunctionsWith:
+#Object modification date property
+modifiedProperty:
+ display: "object description properties"
+ label: "Object modification date property"
+ helpText: "Property used to specify the date of modification of a class or another object in the ontology."
+ description: [
+ "MOD: Property used to specify the date of modification of a class or another object in the ontology." ]
extractedMetadata: true
- label: "Disparate modelling with"
- description: "VOAF: Indicates that the subject vocabulary declares some disjunct classes with the object vocabulary"
- helpText: "Ontology that declares some disjunct classes with the described ontology."
-
-toDoList:
+ enforcedValues: {
+ "http://purl.org/dc/terms/modified ": "dc:modified",
+ "http://purl.org/dc/terms/issued ": "dcterms:issued",
+ "http://purl.org/dc/terms/date": "dcterms:date",
+ "http://purl.org/dc/elements/1.1/date": "dc:date",
+ "http://purl.org/pav/authoredOn ": "pav:authoredOn",
+ "http://purl.org/pav/contributedOn": "pav:contributedOn",
+ "http://purl.org/pav/lastUpdateOn": "pav:lastUpdateOn",
+ "http://schema.org/dateModified": "schema:dateModified"
+ }
+ metadataMappings: [ "mod:createdProperty" ]
+
+#Hierarchy property
+hierarchyProperty:
+ display: "object description properties"
+ label: "Hierarchy property"
+ helpText: "A property that is used to specify the hierarchy."
+ description: [
+ "MOD: A property that is used to specify the hierarchy." ]
extractedMetadata: true
- label: "To do list"
- description: "VOID: Describes future tasks planned by a resource curator. This property is primarily intended to be used for vocabularies or datasets, but the domain is left open, it can be used for any resource. Use iCalendar Vtodo class and its properties to further describe the task calendar, priorities etc"
- display: "community"
- helpText: "Describes future tasks planned by a resource curator."
+ enforcedValues: {
+ "http://www.w3.org/2000/01/rdf-schema#subClassOf": "rdfs:subClassOf",
+ "http://www.w3.org/2004/02/skos/core#broader": "skos:broader"
+ }
+ metadataMappings: [ "mod:hierarchyProperty" ]
+
+### Links
-example:
+#Access URL
+pullLocation:
+ display: "links"
+ label: "Access URL"
+ helpText: "A URL of a resource that gives access to a distribution of the ontology."
+ description: [
+ "DCAT: A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint.",
+ "OMV: The location where the ontology can be found.",
+ "DOAP: Web page from which the project software can be downloaded" ]
extractedMetadata: true
- description: [ "VANN: A reference to a resource that provides an example of how this resource can be used.",
- "SCHEMA: Example/instance/realization/derivation of the concept of this creative work. eg. The paperback edition, first edition, or eBook" ]
- metadataMappings: [ "schema:workExample" ]
- display: "usage"
- helpText: "A reference to a resource that provides an example of how this ontology can be used."
- label: "Example of use"
+ metadataMappings: [ "doap:download-page" , "dcat:accessURL" , "omv:resourceLocator" ]
-preferredNamespaceUri:
+#Is format of
+isFormatOf:
+ display: "links"
+ label: "Is format of"
+ helpText: "A related resource that is substantially the same as the described resource, but in another format."
+ description: [
+ "DCTERMS: A related resource that is substantially the same as the described resource, but in another format." ]
extractedMetadata: true
- label: "Preferred namespace URI"
- description: [ "VANN: The preferred namespace URI to use when using terms from this vocabulary in an XML document.",
- "VOID: A URI that is a common string prefix of all the entity URIs in a void:Dataset" ]
- metadataMappings: [ "void:uriSpace" ]
- helpText: "The preferred namespace URI to use when using terms from this ontology."
- example: 'http://purl.obolibrary.org/obo/ENVO_'
+ metadataMappings: [ "dcterms:isFormatOf" ]
-
-preferredNamespacePrefix:
+#Has format
+hasFormat:
+ display: "links"
+ label: "Has format"
+ helpText: "A related resource that is substantially the same as the pre-existing described resource, but in another format."
+ description: [
+ "DCTERMS: A related resource that is substantially the same as the pre-existing described resource, but in another format." ]
extractedMetadata: true
- label: "Preferred namespace prefix"
- description: [ "VANN: The preferred namespace prefix to use when using terms from this vocabulary in an XML document.",
- "IDOT: Short label that is commonly used to refer to the dataset. Often used to identify the dataset in IRIs for specific items (or records). This may also stand in place of the base IRI of the dataset (e.g. see http://prefix.cc)." ]
- metadataMappings: [ "idot:preferredPrefix", "oboInOwl:default-namespace", "oboInOwl:hasDefaultNamespace" ]
- helpText: "The preferred namespace prefix to use when using terms from this ontology."
+ metadataMappings: [ "dcterms:hasFormat" ]
-
-morePermissions:
- description: "CC: A related resource which describes additional permissions or alternative licenses for a Work which may be available"
+#Download URL
+dataDump:
+ display: "links"
+ label: "Download URL"
+ helpText: "An RDF dump, partial or complete, of an ontology."
+ description: [
+ "DCAT: The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dcterms:format and/or dcat:mediaType.",
+ "VOID: An RDF dump, partial or complete, of a void:Dataset.",
+ "DOAP: Mirror of software download web page.",
+ "SCHEMA: A downloadable form of this dataset, at a specific location, in a specific format." ]
extractedMetadata: true
- label: "More permissions"
- display: "license"
- helpText: "A related resource which describes additional permissions or alternative licenses."
+ metadataMappings: [ "void:dataDump", "schema:distribution", "doap:download-mirror", "dcat:downloadURL" ]
+
+#CSV dump
+csvDump:
+ display: "links"
+ label: "CSV dump"
+ helpText: "A CSV dump, partial or complete, of an ontology."
+ extractedMetadata: false
-useGuidelines:
+#URI lookup endpoint
+uriLookupEndpoint:
+ display: "links"
+ label: "URI Lookup Endpoint"
+ helpText: "A protocol endpoint for simple URI lookups for the ontology."
+ description: [
+ "VOID: A protocol endpoint for simple URI lookups for a void:Dataset." ]
extractedMetadata: true
- label: "Use guidelines"
- description: "CC: A related resource which defines non-binding use guidelines for the work"
- display: "community"
- helpText: "A related resource which defines how the ontology should be used."
-
+ metadataMappings: [ "void:uriLookupEndpoint" ]
-curatedOn:
+#Free-text search endpoint
+openSearchDescription:
+ display: "links"
+ label: "Free-text search endpoint"
+ helpText: "An open search description document for a free-text search service over an ontology."
+ description: [
+ "VOID: An OpenSearch description document for a free-text search service over a void:Dataset." ]
extractedMetadata: true
- label: "Curation date"
- description: "PAV: Specifies the date this resource was curated. pav:curatedBy gives the agents that performed the curation."
- display: "dates"
- helpText: "The date the ontology was curated."
- example: '2022-09-06'
+ metadataMappings: [ "void:openSearchDescription" ]
-repository:
+#Browsing user interface
+ui:
+ display: "links"
+ label: "Browsing user interface"
+ helpText: "The user interface (URL) where the ontology may be browsed or searched."
+ description: [
+ "MOD: The user interface (URL) where the ontology may be browsed or searched." ]
extractedMetadata: true
- label: "Repository"
- description: "DOAP: Source code repository"
- display: "community"
- helpText: "Link to the source code repository."
- example: 'https://github.com/Planteome/plant-trait-ontology'
+ metadataMappings: [ "mod:browsingUI" ]
-bugDatabase:
+#Source
+source:
+ display: "links"
+ label: "Source"
+ helpText: "A related resource from which the described resource is derived."
+ description: [
+ "DCTERMS: A related resource from which the described resource is derived.",
+ "SCHEMA: A resource from which this work is derived or from which it is a modification or adaptation.",
+ "PROV:prov:wasDerivedFrom: A derivation is a transformation of an entity into another, an update of an entity resulting in a new one, or the construction of a new entity based on a pre-existing entity.",
+ "PROV:wasInfluencedBy: Influence is the capacity of an entity, activity, or agent to have an effect on the character, development, or behavior of another by means of usage, start, end, generation, invalidation, communication, derivation, attribution, association, or delegation.",
+ "PAV: Derived from a different resource.",
+ "NKOS: A resource used as the source for a derivative resource.",
+ "MOD: The ontology(ies) referred to while creating the present ontology." ]
extractedMetadata: true
- label: "Bug database"
- description: "DOAP: Bug tracker for a project"
- display: "community"
- helpText: "Link to the bug tracker of the ontology (i.e.: GitHub issues)."
+ metadataMappings: [ "dcterms:source", "mod:sourceOntology", "nkos:basedOn", "pav:derivedFrom", "prov:wasInfluencedBy", "prov:wasDerivedFrom", "schema:isBasedOn" ]
-mailingList:
+#SPARQL endpoint
+endpoint:
+ display: "links"
+ label: "SPARQL endpoint"
+ helpText: "Relates an instance of sd:Service to a SPARQL endpoint that implements the SPARQL Protocol service for the service."
+ description: [
+ "SD: Relates an instance of sd:Service to a SPARQL endpoint that implements the SPARQL Protocol service for the service. The object of the sd:endpoint property is an IRI.",
+ "VOID: A SPARQL protocol endpoint that allows SPARQL query access to a void:Dataset." ]
extractedMetadata: true
- label: "Mailing list"
- description: "DOAP: Mailing list home page or email address "
- display: "community"
- helpText: "Mailing list home page or email address."
-
-exampleIdentifier:
- extractedMetadata: false
- label: "Example of resource"
- description: [ "VOID: Example resource of dataset.",
- "IDOT: An example identifier used by one item (or record) from a dataset." ]
- display: "content"
- helpText: "An example identifier used by one item (or record) from a dataset."
-
-award:
+ metadataMappings: [ "sd:endpoint", "void:sparqlEndpoint" ]
+
+#Indexed or Included in catalog or repository
+includedInDataCatalog:
+ display: "links"
+ label: "Indexed or included in catalog or repository"
+ helpText: "An ontology library or repository which contains this ontology (e.g., OBO Foundry, NCBO BioPortal, EBI-OLS, FAIRsharing, etc.)."
+ description: [
+ "SCHEMA: A data catalog which contains this dataset." ]
extractedMetadata: true
- label: "Award"
- description: "SCHEMA: An award won by or for this item"
- display: "community"
- helpText: "An award won by this ontology."
-
-copyrightHolder:
- extractedMetadata: false
- label: "Rights holder"
- description: [ "SCHEMA: The party holding the legal copyright to the CreativeWork.",
- "DCTERMS: A person or organization owning or managing rights over the resource." ]
- display: "license"
- helpText: "The party holding the legal copyright to the CreativeWork."
- example: 'INRAE (003vg9w96)'
-
-associatedMedia:
+ enforcedValues: {
+ "https://bioportal.bioontology.org": "NCBO BioPortal",
+ "https://agroportal.lirmm.fr": "AgroPortal",
+ "https://bioportal.lirmm.fr": "SIFR BioPortal",
+ "https://ecoportal.lifewatchitaly.eu": "LifeWatch EcoPortal",
+ "https://medportal.bmicc.cn": "MedPortal",
+ "https://matportal.org": "MatPortal",
+ "https://industryportal.enit.fr": "IndustryPortal",
+ "https://earthportal.eu": "EarthPortal",
+ "https://biodivportal.gfbio.org": "BiodivPortal",
+ "https://ebi.ac.uk/ols": "EBI OLS",
+ "https://ontobee.org": "Ontobee",
+ "https://ontohub.org": "OntoHub",
+ "https://aber-owl.net": "AberOWL",
+ "https://lov.linkeddata.es/dataset/lov": "LOV",
+ "https://onki.fi": "ONKI Ontology Library Service",
+ "https://mmisw.org": "MMI ORR",
+ "https://cor.esipfed.org": "ESIP COR",
+ "https://hetop.eu": "CISMeF HeTOP",
+ "https://finto.fi": "FINTO",
+ "https://vocabs.ardc.edu.au": "ANDC RVA" ,
+ "https://vocab.nerc.ac.uk": "NVS" ,
+ "https://terminologies.gfbio.org": "GFBIO TS",
+ "https://loterre.fr": "Loterre",
+ "https://datalab.review.fao.org/datalab/caliper": "Caliper",
+ "https://cropontology.org": "Crop Ontology Curation Tool",
+ "https://planteome.org": "Planteome",
+ "https://obofoundry.org": "OBO Foundry",
+ "https://vest.agrisemantics.org": "Agrisemantics",
+ "https://fairsharing.org": "FAIRsharing",
+ "https://thezfiles.co.za/ROMULUS": "ROMULUS",
+ "https://daml.org/ontologies": "DAML Ontology Library",
+ "https://stl.mie.utoronto.ca/colore": "Colore",
+ "https://bartoc.org": "BARTOC",
+ "https://taxobank.org": "TaxoBank",
+ "https://linkeddata.ge.imati.cnr.it": "LusTRE",
+ "https://lov4iot.appspot.com": "LOV4IoT",
+ "https://vocab.linkeddata.es": "VOCAB OEG",
+ "https://liveschema.eu": "LiveSchema",
+ "https://protegewiki.stanford.edu/wiki/Protege_Ontology_Library": "Protege Ontology Library"
+ }
+ metadataMappings: [ "schema:includedInDataCatalog" ]
+
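The enforcedValues block above pairs each catalog URI with the display label the portal shows. A minimal Ruby sketch of how such a lookup could normalize a submitted value; the constant and method names are illustrative, not from this codebase:

    # Hypothetical names; the real pairs come from the YAML above.
    ENFORCED_CATALOGS = {
      'https://obofoundry.org'            => 'OBO Foundry',
      'https://bioportal.bioontology.org' => 'NCBO BioPortal'
    }.freeze

    # Returns [uri, label]; an unknown URI keeps its raw value and gets no label.
    def normalize_catalog(value)
      uri = value.to_s.sub(%r{/+\z}, '') # tolerate trailing slashes
      [uri, ENFORCED_CATALOGS[uri]]
    end

    normalize_catalog('https://obofoundry.org/') # => ["https://obofoundry.org", "OBO Foundry"]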
+### Relation
+
+#Imports
+useImports:
+ display: "relations"
+ label: "Imports"
+ helpText: "References another ontology metadata instance that describes an ontology containing definitions, whose meaning is considered to be part of the meaning of the ontology described by this ontology metadata instance."
+ description: [
+ "OWL: References another OWL ontology containing definitions, whose meaning is considered to be part of the meaning of the importing ontology.",
+ "OMV: References another ontology metadata instance that describes an ontology containing definitions, whose meaning is considered to be part of the meaning of the ontology described by this ontology metadata instance.",
+ "DCTERMS: A related resource that is required by the described resource to support its function, delivery, or coherence.",
+ "VOAF: Indicates that the subject vocabulary extends the expressivity of the object vocabulary by declaring subsumption relationships, using object vocabulary class as domain or range of a subject vocabulary property, defining local restrictions etc." ]
extractedMetadata: true
- label: "Associated media"
- description: "SCHEMA: A media object that encodes this CreativeWork. This property is a synonym for encoding"
- display: "images"
- helpText: "A media object that encodes this ontology. This property is a synonym for encoding."
-
+ metadataMappings: [ "omv:useImports", "owl:imports", "voaf:extends", "dcterms:requires", "oboInOwl:import" ]
-workTranslation:
- extractedMetadata: true
- description: [ "MOD: A pointer to the translated ontology(ies) for an existing ontology. ",
- "SCHEMA: A work that is a translation of the content of this work.",
- "ADMS: Links Assets that are translations of each other." ]
+#Prior version
+hasPriorVersion:
display: "relations"
- helpText: "A ontology that is a translation of the content of this ontology."
- metadataMappings: [ "mod:translation " ]
- label: "Translated from"
+ label: "Prior version"
+ helpText: "Contains a reference to another ontology metadata instance."
+ description: [
+ "OWL: c This identifies the specified ontology as a prior version of the containing ontology.",
+ "OMV: Contains a reference to another ontology metadata instance.",
+ "DCTERMS: A related resource of which the described resource is a version, edition, or adaptation.",
+ "PROV: A revision is a derivation for which the resulting entity is a revised version of some original. The implication here is that the resulting entity contains substantial content from the original.",
+ "DOOR: Prior version relation from OWL.",
+ "ADMS: A link to the previous version of the Asset." ]
+ extractedMetadata: true
+ metadataMappings: [ "omv:hasPriorVersion", "owl:priorVersion", "dcterms:isVersionOf", "door:priorVersion", "prov:wasRevisionOf", "adms:prev" ]
+
+#Is part of (view of) => Ontology object (bpm:viewOf)
+#Has part (has views)
+hasPart:
+ display: "relations"
+ label: "Has part (has views)"
+ helpText: "A related resource that is included either physically or logically in the described resource."
+ description: [
+ "DCTERMS: A related resource that is included either physically or logically in the described resource.",
+ "SCHEMA: Indicates an item or CreativeWork that is part of this item, or CreativeWork (in some sense).",
+ "ADMS: Links to a sample of an Asset (which is itself an Asset)." ]
+ extractedMetadata: true
+ metadataMappings: [ "dcterms:hasPart", "schema:hasPart", "oboInOwl:hasSubset", "adms:sample" ]
-translationOfWork:
+#Specializes
+explanationEvolution:
+ display: "relations"
+ label: "Specializes"
+ helpText: "Evolution which involves only at the syntactic level."
+ description: [
+ "DOOR: Evolution which involves only at the syntactic level.",
+ "PROV: An entity that is a specialization of another shares all aspects of the latter, and additionally presents more specific aspects of the same thing as the latter.",
+ "VOAF:Indicates that the subject vocabulary defines some subclasses or subproperties of the object vocabulary, or local restrictions on those.",
+ "MOD: An ontology that is a specialization of another and presents more specific aspects." ]
extractedMetadata: true
- description: [ "SCHEMA: The work that this work has been translated from.",
- "ADMS: Links Assets that are translations of each other." ]
- metadataMappings: [ "adms:translation" ]
- helpText: "The ontology that this ontology has been translated from."
- label: "Translation of"
+ metadataMappings: [ "mod:specializes", "door:explanationEvolution", "voaf:specializes", "prov:specializationOf" ]
+
+#Generalizes
+generalizes:
display: "relations"
-
-includedInDataCatalog:
+ label: "Generalizes"
+ helpText: "Indicates that the subject vocabulary generalizes by some superclasses or super properties the object vocabulary."
+ description: [
+ "VOAF: Indicates that the subject vocabulary generalizes by some superclasses or super properties the object vocabulary.",
+ "MOD: An ontology that is a generalization of another and presents more generic aspects.",
+ "PROV: Inverse property of specializationOf." ]
extractedMetadata: true
- label: "Indexed or Included in catalog or repository"
- description: "SCHEMA: A data catalog which contains this dataset."
- display: "links"
- helpText: "A data catalog which contains this ontology (i.e.: OBOfoundry, aber-owl, EBI, VEST registry...)."
- enforcedValues: {
- "bioportal.bioontology.org": "NCBO BioPortal",
- "agroportal.lirmm.fr": "AgroPortal",
- "bioportal.lirmm.fr": "SIFR BioPortal",
- "ebi.ac.uk/ols": "EBI OLS",
- "ontobee.org": "Ontobee",
- "ontohub.org": "OntoHub",
- "aber-owl.net": "AberOWL",
- "lov.linkeddata.es/dataset/lov": "LOV",
- "onki.fi": "ONKI Ontology Library Service",
- "mmisw.org": "MMI ORR",
- "cor.esipfed.org": "ESIP COR",
- "ecoportal.lifewatchitaly.eu": "LifeWatch EcoPortal",
- "matportal.org": "MatPortal",
- "medportal.bmicc.cn": "MedPortal",
- "hetop.eu": "CISMeF HeTOP",
- "finto.fi": "FINTO",
- "vocabs.ardc.edu.au": "ANDC RVA" ,
- "vocab.nerc.ac.uk": "NVS" ,
- "terminologies.gfbio.org": "GFBIO TS",
- "loterre.fr": "Loterre",
- "datalab.review.fao.org/datalab/caliper": "Caliper",
- "cropontology.org": "Crop Ontology Curation Tool",
- "planteome.org": "Planteome",
- "obofoundry.org": "OBO Foundry",
- "vest.agrisemantics.org": "Agrisemantics",
- "fairsharing.org": "FAIRsharing",
- "thezfiles.co.za/ROMULUS": "ROMULUS",
- "daml.org/ontologies": "DAML Ontology Library",
- "stl.mie.utoronto.ca/colore": "Colore",
- "bartoc.org": "BARTOC",
- "taxobank.org": "TaxoBank",
- "linkeddata.ge.imati.cnr.it": "LusTRE",
- "lov4iot.appspot.com": "LOV4IoT",
- "vocab.linkeddata.es": "VOCAB OEG",
- "liveschema.eu": "LiveSchema",
- "protegewiki.stanford.edu/wiki/Protege_Ontology_Library": "Protege Ontology Library"
- }
+ metadataMappings: [ "voaf:generalizes", "mod:generalizes", "prov:generalizationOf" ]
-prefLabelProperty:
+#Used by
+usedBy:
+ display: "relations"
+ label: "Used by"
+ helpText: "Indicates that the subject vocabulary is used by the object vocabulary."
+ description: [
+ "VOAF: Indicates that the subject vocabulary is used by the object vocabulary.",
+ "NKOS: Agent using the described KOS.",
+ "MOD: Ontologies that use the described ontology." ]
+ extractedMetadata: true
+ metadataMappings: [ "mod:usedBy", "voaf:usedBy", "nkos:usedBy" ]
+
+#Relies on
+#Relation
+#Generally related to
+ontologyRelatedTo:
+ display: "relations"
+ label: "Generally related to or relies on"
+ helpText: "An ontology is related to another or relies/uses another one."
+ description: [
+ "DCTERMS: A related resource.",
+ "VOAF: Indicates that the subject vocabulary uses or extends some class or property of the object vocabulary.",
+ "MOD: A general property for different kind of case when a semantic resource relies or reuses another one.",
+ "VOID: A vocabulary that is used in the dataset.",
+ "DOOR: An ontology is related to another if one of the DOOR relations is satisfied." ]
+ extractedMetadata: true
+ metadataMappings: [ "door:ontologyRelatedTo", "dc:relation", "dcterms:relation", "voaf:reliesOn", "mod:reliesOn", "void:vocabulary"]
+
+#Similar to
+similarTo:
+ display: "relations"
+ label: "Similar to"
+ helpText: "Represents the meaning of 'how an ontology overlap/cover parts of the same area of interest of another ontology."
+ description: [
+ "VOAF: Used to assert that two vocabularies are similar in scope and objectives, independently of the fact that they otherwise refer to each other.",
+ "DOOR: Represents the meaning of 'how an ontology overlap/cover parts of the same area of interest of another ontology.",
+ "MOD: Ontologies that are similar or thematically close to the described ontology." ]
+ extractedMetadata: true
+ metadataMappings: [ "voaf:similar", "mod:similar", "door:similarTo" ]
+
+#Comes from the same domain
+comesFromTheSameDomain:
+ display: "relations"
+ label: "Comes from the same domain"
+ helpText: "If the two ontologies come from the same domain (without any other details)."
+ description: [
+ "DOOR: If the two ontologies come from the same domain (without any other details).",
+ "MOD: Ontologies that come from the same domain or discipline than the described ontology, not necessariliy similar or close." ]
+ extractedMetadata: true
+ metadataMappings: [ "mod:comesFromTheSameDomain", "door:comesFromTheSameDomain" ]
+
+#Has equivalences with
+isAlignedTo:
+ display: "relations"
+ label: "Has equivalences with"
+ helpText: "Links two ontologies if there exists an alignment which covers a substantial part of the vocabulary (i.e., a proportion greater than a threshold)."
+ description: [
+ "VOAF: Indicates that the subject vocabulary declares some equivalent classes or properties with the object vocabulary.",
+ "DOOR: Links two ontologies if there exists an alignment which covers a substantial part of the vocabulary (i.e., a proportion greater than a threshold).",
+ "NKOS: A related resource with which the described resource is aligned.",
+ "MOD: Ontologies to which the described ontology is aligned or has equivalences or mappigns with." ]
extractedMetadata: true
- description: "MOD: Property used to specify objects preferred label"
- label: "Object preferred label property"
- display: "Object description properties"
- enforcedValues: {
- "http://www.w3.org/2004/02/skos/core#prefLabel": "prefLabel",
- "http://www.w3.org/2000/01/rdf-schema#label": "label",
- "http://schema.org/name": "name",
- "http://xmlns.com/foaf/0.1/name": "name",
- "http://purl.org/dc/terms/title": "title"
- }
-
-synonymProperty:
+ metadataMappings: [ "door:isAlignedTo", "voaf:hasEquivalencesWith", "nkos:alignedWith", "mod:hasEquivalencesWith"]
+
+#Backward Compatible
+isBackwardCompatibleWith:
+ display: "relations"
+ label: "Backward compatible"
+ helpText: "The ontology metadata instance which describes an ontology that is a compatible prior version of the ontology described by this ontology metadata Instance."
+ description: [
+ "OWL: This identifies the specified ontology as a prior version of the containing ontology, and further indicates that it is backward compatible with it.",
+ "OMV: The ontology metadata instance which describes an ontology that is a compatible prior version of the ontology described by this ontology metadata Instance.",
+ "DOOR: The relation of being a compatible new version from owl." ]
extractedMetadata: true
- description: "MOD: Property used to specify objects synonyms"
- label: "Object synonym property"
- display: "Object description properties"
- enforcedValues: {
- "http://www.w3.org/2004/02/skos/core#altLabel ": "altLabel",
- "http://purl.org/dc/terms/alternative ": "alternative",
- "http://schema.org/alternateName": "alternativeName"
- }
-
-
-definitionProperty:
+ metadataMappings: [ "omv:isBackwardCompatibleWith", "owl:backwardCompatibleWith", "door:backwardCompatibleWith" ]
+
+#Incompatible
+isIncompatibleWith:
+ display: "relations"
+ label: "Incompatible"
+ helpText: "The described ontology is a later version of the ontology described by the metadata specified, but is not backward compatible with it. It can be used to explicitly state that ontology cannot upgrade to use the new version without checking whether changes are required."
+ description: [
+ "OWL: This indicates that the containing ontology is a later version of the referenced ontology, but is not backward compatible with it.",
+ "OMV: The described ontology is a later version of the ontology described by the metadata specified, but is not backward compatible with it. It can be used to explicitly state that ontology cannot upgrade to use the new version without checking whether changes are required." ]
extractedMetadata: true
- description: "MOD: Property used to specify objects' definition"
- label: "Object definition property"
- display: "Object description properties"
- enforcedValues: {
- "http://www.w3.org/2004/02/skos/core#definition": "definition",
- "http://www.w3.org/2000/01/rdf-schema#comment": "comment",
- "http://purl.org/dc/terms/description ": "description",
- "http://schema.org/description ": "decription"
- }
+ metadataMappings: [ "owl:incompatibleWith", "door:owlIncompatibleWith", "omv:isIncompatibleWith" ]
-
-authorProperty:
+#Disparate modelling with
+hasDisparateModelling:
+ display: "relations"
+ label: "Disparate modelling with"
+ helpText: "Disagreements related to the conceptualization of the ontologies."
+ description: [
+ "DOOR: Disagreements related to the conceptualization of the ontologies. Two ontologies are considered to have disparate modeling if they represent corresponding entities in different ways, e.g. as an instance in one case and a class in the other.",
+ "MOD: Ontologies that are considered to have a different model, because they represent corresponding entities in different ways e.g., an instance in one case and a class in the other for the same concept." ]
extractedMetadata: true
- description: "MOD: Property used to specify object's author"
- label: "Object author property"
- display: "Object description properties"
- enforcedValues: {
- "http://purl.org/dc/elements/1.1/creator": "creator",
- "http://schema.org/author": "author",
- "http://www.w3.org/ns/prov#wasAttributedTo": "wasAttributedTo",
- "http://purl.org/pav/authoredBy": "authoredBy",
- "http://purl.org/pav/createdBy": "createdBy",
- "http://xmlns.com/foaf/0.1/maker": "maker"
- }
+ metadataMappings: [ "door:hasDisparateModelling", "mod:hasDisparateModellingWith" ]
-obsoleteProperty:
+#Has disjunctions with
+hasDisjunctionsWith:
+ display: "relations"
+ label: "Has disjunctions with"
+ helpText: "Indicates that the subject vocabulary declares some disjunct classes with the object vocabulary."
+ description: [
+ "VOAF: Indicates that the subject vocabulary declares some disjunct classes with the object vocabulary.",
+ "MOD: Indicates that the subject ontologies declares some disjunct classes with the object ontologies." ]
extractedMetadata: true
- description: "MOD: Property used to specify obsolete objects"
- label: "Object obsolete property"
- display: "Object description properties"
- enforcedValues: {
- "http://www.w3.org/2002/07/owl#owl:deprecated": "deprecated",
- "http://identifiers.org/idot/obsolete": "obsolete"
- }
+ metadataMappings: [ "mod:hasDisjunctionsWith", "voaf:hasDisjunctionsWith" ]
-createdProperty:
+#Translation
+workTranslation:
+ display: "relations"
+ label: "Translation"
+ helpText: "A work that is a translation of the content of this work."
+ description: [
+ "MOD: Ontologies which are translations from the described ontology. ",
+ "SCHEMA: A work that is a translation of the content of this work.",
+ "ADMS: Links Assets that are translations of each other." ]
extractedMetadata: true
- description: "MOD: Property used to specify the date of creation of a class or another object in the ontology."
- label: "Object creation date property"
- display: "Object description properties"
- enforcedValues: {
- "http://purl.org/dc/terms/created ": "created",
- "http://purl.org/dc/terms/issued ": "issued",
- "http://purl.org/dc/terms/date": "date",
- "http://purl.org/pav/authoredOn ": "authoredOn",
- "http://purl.org/pav/contributedOn": "contributedOn",
- "http://purl.org/pav/createdOn": "createdOn",
- "http://www.isibang.ac.in/ns/mod/1.1/creationDate": "creationDate",
- "http://schema.org/dateCreated ": "dateCreated",
- "http://www.w3.org/ns/prov#generatedAtTime": "generatedAtTime"
- }
+ metadataMappings: [ "mod:translation", "schema:workTranslation", "adms:translation" ]
+
+#Translation of
+translationOfWork:
+ display: "relations"
+ label: "Translation of"
+ helpText: "The work that this work has been translated from."
+ description: [
+ "SCHEMA: The work that this work has been translated from.",
+ "ADMS: Links Assets that are translations of each other." ]
+ extractedMetadata: true
+ metadataMappings: [ "adms:translation", "schema:translationOfWork" ]
+
+### Content
-modifiedProperty:
+#Identifier pattern
+uriRegexPattern:
+ display: "content"
+ label: "Identifier pattern"
+ helpText: "A regular expression that matches the URIs of a void:Dataset's entities."
+ example: 'http://purl.obolibrary.org/obo/ENVO_'
+ description: [
+ "VOID: A regular expression that matches the URIs of a void:Dataset's entities.",
+ "IDOT: Regular expression describing alphanumeric strings used to identify items (or records) in a dataset." ]
extractedMetadata: true
- description: "MOD: Property used to specify the date of modification of a class or another object in the ontology."
- label: "Object modification date property"
- display: "Object description properties"
- enforcedValues: {
- "http://purl.org/dc/terms" ,
- "http://schema.org" ,
- "http://purl.org/pav" ,
- "https://omv2.sourceforge.net"
- }
+ metadataMappings: [ "void:uriRegexPattern", "idot:identifierPattern" ]
-hierarchyProperty:
+#Preferred Namespace URI
+preferredNamespaceUri:
+ display: "content"
+ label: "Preferred namespace URI"
+ helpText: "The preferred namespace URI to use when using terms from this vocabulary in an XML document."
+ example: ''
+ description: [
+ "VANN: The preferred namespace URI to use when using terms from this vocabulary in an XML document.",
+ "VOID: A URI that is a common string prefix of all the entity URIs in a void:Dataset." ]
extractedMetadata: true
- description: "MOD: A property that is used to specify the hierarchy"
- label: "Hierarchy property"
- display: "Object description properties"
- enforcedValues: {
- "https://www.w3.org/TR/rdf-schema/#ch_subclassof": "rdfs:subClassOf",
- "http://www.sparna.fr/skos/SKOS-traduction-francais.html#broader": "skos:broader"
- }
+ metadataMappings: [ "void:uriSpace", "vann:preferredNamespaceUri" ]
-pullLocation:
+#Preferred Namespace Prefix
+preferredNamespacePrefix:
+ display: "content"
+ label: "Preferred namespace prefix"
+ helpText: "The preferred namespace prefix to use when using terms from this ontology."
+ example: ''
+ description: [
+ "VANN: The preferred namespace prefix to use when using terms from this vocabulary in an XML document.",
+ "IDOT: Short label that is commonly used to refer to the dataset. Often used to identify the dataset in IRIs for specific items (or records). This may also stand in place of the base IRI of the dataset (e.g. see http://prefix.cc)." ]
extractedMetadata: true
- description: [ "DCAT: A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint.",
- "OMV: The location where the ontology can be found.",
- "DOAP: Web page from which the project software can be downloaded" ]
- metadataMappings: [ "doap:download-page" , "dcat:accessURL" , "omv:resourceLocator" ]
- label: "Access URL"
- display: "Links"
- enforcedValues: {
- "https://www.w3.org/TR/rdf-schema/#ch_subclassof": "rdfs:subClassOf",
- "http://www.sparna.fr/skos/SKOS-traduction-francais.html#broader": "skos:broader"
- }
+ metadataMappings: [ "vann:preferredNamespacePrefix", "idot:preferredPrefix", "oboInOwl:default-namespace", "oboInOwl:hasDefaultNamespace" ]
+#Root of obsolete branch
obsoleteParent:
- extractedMetadata: true
- description: [ "MOD: property used to specify the root of an obsolete branch in the ontology." ]
- metadataMappings: [ "doap:download-page" , "dcat:accessURL" , "omv:resourceLocator" ]
+ display: "content"
label: "Root of obsolete branch"
- display: "Content"
-
-
-maxDepth:
+ helpText: "Property used to specify the root of an obsolete branch in the ontology."
+ description: [
+ "MOD: Property used to specify the root of an obsolete branch in the ontology." ]
extractedMetadata: true
- description: "MOD: Maximum depth of the hierarchy tree (BioPortal definition)."
- metadataMappings: "mod:maxDepth"
- label: "Maximum depth of the hierarchy"
- display: "Metrics"
+ metadataMappings: [ "mod:obsoleteParent" ]
-maxChildCount:
- extractedMetadata: true
- description: "MOD: Maximum number of children per class (BioPortal definition)."
- metadataMappings: "mod:maxChildCount"
- label: "Maximum number of children per class"
- display: "Metrics"
+#Example of resource
+exampleIdentifier:
+ display: "content"
+ label: "Example of resource"
+ helpText: "An example identifier used by one item in the ontology."
+ description: [
+ "VOID: Example resource of dataset.",
+ "IDOT: An example identifier used by one item (or record) from a dataset." ]
+ extractedMetadata: false
+ metadataMappings: [ "void:exampleResource", "idot:exampleIdentifier" ]
-averageChildCount:
- extractedMetadata: true
- description: "MOD: Average number of children per class (BioPortal definition)."
- metadataMappings: "mod:averageChildCount"
- label: "Average number of children per class"
- display: "Metrics"
+#Key classes
+keyClasses:
+ display: "content"
+ label: "Key classes"
+ helpText: "Representative classes in the ontology."
+ description: [
+ "OMV: Representative classes in the ontology.",
+ "FOAF: The primary topic of some page or document.",
+ "SCHEMA: Indicates the primary entity described in some page or other CreativeWork." ]
+ extractedMetadata: false
+ metadataMappings: [ "foaf:primaryTopic", "schema:mainEntity", "omv:keyClasses"]
-classesWithOneChild:
+#Metadata vocabulary used
+metadataVoc:
+ display: "content"
+ label: "Metadata vocabulary used"
+ helpText: "Indicates that the subject vocabulary uses the object vocabulary in metadata at vocabulary or element level."
+ description: [
+ "VOAF: Indicates that the subject vocabulary uses the object vocabulary in metadata at vocabulary or element level.",
+ "SCHEMA: Indicates (by URL or string) a particular version of a schema used in some CreativeWork.",
+ "ADMS: A schema according to which the Asset Repository can provide data about its content, e.g. ADMS.",
+ "MOD: A vocabulary(ies) that is used and/or referred to create the current ontology." ]
+ extractedMetadata: false
+ enforcedValues: {
+ "http://w3id.org/nkos/nkostype#classification_schema": "Classification scheme",
+ "http://www.w3.org/2000/01/rdf-schema#": "RDF Schema (RDFS)",
+ "http://www.w3.org/2002/07/owl#": "OWL 2 Web Ontology Language (OWL)",
+ "http://www.w3.org/2004/02/skos/core#": "Simple Knowledge Organization System (SKOS)",
+ "http://purl.org/dc/elements/1.1/": "Dublin core (DC)",
+ "http://purl.org/dc/terms/": "Dublin core (DCTERMS)",
+ "http://omv.ontoware.org/2005/05/ontology#": "Ontology Metadata Vocabulary (OMV)",
+ "http://www.isibang.ac.in/ns/mod#": "Metadata for Ontology Description and Publication (MOD 1)",
+ "https://w3id.org/mod": "Metadata for Ontology Description and Publication (MOD 2)",
+ "http://kannel.open.ac.uk/ontology#": "Descriptive Ontology of Ontology Relations (DOOR)",
+ "http://purl.org/vocommons/voaf#": "Vocabulary of a Friend (VOAF)",
+ "http://rdfs.org/ns/void#": "Vocabulary of Interlinked Datasets (VOID)",
+ "http://biomodels.net/vocab/idot.rdf#": "Identifiers.org (IDOT)",
+ "http://purl.org/vocab/vann/": "Vocabulary for annotating vocabulary descriptions (VANN)",
+ "http://www.w3.org/ns/dcat#": "Data Catalog Vocabulary (DCAT)",
+ "http://www.w3.org/ns/adms#": "Asset Description Metadata Schema (ADMS)",
+ "http://schema.org/": "Schema.org (SCHEMA)",
+ "http://xmlns.com/foaf/0.1/": "Friend of a Friend Vocabulary (FOAF)",
+ "http://usefulinc.com/ns/doap#": "Description of a Project (DOAP)",
+ "http://creativecommons.org/ns#": "Creative Commons Rights Expression Language (CC)",
+ "http://www.w3.org/ns/prov#": "Provenance Ontology (PROV)",
+ "http://purl.org/pav/": "Provenance, Authoring and Versioning (PAV)",
+ "http://www.geneontology.org/formats/oboInOwl#": "OboInOwl Mappings (OBOINOWL)",
+ "http://www.w3.org/ns/sparql-service-description#": "SPARQL 1.1 Service Description (SD)",
+ "http://w3id.org/nkos#": "Networked Knowledge Organization Systems Dublin Core Application Profile (NKOS)" }
+ metadataMappings: [ "mod:metadataVoc", "mod:vocabularyUsed", "adms:supportedSchema", "schema:schemaVersion", "voaf:metadataVoc"]
+
+#Root resources => Ontology object (roots)
+#Classes partition => Ontology object (classes)
+#Properties partition => Ontology object (properties)
+#Has version => Ontology object (submissions)
+#Changes => Ontology object (diffFilePath)
+
+### Media
+
+#Associated media
+associatedMedia:
+ display: "media"
+ label: "The URL of a media associated to the ontology."
+ helpText: "A media object that encodes this ontology. This property is a synonym for encoding."
+ description: [
+ "SCHEMA: A media object that encodes this CreativeWork. This property is a synonym for encoding." ]
extractedMetadata: true
- description: "MOD: Average number of children per class (BioPortal definition)."
- metadataMappings: "mod:classesWithOneChild"
- label: "Number of classes with a single child"
- display: "Metrics"
+ metadataMappings: [ "schema:associatedMedia" ]
-classesWithMoreThan25Children:
+#Depiction
+depiction:
+ display: "media"
+ label: "Depiction"
+ helpText: "The URL of an image or depiction representing the ontology."
+ description: [
+ "FOAF: A depiction of something.",
+ "DOAP: Web page with screenshots of project. An image of the item. SCHEMA: An image of the item. This can be a URL or a fully described ImageObject.",
+ "SCHEMA: An image of the item. This can be a URL or a fully described ImageObject." ]
extractedMetadata: true
- description: "MOD: Number of classes that have more than 25 direct subclasses (BioPortal definition)."
- metadataMappings: "mod:classesWithMoreThan25Children"
- label: "Number of classes with more than 25 children"
- display: "Metrics"
-
-classesWithNoDefinition:
+ metadataMappings: [ "doap:screenshots", "schema:image" ]
+
+#Logo
+logo:
+ display: "media"
+ label: "Logo"
+ helpText: "The URL of the ontology logo."
+ description: [
+ "FOAF: A logo representing something.",
+ "SCHEMA: An associated logo." ]
extractedMetadata: true
- description: "MOD: Number of classes that have no value for the definition property (BioPortal definition). For ontologies in OBO and RRF formats, the property for definition is part of the language. For OWL ontologies, the authors specify this property as part of the ontology metadata (the default is skos:definition)."
- metadataMappings: "mod:classesWithNoDefinition"
- label: "Number of classes with no definition"
- display: "Metrics"
+ metadataMappings: [ "schema:logo", "foaf:logo"]
+
+### Metrics
+
+#Number of classes => Metrics object (classes)
+#Number of individuals => Metrics object (individuals)
+#Number of properties => Metrics object (properties)
+#Number of object properties => Not implemented
+#Number of data properties => Not implemented
+#Number of axioms or triples => Not implemented
+#Number of labels => Not implemented
+#Number of deprecated objects => Not implemented
+#Byte size => Not implemented
+#Maximum depth of the hierarchy => Metrics object (maxDepth)
+#Maximum number of children per class => Metrics object (maxChildCount)
+#Average number of children per class => Metrics object (averageChildCount)
+#Number of classes with a single child => Metrics object (classesWithOneChild)
+#Number of classes with more than 25 children => Metrics object (classesWithMoreThan25Children)
+#Number of classes with no definition => Metrics object (classesWithNoDefinition)
+
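Every attribute in this file follows the same shape: display, label, helpText, description, extractedMetadata, plus optional metadataMappings, enforcedValues, and example. A sketch of reading one definition back; the YAML path is a guess, not confirmed by this diff:

    require 'yaml'

    # Hypothetical path; substitute wherever this metadata YAML lives in the repo.
    metadata = YAML.load_file('config/submission_metadata.yml')

    endpoint = metadata['endpoint']
    endpoint['display']           # => "links"
    endpoint['label']             # => "SPARQL endpoint"
    endpoint['extractedMetadata'] # => true
    endpoint['metadataMappings']  # => ["sd:endpoint", "void:sparqlEndpoint"]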
diff --git a/config/solr/property_search/enumsconfig.xml b/config/solr/property_search/enumsconfig.xml
deleted file mode 100644
index 72e7b7d3..00000000
--- a/config/solr/property_search/enumsconfig.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<!-- markup lost in extraction: the deleted enumsconfig.xml defined enum values
-     ONTOLOGY, VALUE_SET_COLLECTION and ANNOTATION, DATATYPE, OBJECT -->
\ No newline at end of file
diff --git a/config/solr/property_search/mapping-ISOLatin1Accent.txt b/config/solr/property_search/mapping-ISOLatin1Accent.txt
deleted file mode 100644
index ede77425..00000000
--- a/config/solr/property_search/mapping-ISOLatin1Accent.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Syntax:
-# "source" => "target"
-# "source".length() > 0 (source cannot be empty.)
-# "target".length() >= 0 (target can be empty.)
-
-# example:
-# "À" => "A"
-# "\u00C0" => "A"
-# "\u00C0" => "\u0041"
-# "ß" => "ss"
-# "\t" => " "
-# "\n" => ""
-
-# À => A
-"\u00C0" => "A"
-
-# Á => A
-"\u00C1" => "A"
-
-# Â => A
-"\u00C2" => "A"
-
-# Ã => A
-"\u00C3" => "A"
-
-# Ä => A
-"\u00C4" => "A"
-
-# Å => A
-"\u00C5" => "A"
-
-# Æ => AE
-"\u00C6" => "AE"
-
-# Ç => C
-"\u00C7" => "C"
-
-# È => E
-"\u00C8" => "E"
-
-# É => E
-"\u00C9" => "E"
-
-# Ê => E
-"\u00CA" => "E"
-
-# Ë => E
-"\u00CB" => "E"
-
-# Ì => I
-"\u00CC" => "I"
-
-# Í => I
-"\u00CD" => "I"
-
-# Î => I
-"\u00CE" => "I"
-
-# Ï => I
-"\u00CF" => "I"
-
-# IJ => IJ
-"\u0132" => "IJ"
-
-# Ð => D
-"\u00D0" => "D"
-
-# Ñ => N
-"\u00D1" => "N"
-
-# Ò => O
-"\u00D2" => "O"
-
-# Ó => O
-"\u00D3" => "O"
-
-# Ô => O
-"\u00D4" => "O"
-
-# Õ => O
-"\u00D5" => "O"
-
-# Ö => O
-"\u00D6" => "O"
-
-# Ø => O
-"\u00D8" => "O"
-
-# Œ => OE
-"\u0152" => "OE"
-
-# Þ
-"\u00DE" => "TH"
-
-# Ù => U
-"\u00D9" => "U"
-
-# Ú => U
-"\u00DA" => "U"
-
-# Û => U
-"\u00DB" => "U"
-
-# Ü => U
-"\u00DC" => "U"
-
-# Ý => Y
-"\u00DD" => "Y"
-
-# Ÿ => Y
-"\u0178" => "Y"
-
-# à => a
-"\u00E0" => "a"
-
-# á => a
-"\u00E1" => "a"
-
-# â => a
-"\u00E2" => "a"
-
-# ã => a
-"\u00E3" => "a"
-
-# ä => a
-"\u00E4" => "a"
-
-# å => a
-"\u00E5" => "a"
-
-# æ => ae
-"\u00E6" => "ae"
-
-# ç => c
-"\u00E7" => "c"
-
-# è => e
-"\u00E8" => "e"
-
-# é => e
-"\u00E9" => "e"
-
-# ê => e
-"\u00EA" => "e"
-
-# ë => e
-"\u00EB" => "e"
-
-# ì => i
-"\u00EC" => "i"
-
-# í => i
-"\u00ED" => "i"
-
-# î => i
-"\u00EE" => "i"
-
-# ï => i
-"\u00EF" => "i"
-
-# ij => ij
-"\u0133" => "ij"
-
-# ð => d
-"\u00F0" => "d"
-
-# ñ => n
-"\u00F1" => "n"
-
-# ò => o
-"\u00F2" => "o"
-
-# ó => o
-"\u00F3" => "o"
-
-# ô => o
-"\u00F4" => "o"
-
-# õ => o
-"\u00F5" => "o"
-
-# ö => o
-"\u00F6" => "o"
-
-# ø => o
-"\u00F8" => "o"
-
-# œ => oe
-"\u0153" => "oe"
-
-# ß => ss
-"\u00DF" => "ss"
-
-# þ => th
-"\u00FE" => "th"
-
-# ù => u
-"\u00F9" => "u"
-
-# ú => u
-"\u00FA" => "u"
-
-# û => u
-"\u00FB" => "u"
-
-# ü => u
-"\u00FC" => "u"
-
-# ý => y
-"\u00FD" => "y"
-
-# ÿ => y
-"\u00FF" => "y"
-
-# ff => ff
-"\uFB00" => "ff"
-
-# fi => fi
-"\uFB01" => "fi"
-
-# fl => fl
-"\uFB02" => "fl"
-
-# ffi => ffi
-"\uFB03" => "ffi"
-
-# ffl => ffl
-"\uFB04" => "ffl"
-
-# ſt => ft
-"\uFB05" => "ft"
-
-# st => st
-"\uFB06" => "st"
diff --git a/config/solr/property_search/schema.xml b/config/solr/property_search/schema.xml
deleted file mode 100644
index 20824ea6..00000000
--- a/config/solr/property_search/schema.xml
+++ /dev/null
@@ -1,1179 +0,0 @@
-<!-- markup lost in extraction: the 1179-line schema.xml is deleted wholesale;
-     only the stray value "id" survives from its contents -->
diff --git a/config/solr/property_search/solrconfig.xml b/config/solr/property_search/solrconfig.xml
deleted file mode 100644
index 771a0f32..00000000
--- a/config/solr/property_search/solrconfig.xml
+++ /dev/null
@@ -1,1299 +0,0 @@
-<!-- markup lost in extraction: the 1299-line solrconfig.xml is deleted wholesale;
-     surviving fragments show version 8.8.2, autoCommit maxTime 15000,
-     maxBooleanClauses 500000, and spellcheck/terms/highlighting handler settings -->
deleted file mode 100644
index d9d089e4..00000000
--- a/config/solr/solr.xml
+++ /dev/null
@@ -1,60 +0,0 @@
-<!-- markup lost in extraction: the 60-line solr.xml is deleted wholesale; surviving
-     fragments show maxBooleanClauses, sharedLib/allowPaths, solrcloud host and
-     zookeeper timeout/credential settings, and shard handler socket/conn timeouts -->
diff --git a/config/solr/term_search/enumsconfig.xml b/config/solr/term_search/enumsconfig.xml
deleted file mode 100644
index 72e7b7d3..00000000
--- a/config/solr/term_search/enumsconfig.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<!-- markup lost in extraction: the deleted enumsconfig.xml defined enum values
-     ONTOLOGY, VALUE_SET_COLLECTION and ANNOTATION, DATATYPE, OBJECT -->
\ No newline at end of file
diff --git a/config/solr/term_search/mapping-ISOLatin1Accent.txt b/config/solr/term_search/mapping-ISOLatin1Accent.txt
deleted file mode 100644
index ede77425..00000000
--- a/config/solr/term_search/mapping-ISOLatin1Accent.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Syntax:
-# "source" => "target"
-# "source".length() > 0 (source cannot be empty.)
-# "target".length() >= 0 (target can be empty.)
-
-# example:
-# "À" => "A"
-# "\u00C0" => "A"
-# "\u00C0" => "\u0041"
-# "ß" => "ss"
-# "\t" => " "
-# "\n" => ""
-
-# À => A
-"\u00C0" => "A"
-
-# Á => A
-"\u00C1" => "A"
-
-# Â => A
-"\u00C2" => "A"
-
-# Ã => A
-"\u00C3" => "A"
-
-# Ä => A
-"\u00C4" => "A"
-
-# Å => A
-"\u00C5" => "A"
-
-# Æ => AE
-"\u00C6" => "AE"
-
-# Ç => C
-"\u00C7" => "C"
-
-# È => E
-"\u00C8" => "E"
-
-# É => E
-"\u00C9" => "E"
-
-# Ê => E
-"\u00CA" => "E"
-
-# Ë => E
-"\u00CB" => "E"
-
-# Ì => I
-"\u00CC" => "I"
-
-# Í => I
-"\u00CD" => "I"
-
-# Î => I
-"\u00CE" => "I"
-
-# Ï => I
-"\u00CF" => "I"
-
-# IJ => IJ
-"\u0132" => "IJ"
-
-# Ð => D
-"\u00D0" => "D"
-
-# Ñ => N
-"\u00D1" => "N"
-
-# Ò => O
-"\u00D2" => "O"
-
-# Ó => O
-"\u00D3" => "O"
-
-# Ô => O
-"\u00D4" => "O"
-
-# Õ => O
-"\u00D5" => "O"
-
-# Ö => O
-"\u00D6" => "O"
-
-# Ø => O
-"\u00D8" => "O"
-
-# Œ => OE
-"\u0152" => "OE"
-
-# Þ
-"\u00DE" => "TH"
-
-# Ù => U
-"\u00D9" => "U"
-
-# Ú => U
-"\u00DA" => "U"
-
-# Û => U
-"\u00DB" => "U"
-
-# Ü => U
-"\u00DC" => "U"
-
-# Ý => Y
-"\u00DD" => "Y"
-
-# Ÿ => Y
-"\u0178" => "Y"
-
-# à => a
-"\u00E0" => "a"
-
-# á => a
-"\u00E1" => "a"
-
-# â => a
-"\u00E2" => "a"
-
-# ã => a
-"\u00E3" => "a"
-
-# ä => a
-"\u00E4" => "a"
-
-# å => a
-"\u00E5" => "a"
-
-# æ => ae
-"\u00E6" => "ae"
-
-# ç => c
-"\u00E7" => "c"
-
-# è => e
-"\u00E8" => "e"
-
-# é => e
-"\u00E9" => "e"
-
-# ê => e
-"\u00EA" => "e"
-
-# ë => e
-"\u00EB" => "e"
-
-# ì => i
-"\u00EC" => "i"
-
-# í => i
-"\u00ED" => "i"
-
-# î => i
-"\u00EE" => "i"
-
-# ï => i
-"\u00EF" => "i"
-
-# ij => ij
-"\u0133" => "ij"
-
-# ð => d
-"\u00F0" => "d"
-
-# ñ => n
-"\u00F1" => "n"
-
-# ò => o
-"\u00F2" => "o"
-
-# ó => o
-"\u00F3" => "o"
-
-# ô => o
-"\u00F4" => "o"
-
-# õ => o
-"\u00F5" => "o"
-
-# ö => o
-"\u00F6" => "o"
-
-# ø => o
-"\u00F8" => "o"
-
-# œ => oe
-"\u0153" => "oe"
-
-# ß => ss
-"\u00DF" => "ss"
-
-# þ => th
-"\u00FE" => "th"
-
-# ù => u
-"\u00F9" => "u"
-
-# ú => u
-"\u00FA" => "u"
-
-# û => u
-"\u00FB" => "u"
-
-# ü => u
-"\u00FC" => "u"
-
-# ý => y
-"\u00FD" => "y"
-
-# ÿ => y
-"\u00FF" => "y"
-
-# ff => ff
-"\uFB00" => "ff"
-
-# fi => fi
-"\uFB01" => "fi"
-
-# fl => fl
-"\uFB02" => "fl"
-
-# ffi => ffi
-"\uFB03" => "ffi"
-
-# ffl => ffl
-"\uFB04" => "ffl"
-
-# ſt => ft
-"\uFB05" => "ft"
-
-# st => st
-"\uFB06" => "st"
diff --git a/config/solr/term_search/solrconfig.xml b/config/solr/term_search/solrconfig.xml
deleted file mode 100644
index 771a0f32..00000000
--- a/config/solr/term_search/solrconfig.xml
+++ /dev/null
@@ -1,1299 +0,0 @@
-<!-- markup lost in extraction: the 1299-line solrconfig.xml is deleted wholesale;
-     surviving fragments show version 8.8.2, autoCommit maxTime 15000,
-     maxBooleanClauses 500000, and spellcheck/terms/highlighting handler settings -->
diff --git a/docker-compose.yml b/docker-compose.yml
index a44b5818..55229ff6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,72 +1,49 @@
x-app: &app
- build:
- context: .
- args:
- RUBY_VERSION: '2.7'
- # Increase the version number in the image tag every time Dockerfile or its arguments is changed
- image: ontologies_ld-dev:0.0.2
- environment: &env
- # default bundle config resolves to /usr/local/bundle/config inside of the container
- # we are setting it to local app directory if we need to use 'bundle config local'
- BUNDLE_APP_CONFIG: /srv/ontoportal/ontologies_api/.bundle
- BUNDLE_PATH: /srv/ontoportal/bundle
- COVERAGE: 'true' # enable simplecov code coverage
- REDIS_HOST: redis-ut
- REDIS_PORT: 6379
- SOLR_TERM_SEARCH_URL: http://solr-ut:8983/solr/term_search_core1
- SOLR_PROP_SEARCH_URL: http://solr-ut:8983/solr/prop_search_core1
- stdin_open: true
- tty: true
- command: /bin/bash
- volumes:
- # bundle volume for hosting gems installed by bundle; it speeds up gem install in local development
- - bundle:/srv/ontoportal/bundle
- - .:/srv/ontoportal/ontologies_linked_data
- # mount directory containing development version of the gems if you need to use 'bundle config local'
- #- /Users/alexskr/ontoportal:/Users/alexskr/ontoportal
- depends_on:
- - solr-ut
- - redis-ut
+ build:
+ context: .
+ args:
+ RUBY_VERSION: '2.7'
+  # Increase the version number in the image tag every time the Dockerfile or its arguments change
+ image: ontologies_ld-dev:0.0.2
+ environment: &env
+    # default bundle config resolves to /usr/local/bundle/config inside the container
+    # we are setting it to the local app directory if we need to use 'bundle config local'
+ BUNDLE_APP_CONFIG: /srv/ontoportal/ontologies_api/.bundle
+ BUNDLE_PATH: /srv/ontoportal/bundle
+ COVERAGE: 'true' # enable simplecov code coverage
+ REDIS_HOST: redis-ut
+ REDIS_PORT: 6379
+ SOLR_TERM_SEARCH_URL: http://solr-term-ut:8983/solr/term_search_core1
+ SOLR_PROP_SEARCH_URL: http://solr-prop-ut:8983/solr/prop_search_core1
+ stdin_open: true
+ tty: true
+ command: /bin/bash
+ volumes:
+ # bundle volume for hosting gems installed by bundle; it speeds up gem install in local development
+ - bundle:/srv/ontoportal/bundle
+ - .:/srv/ontoportal/ontologies_linked_data
+ # mount directory containing development version of the gems if you need to use 'bundle config local'
+ #- /Users/alexskr/ontoportal:/Users/alexskr/ontoportal
+ depends_on: &depends_on
+ solr-prop-ut:
+ condition: service_healthy
+ solr-term-ut:
+ condition: service_healthy
+ redis-ut:
+ condition: service_healthy
services:
- # environment wtih 4store backend
- ruby:
- <<: *app
- environment:
- <<: *env
- GOO_BACKEND_NAME: 4store
- GOO_PORT: 9000
- GOO_HOST: 4store-ut
- GOO_PATH_QUERY: /sparql/
- GOO_PATH_DATA: /data/
- GOO_PATH_UPDATE: /update/
- profiles:
- - 4store
- depends_on:
- - solr-ut
- - redis-ut
- - 4store-ut
-
- # environment with AllegroGraph backend
- ruby-agraph:
- <<: *app
- environment:
- <<: *env
- GOO_BACKEND_NAME: ag
- GOO_PORT: 10035
- GOO_HOST: agraph-ut
- GOO_PATH_QUERY: /repositories/bioportal_test
- GOO_PATH_DATA: /repositories/bioportal_test/statements
- GOO_PATH_UPDATE: /repositories/bioportal_test/statements
- profiles:
- - agraph
- depends_on:
- - solr-ut
- - redis-ut
- - agraph-ut
redis-ut:
image: redis
+ ports:
+ - "6379:6379"
+ command: [ "redis-server", "--save", "", "--appendonly", "no" ]
+ healthcheck:
+ test: redis-cli ping
+ interval: 10s
+ timeout: 3s
+ retries: 10
4store-ut:
image: bde2020/4store
@@ -74,36 +51,84 @@ services:
bash -c "4s-backend-setup --segments 4 ontoportal_kb
&& 4s-backend ontoportal_kb
&& 4s-httpd -D -s-1 -p 9000 ontoportal_kb"
+ ports:
+ - "9000:9000"
profiles:
- - 4store
+ - fs
solr-ut:
- image: solr:8
- volumes:
- - ./test/solr/configsets:/configsets:ro
- #ports:
- # - "8983:8983"
- command: >
- bash -c "precreate-core term_search_core1 /configsets/term_search
- && precreate-core prop_search_core1 /configsets/property_search
- && solr-foreground"
+ image: solr:8.11.2
+ ports:
+ - 8983:8983
+ command: bin/solr start -cloud -f
+
+
agraph-ut:
- image: franzinc/agraph:v7.3.0
+ image: franzinc/agraph:v8.1.0
+ platform: linux/amd64
environment:
- AGRAPH_SUPER_USER=test
- AGRAPH_SUPER_PASSWORD=xyzzy
shm_size: 1g
- # ports:
- # - 10035:10035
+ ports:
+ # - 10035:10035
+ - 10000-10035:10000-10035
+ volumes:
+ - agdata:/agraph/data
+ # - ./agraph/etc:/agraph/etc
command: >
- bash -c "/agraph/bin/agraph-control --config /agraph/etc/agraph.cfg start
- ; agtool repos create bioportal_test
- ; agtool users add anonymous
- ; agtool users grant anonymous root:bioportal_test:rw
- ; tail -f /agraph/data/agraph.log"
+ bash -c "/agraph/bin/agraph-control --config /agraph/etc/agraph.cfg start
+ ; agtool repos create ontoportal_test --supersede
+ ; agtool users add anonymous
+ ; agtool users grant anonymous root:ontoportal_test:rw
+ ; tail -f /agraph/data/agraph.log"
+ # healthcheck:
+ # test: ["CMD-SHELL", "curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1"]
+ # start_period: 10s
+ # interval: 10s
+ # timeout: 5s
+ # retries: 5
+ profiles:
+ - ag
+
+ virtuoso-ut:
+ image: tenforce/virtuoso:virtuoso7.2.5
+ platform: linux/amd64
+ environment:
+ - SPARQL_UPDATE=true
+ ports:
+ - 1111:1111
+ - 8890:8890
+ profiles:
+ - vo
+ healthcheck:
+ test: [ "CMD-SHELL", "curl -sf http://localhost:8890/sparql || exit 1" ]
+ start_period: 10s
+ interval: 60s
+ timeout: 5s
+ retries: 3
+
+ graphdb-ut:
+ image: ontotext/graphdb:10.3.3
+ platform: linux/amd64
+ privileged: true
+ environment:
+ GDB_HEAP_SIZE: 5G
+ GDB_JAVA_OPTS: >-
+ -Xms5g -Xmx5g
+ ports:
+ - 7200:7200
+ - 7300:7300
+ volumes:
+ - ./test/data/graphdb-repo-config.ttl:/opt/graphdb/dist/configs/templates/data/graphdb-repo-config.ttl
+ - ./test/data/graphdb-test-load.nt:/opt/graphdb/dist/configs/templates/data/graphdb-test-load.nt
+
+ entrypoint: >
+ bash -c " importrdf load -f -c /opt/graphdb/dist/configs/templates/data/graphdb-repo-config.ttl -m parallel /opt/graphdb/dist/configs/templates/data/graphdb-test-load.nt ; graphdb -Ddefault.min.distinct.threshold=3000 "
profiles:
- - agraph
+ - gb
volumes:
bundle:
+ agdata:
\ No newline at end of file
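Each triplestore now sits behind a compose profile (fs, ag, vo, gb), so a test run starts only the selected backend plus the shared Redis and Solr services. A sketch of how test setup could pick up the environment the compose file injects; the variable names come from the block above, the defaults are guesses, and this is not the repo's actual configuration code:

    # Sketch only: wire the compose-provided environment into a test config.
    redis_host = ENV.fetch('REDIS_HOST', 'localhost')
    redis_port = Integer(ENV.fetch('REDIS_PORT', '6379'))
    solr_term  = ENV.fetch('SOLR_TERM_SEARCH_URL', 'http://localhost:8983/solr/term_search_core1')
    solr_prop  = ENV.fetch('SOLR_PROP_SEARCH_URL', 'http://localhost:8983/solr/prop_search_core1')

    puts "redis #{redis_host}:#{redis_port}, solr #{solr_term} | #{solr_prop}"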
diff --git a/lib/ontologies_linked_data.rb b/lib/ontologies_linked_data.rb
index 76bd316d..868c1e5d 100644
--- a/lib/ontologies_linked_data.rb
+++ b/lib/ontologies_linked_data.rb
@@ -1,11 +1,11 @@
-require "goo"
+require 'goo'
# Make sure we're in the load path
-lib_dir = File.dirname(__FILE__)+"/../lib"
+lib_dir = "#{File.dirname(__FILE__)}/../lib"
$LOAD_PATH.unshift lib_dir unless $LOAD_PATH.include?(lib_dir)
# Setup Goo (repo connection and namespaces)
-require "ontologies_linked_data/config/config"
+require 'ontologies_linked_data/config/config'
project_root = File.dirname(File.absolute_path(__FILE__))
@@ -38,22 +38,43 @@
require "ontologies_linked_data/metrics/metrics"
# Require base model
-require "ontologies_linked_data/models/base"
+require 'ontologies_linked_data/models/base'
-# Require all models
+
+
+
+# Require all models and services
+project_root = File.dirname(File.absolute_path(__FILE__))
+# Require base services
+require 'ontologies_linked_data/services/submission_process/submission_process'
# We need to require deterministic - that is why we have the sort.
-models = Dir.glob(project_root + '/ontologies_linked_data/models/**/*.rb').sort
+
+models = Dir.glob("#{project_root}/ontologies_linked_data/services/**/*.rb").sort
models.each do |m|
require m
end
+# We need requires to be deterministic - that is why we have the sort.
+models = Dir.glob("#{project_root}/ontologies_linked_data/models/concerns/**/*.rb").sort
+models.each do |m|
+ require m
+end
+
+# We need requires to be deterministic - that is why we have the sort.
+models = Dir.glob("#{project_root}/ontologies_linked_data/models/**/*.rb").sort
+models.each do |m|
+ require m
+end
+
+
+
module LinkedData
def rootdir
File.dirname(File.absolute_path(__FILE__))
end
def bindir
- File.expand_path(rootdir + '/../bin')
+ File.expand_path("#{rootdir}/../bin")
end
end
diff --git a/lib/ontologies_linked_data/concerns/analytics.rb b/lib/ontologies_linked_data/concerns/analytics.rb
new file mode 100644
index 00000000..58853e2f
--- /dev/null
+++ b/lib/ontologies_linked_data/concerns/analytics.rb
@@ -0,0 +1,52 @@
+module LinkedData
+ module Concerns
+ module Analytics
+ def self.included base
+ base.extend ClassMethods
+ end
+
+ module ClassMethods
+ def load_data(field_name)
+ @@redis ||= Redis.new(:host => LinkedData.settings.ontology_analytics_redis_host,
+ :port => LinkedData.settings.ontology_analytics_redis_port,
+ :timeout => 30)
+ raw_data = @@redis.get(field_name)
+ raw_data.nil? ? Hash.new : Marshal.load(raw_data)
+ end
+
+ def analytics_redis_key
+          raise NotImplementedError # the class that includes it needs to implement it
+ end
+
+ def load_analytics_data
+ self.load_data(analytics_redis_key)
+ end
+
+ def analytics(year = nil, month = nil)
+ retrieve_analytics(year, month)
+ end
+
+        # Retrieves analytics for all ontologies, optionally filtered by year and month
+ def retrieve_analytics(year = nil, month = nil)
+ analytics = self.load_analytics_data
+
+ year = year.to_s if year
+ month = month.to_s if month
+
+ unless analytics.empty?
+ analytics.values.each do |ont_analytics|
+ ont_analytics.delete_if { |key, _| key != year } unless year.nil?
+ ont_analytics.each { |_, val| val.delete_if { |key, __| key != month } } unless month.nil?
+ end
+ # sort results by the highest traffic values
+ analytics = Hash[analytics.sort_by { |_, v| v[year][month] }.reverse] if year && month
+ end
+ analytics
+ end
+ end
+
+ end
+ end
+end
+
+
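The new Analytics concern leaves analytics_redis_key to the including class. A minimal sketch of an includer; the class name and Redis key below are hypothetical:

    class OntologyLike
      include LinkedData::Concerns::Analytics

      # Hypothetical key; each includer names the Redis field holding its data.
      def self.analytics_redis_key
        'ontology_analytics'
      end
    end

    # Loads the Marshal-dumped hash from Redis, then filters it to one year/month.
    OntologyLike.analytics(2023, 10)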
diff --git a/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb b/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
index 600ed13a..92662e57 100644
--- a/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
+++ b/lib/ontologies_linked_data/concerns/mappings/mapping_creator.rb
@@ -198,7 +198,7 @@ def generate_class_urns(classes)
def find_submission_by_ontology_id(ontology_id)
return nil if ontology_id.nil?
- o = LinkedData::Models::Ontology.where(submissions: { URI: ontology_id })
+ o = LinkedData::Models::Ontology.where(submissions: { URI: RDF::URI.new(ontology_id) })
.include(submissions: %i[submissionId submissionStatus URI])
.first
o.nil? ? nil : o.latest_submission
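The RDF::URI cast above likely matters because a bare Ruby String and a URI serialize to different RDF terms, so a string-keyed pattern would not match the stored submission URI. A quick illustration (not the repo's query code):

    require 'rdf/ntriples'

    id = 'http://example.org/ontologies/FOO'
    RDF::URI.new(id).to_ntriples     # => "<http://example.org/ontologies/FOO>"
    RDF::Literal.new(id).to_ntriples # => "\"http://example.org/ontologies/FOO\""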
diff --git a/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb b/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb
deleted file mode 100644
index 08717380..00000000
--- a/lib/ontologies_linked_data/concerns/mappings/mapping_external.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-module LinkedData
- module Concerns
- module Mappings
- module ExternalUtils
-
-
- end
- end
- end
-end
-
diff --git a/lib/ontologies_linked_data/concerns/ontology_submissions/skos/skos_submission_roots.rb b/lib/ontologies_linked_data/concerns/ontology_submissions/skos/skos_submission_roots.rb
index 4a88c12a..717afb3a 100644
--- a/lib/ontologies_linked_data/concerns/ontology_submissions/skos/skos_submission_roots.rb
+++ b/lib/ontologies_linked_data/concerns/ontology_submissions/skos/skos_submission_roots.rb
@@ -42,7 +42,7 @@ def roots_by_query(query_body, page, paged, pagesize)
def roots_by_has_top_concept(concept_schemes, page, paged, pagesize)
query_body = <<-eos
- ?x #{RDF::SKOS[:hasTopConcept].to_ntriples} ?root .
+ ?x #{RDF::Vocab::SKOS[:hasTopConcept].to_ntriples} ?root .
#{concept_schemes_filter(concept_schemes)}
eos
roots_by_query query_body, page, paged, pagesize
@@ -50,7 +50,7 @@ def roots_by_has_top_concept(concept_schemes, page, paged, pagesize)
def roots_by_top_concept_of(concept_schemes, page, paged, pagesize)
query_body = <<-eos
- ?root #{RDF::SKOS[:topConceptOf].to_ntriples} ?x.
+ ?root #{RDF::Vocab::SKOS[:topConceptOf].to_ntriples} ?x.
#{concept_schemes_filter(concept_schemes)}
eos
roots_by_query query_body, page, paged, pagesize
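The RDF::SKOS to RDF::Vocab::SKOS switch tracks the rdf gem's move of its bundled vocabularies into the separate rdf-vocab gem; the emitted SPARQL term is unchanged. A quick check (sketch):

    require 'rdf/vocab'
    require 'rdf/ntriples'

    RDF::Vocab::SKOS[:hasTopConcept].to_ntriples
    # => "<http://www.w3.org/2004/02/skos/core#hasTopConcept>"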
diff --git a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_metadata_extractor.rb b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_metadata_extractor.rb
deleted file mode 100644
index a5722893..00000000
--- a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_metadata_extractor.rb
+++ /dev/null
@@ -1,273 +0,0 @@
-module LinkedData
- module Concerns
- module OntologySubmission
- module MetadataExtractor
-
- def extract_metadata(logger, user_params)
-
- version_info = extract_version
- ontology_iri = extract_ontology_iri
-
- self.version = version_info if version_info
- self.uri = ontology_iri if ontology_iri
-
- begin
- # Extract metadata directly from the ontology
- extract_ontology_metadata(logger, user_params)
- logger.info('Additional metadata extracted.')
- rescue StandardError => e
- e.backtrace
- logger.error("Error while extracting additional metadata: #{e}")
- end
-
- begin
- # Set default metadata
- set_default_metadata
- logger.info('Default metadata set.')
- rescue StandardError => e
- logger.error("Error while setting default metadata: #{e}")
- end
-
- if self.valid?
- self.save
- else
- logger.error("Error while extracting additional metadata: #{self.errors}")
- end
-
- end
-
- def extract_version
-
- query = Goo.sparql_query_client.select(:versionInfo).distinct
- .from(id)
- .where([RDF::URI.new('http://bioportal.bioontology.org/ontologies/versionSubject'),
- RDF::URI.new('http://www.w3.org/2002/07/owl#versionInfo'),
- :versionInfo])
-
- sol = query.each_solution.first || {}
- sol[:versionInfo]&.to_s
- end
-
- def extract_ontology_iri
- query = Goo.sparql_query_client.select(:uri).distinct
- .from(id)
- .where([:uri,
- RDF::URI.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
- RDF::URI.new('http://www.w3.org/2002/07/owl#Ontology')])
- sol = query.each_solution.first || {}
- sol[:uri]&.to_s
- end
-
- private
-
- # Extract additional metadata about the ontology
- # First it extracts the main metadata, then the mapped metadata
- def extract_ontology_metadata(logger, user_params)
- user_params = {} if user_params.nil? || !user_params
- ontology_uri = uri
- logger.info("Extraction metadata from ontology #{ontology_uri}")
-
- # go through all OntologySubmission attributes. Returns symbols
- LinkedData::Models::OntologySubmission.attributes(:all).each do |attr|
- # for attribute with the :extractedMetadata setting on, and that have not been defined by the user
- attr_settings = LinkedData::Models::OntologySubmission.attribute_settings(attr)
-
- attr_not_excluded = user_params && !(user_params.key?(attr) && !user_params[attr].nil? && !user_params[attr].empty?)
-
- next unless attr_settings[:extractedMetadata] && attr_not_excluded
-
- # a boolean to check if a value that should be single have already been extracted
- single_extracted = false
- type = enforce?(attr, :list) ? :list : :string
- old_value = value(attr, type)
-
- unless attr_settings[:namespace].nil?
- property_to_extract = "#{attr_settings[:namespace].to_s}:#{attr.to_s}"
- hash_results = extract_each_metadata(ontology_uri, attr, property_to_extract, logger)
- single_extracted = send_value(attr, hash_results) unless hash_results.empty?
- end
-
- # extracts attribute value from metadata mappings
- attr_settings[:metadataMappings] ||= []
-
- attr_settings[:metadataMappings].each do |mapping|
- break if single_extracted
-
- hash_mapping_results = extract_each_metadata(ontology_uri, attr, mapping.to_s, logger)
- single_extracted = send_value(attr, hash_mapping_results) unless hash_mapping_results.empty?
- end
-
- new_value = value(attr, type)
- send_value(attr, old_value) if empty_value?(new_value) && !empty_value?(old_value)
-
- end
- end
-
- # Set some metadata to default values if nothing extracted
- def set_default_metadata
-
- end
-
- def empty_value?(value)
- value.nil? || (value.is_a?(Array) && value.empty?) || value.to_s.strip.empty?
- end
-
- def value(attr, type)
- val = send(attr.to_s)
- type.eql?(:list) ? Array(val) || [] : val || ''
- end
-
- def send_value(attr, value)
-
- if enforce?(attr, :list)
- # Add the retrieved value(s) to the attribute if the attribute take a list of objects
- metadata_values = value(attr, :list)
- metadata_values = metadata_values.dup
-
- metadata_values.push(*value.values)
-
- send("#{attr}=", metadata_values.uniq)
- elsif enforce?(attr, :concatenate)
- # if multiple value for this attribute, then we concatenate it
- # Add the concat at the very end, to easily join the content of the array
- metadata_values = value(attr, :string)
- metadata_values = metadata_values.split(', ')
- new_values = value.values.map { |x| x.to_s.split(', ') }.flatten
- send("#{attr}=", (metadata_values + new_values).uniq.join(', '))
- else
- # If multiple value for a metadata that should have a single value: taking one value randomly (the first in the hash)
- send("#{attr}=", value.values.first)
- return true
- end
- false
- end
-
- # Return a hash with the best literal value for an URI
- # it selects the literal according to their language: no language > english > french > other languages
- def select_metadata_literal(metadata_uri, metadata_literal, hash)
- return unless metadata_literal.is_a?(RDF::Literal)
-
- if hash.key?(metadata_uri)
- if metadata_literal.has_language?
- if !hash[metadata_uri].has_language?
- return hash
- else
- case metadata_literal.language
- when :en, :eng
- # Take the value with english language over other languages
- hash[metadata_uri] = metadata_literal
- return hash
- when :fr, :fre
- # If no english, take french
- if hash[metadata_uri].language == :en || hash[metadata_uri].language == :eng
- return hash
- else
- hash[metadata_uri] = metadata_literal
- return hash
- end
- else
- return hash
- end
- end
- else
- # Take the value with no language in priority (considered as a default)
- hash[metadata_uri] = metadata_literal
- return hash
- end
- else
- hash[metadata_uri] = metadata_literal
- hash
- end
- end
-
- # A function to extract additional metadata
- # Take the literal data if the property is pointing to a literal
- # If pointing to an URI: first it takes the "omv:name" of the object pointed by the property, if nil it takes the "rdfs:label".
- # If not found it check for "omv:firstName + omv:lastName" (for "omv:Person") of this object. And to finish it takes the "URI"
- # The hash_results contains the metadataUri (objet pointed on by the metadata property) with the value we are using from it
- def extract_each_metadata(ontology_uri, attr, prop_to_extract, logger)
-
- query_metadata = <<eos
-SELECT DISTINCT ?extractedObject ?omvname ?omvfirstname ?omvlastname ?rdfslabel
-FROM #{id.to_ntriples}
-WHERE {
- <#{ontology_uri}> #{prop_to_extract} ?extractedObject .
- OPTIONAL { ?extractedObject omv:name ?omvname } .
- OPTIONAL { ?extractedObject omv:firstName ?omvfirstname } .
- OPTIONAL { ?extractedObject omv:lastName ?omvlastname } .
- OPTIONAL { ?extractedObject rdfs:label ?rdfslabel } .
-}
-eos
- Goo.namespaces.each do |prefix, uri|
- query_metadata = "PREFIX #{prefix}: <#{uri}>\n" + query_metadata
- end
-
- #logger.info(query_metadata)
- # This hash will contain the "literal" metadata for each object (uri or literal) pointed by the metadata predicate
- hash_results = {}
- Goo.sparql_query_client.query(query_metadata).each_solution do |sol|
- value = sol[:extractedObject]
- if enforce?(attr, :uri)
- # If the attr is enforced as URI then it directly takes the URI
- uri_value = value ? RDF::URI.new(value.to_s.strip) : nil
- hash_results[value] = uri_value if uri_value&.valid?
- elsif enforce?(attr, :date_time)
- begin
- hash_results[value] = DateTime.iso8601(value.to_s)
- rescue StandardError => e
- logger.error("Impossible to extract DateTime metadata for #{attr}: #{value}. It should follow iso8601 standards. Error message: #{e}")
- end
- elsif enforce?(attr, :integer)
- begin
- hash_results[value] = value.to_s.to_i
- rescue StandardError => e
- logger.error("Impossible to extract integer metadata for #{attr}: #{value}. Error message: #{e}")
- end
- elsif enforce?(attr, :boolean)
- case value.to_s.downcase
- when 'true'
- hash_results[value] = true
- when 'false'
- hash_results[value] = false
- else
- logger.error("Impossible to extract boolean metadata for #{attr}: #{value}. Error message: #{e}")
- end
- elsif value.is_a?(RDF::URI)
- hash_results = find_object_label(hash_results, sol, value)
- else
- # If this is directly a literal
- hash_results = select_metadata_literal(value, value, hash_results)
- end
- end
- hash_results
- end
-
- def find_object_label(hash_results, sol, value)
- if !sol[:omvname].nil?
- hash_results = select_metadata_literal(value, sol[:omvname], hash_results)
- elsif !sol[:rdfslabel].nil?
- hash_results = select_metadata_literal(value, sol[:rdfslabel], hash_results)
- elsif !sol[:omvfirstname].nil?
- hash_results = select_metadata_literal(value, sol[:omvfirstname], hash_results)
- # if first and last name are defined (for omv:Person)
- hash_results[value] = "#{hash_results[value]} #{sol[:omvlastname]}" unless sol[:omvlastname].nil?
- elsif !sol[:omvlastname].nil?
- # if only last name is defined
- hash_results = select_metadata_literal(value, sol[:omvlastname], hash_results)
- else
- # if the object is an URI but we are requesting a String
- hash_results[value] = value.to_s
- end
- hash_results
- end
-
- def enforce?(attr, type)
- LinkedData::Models::OntologySubmission.attribute_settings(attr)[:enforce].include?(type)
- end
-
- end
- end
- end
-end
-
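
The deleted extractor now lives in the services layer (see LinkedData::Services::SubmissionMetadataExtractor later in this patch). Its core literal-selection rule — no language tag beats English, which beats French, which beats anything else — is easy to lose in the nested conditionals above; here is a minimal standalone sketch of the same ordering, using a hypothetical Literal struct in place of RDF::Literal:

```ruby
# Rank literals the way select_metadata_literal did: untagged first,
# then English, then French, then any other language.
Literal = Struct.new(:value, :language)

def literal_rank(literal)
  case literal.language
  when nil       then 0 # no language tag: treated as the default value
  when :en, :eng then 1
  when :fr, :fre then 2
  else 3
  end
end

def best_literal(literals)
  literals.min_by { |literal| literal_rank(literal) }
end

candidates = [Literal.new('coeur', :fr), Literal.new('heart', :en)]
puts best_literal(candidates).value
# => heart
```
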
diff --git a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
index f16c41b8..b667fd88 100644
--- a/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
+++ b/lib/ontologies_linked_data/concerns/ontology_submissions/submission_validators.rb
@@ -46,18 +46,29 @@ def deprecated?(inst = self)
module Validators
include ValidatorsHelpers
+ def enforce_agent_type(values, type, attr)
+ Array(values).each do |aff|
+ error = ["is_#{type}", "`#{attr}` must contain only agents of type #{type.capitalize}"]
+
+ return error unless aff.is_a?(LinkedData::Models::Agent)
+
+ aff.bring(:agentType) if aff.bring?(:agentType)
+ return error unless aff.agentType&.eql?(type)
+ end
+ []
+ end
+
def is_organization(inst, attr)
inst.bring(attr) if inst.bring?(attr)
affiliations = inst.send(attr)
- Array(affiliations).each do |aff|
- aff.bring(:agentType) if aff.bring?(:agentType)
- unless aff.agentType&.eql?('organization')
- return [:is_organization, "`#{attr}` must contain only agents of type Organization"]
- end
- end
+ enforce_agent_type(affiliations, 'organization', attr)
+ end
- []
+ def is_person(inst, attr)
+ inst.bring(attr) if inst.bring?(attr)
+ persons = inst.send(attr)
+ enforce_agent_type(persons, 'person', attr)
end
def is_person(inst, attr)
@@ -77,9 +88,9 @@ def is_person(inst, attr)
def lexvo_language(inst, attr)
values = Array(attr_value(inst, attr))
- return if values.all? { |x| x&.to_s&.start_with?('http://lexvo.org/id/iso639-3') }
+ return if values.all? { |x| x&.to_s&.start_with?('http://lexvo.org/id/iso') }
- [:lexvo_language, "#{attr} values need to be in the lexvo namespace (e.g http://lexvo.org/id/iso639-3/fra)"]
+ [:lexvo_language, "#{attr} values need to be in the lexvo namespace (e.g. http://lexvo.org/id/iso639-1/fr)"]
end
def deprecated_retired_align(inst, attr)
@@ -126,7 +137,7 @@ def include_ontology_views(inst, attr)
return if views.nil? || views.empty?
parts = attr_value(inst, :hasPart) || []
- return if views.all? { |v| parts.include?(v.id) }
+ return if views.all? { |v| parts.include?(LinkedData::Models::Base.replace_url_id_to_prefix(v.id)) }
[:include_ontology_views, "#{attr} needs to include all the views of the ontology"]
@@ -279,6 +290,11 @@ def ontology_has_domain(sub)
ontology_domain_list
end
+ def default_sparql_endpoint(sub)
+ url = LinkedData.settings.sparql_endpoint_url || ''
+
+ url.strip.blank? ? [] : [RDF::URI.new(url)]
+ end
def open_search_default(sub)
RDF::URI.new("#{LinkedData.settings.rest_url_prefix}search?ontologies=#{sub.ontology.acronym}&q=")
end
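
Factoring is_organization and is_person through enforce_agent_type keeps the two validators symmetric: an empty array signals success, a [code, message] pair signals failure. Illustrative only (the Agent here is schematic, not a saved fixture, and the helper is normally mixed into the Validators module rather than called bare):

```ruby
org = LinkedData::Models::Agent.new
org.agentType = 'organization'

enforce_agent_type([org], 'organization', :fundedBy)
# => []  (every agent has the expected agentType)

enforce_agent_type([org], 'person', :hasCreator)
# => ["is_person", "`hasCreator` must contain only agents of type Person"]
```
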
diff --git a/lib/ontologies_linked_data/config/config.rb b/lib/ontologies_linked_data/config/config.rb
index f47ab633..537a04f6 100644
--- a/lib/ontologies_linked_data/config/config.rb
+++ b/lib/ontologies_linked_data/config/config.rb
@@ -25,8 +25,8 @@ def config(&block)
@settings.goo_path_query ||= '/sparql/'
@settings.goo_path_data ||= '/data/'
@settings.goo_path_update ||= '/update/'
- @settings.search_server_url ||= 'http://localhost:8983/solr/term_search_core1'
- @settings.property_search_server_url ||= 'http://localhost:8983/solr/prop_search_core1'
+ @settings.search_server_url ||= 'http://localhost:8983/solr'
+ @settings.property_search_server_url ||= 'http://localhost:8983/solr'
@settings.repository_folder ||= './test/data/ontology_files/repo'
@settings.rest_url_prefix ||= DEFAULT_PREFIX
@settings.enable_security ||= false
@@ -88,6 +88,7 @@ def config(&block)
@settings.admin_emails ||= []
@settings.interportal_hash ||= {}
+ @settings.oauth_providers ||= {}
# number of times to retry a query when empty records are returned
@settings.num_retries_4store ||= 10
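
With the core names no longer baked into the Solr URLs and the new oauth_providers setting, a site config might look like the sketch below; the oauth_providers entry shape is an assumption for illustration, not a documented schema:

```ruby
LinkedData.config do |config|
  # Point at the Solr root; collection names are now resolved per model.
  config.search_server_url          = 'http://localhost:8983/solr'
  config.property_search_server_url = 'http://localhost:8983/solr'

  # New in this change: defaults to {} when unset.
  config.oauth_providers = {
    github: { check: :access_token, link: 'https://api.github.com/user' } # hypothetical shape
  }
end
```
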
diff --git a/lib/ontologies_linked_data/diff/bubastis_diff.rb b/lib/ontologies_linked_data/diff/bubastis_diff.rb
index 9a21b2d6..ff72ffb4 100644
--- a/lib/ontologies_linked_data/diff/bubastis_diff.rb
+++ b/lib/ontologies_linked_data/diff/bubastis_diff.rb
@@ -10,7 +10,7 @@ class InputFileNotFoundError < Diff::DiffException
class DiffFileNotGeneratedException < Diff::DiffException
end
- class BubastisDiffCommand
+ class BubastisDiffCommand < DiffTool
# Bubastis version 1.2
# 18th December 2014
@@ -37,15 +37,33 @@ class BubastisDiffCommand
# Loading one file locally and one from the web and outputting results to plain text:
# java -jar bubastis_1_2.jar -ontology1 "H://disease_ontology_version_1.owl" -ontology2 "http://www.disease.org/diseaseontology_latest.owl" -output "C://my_diff.txt"
- def initialize(input_fileOld, input_fileNew, output_repo)
+ def initialize(old_file_path, new_file_path)
@bubastis_jar_path = LinkedData.bindir + "/bubastis.jar"
- @input_fileOld = input_fileOld
- @input_fileNew = input_fileNew
- @output_repo = output_repo
+ @input_fileOld = old_file_path
+ @input_fileNew = new_file_path
+ @output_repo = File.expand_path(@input_fileNew).gsub(File.basename(@input_fileNew),'')
@file_diff_path = nil
@java_heap_size = LinkedData.settings.java_max_heap_size
end
+
+ def file_diff_path
+ @file_diff_path
+ end
+
+ def diff
+ setup_environment
+ call_bubastis_java_cmd
+ if @file_diff_path.nil?
+ raise DiffFileNotGeneratedException, "Diff file nil"
+ elsif not File.exist?(@file_diff_path)
+ raise DiffFileNotGeneratedException, "Diff file not found in #{@file_diff_path}"
+ end
+ return @file_diff_path
+ end
+
+ private
+
def setup_environment
if @input_fileOld.nil? or (not File.exist?(@input_fileOld))
raise InputFileNotFoundError, "#{@input_fileOld} not found."
@@ -105,21 +123,6 @@ def call_bubastis_java_cmd
end
return @file_diff_path
end
-
- def file_diff_path
- @file_diff_path
- end
-
- def diff
- setup_environment
- call_bubastis_java_cmd
- if @file_diff_path.nil?
- raise DiffFileNotGeneratedException, "Diff file nil"
- elsif not File.exist?(@file_diff_path)
- raise DiffFileNotGeneratedException, "Diff file not found in #{@file_diff_path}"
- end
- return @file_diff_path
- end
end
end
end
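
BubastisDiffCommand now takes just the two ontology files and derives the output directory from the new file's location, which matches how the diff service invokes it. Usage sketch (the paths and the generated diff file name are illustrative):

```ruby
require 'ontologies_linked_data'

bubastis = LinkedData::Diff::BubastisDiffCommand.new(
  '/srv/repo/STY/1/sty.owl', # older submission file
  '/srv/repo/STY/2/sty.owl'  # newer submission file; output lands next to it
)

diff_file = bubastis.diff # runs bubastis.jar; raises DiffFileNotGeneratedException on failure
puts diff_file
```
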
diff --git a/lib/ontologies_linked_data/diff/diff.rb b/lib/ontologies_linked_data/diff/diff.rb
index 32b054f8..3c89326e 100644
--- a/lib/ontologies_linked_data/diff/diff.rb
+++ b/lib/ontologies_linked_data/diff/diff.rb
@@ -1,8 +1,20 @@
module LinkedData
module Diff
- class <<self
- (remaining_ont > 0) ? "#{remaining_ont} ontologies remaining..." : "All ontologies processed!"))
- sleep(5)
end
# fsave.close
end
diff --git a/lib/ontologies_linked_data/media_types.rb b/lib/ontologies_linked_data/media_types.rb
index d109e80d..01a26480 100644
--- a/lib/ontologies_linked_data/media_types.rb
+++ b/lib/ontologies_linked_data/media_types.rb
@@ -3,8 +3,11 @@ module MediaTypes
HTML = :html
JSON = :json
JSONP = :jsonp
+ JSONLD = :jsonld
XML = :xml
+ RDF_XML = :rdf_xml
TURTLE = :turtle
+ NTRIPLES = :ntriples
DEFAULT = JSON
def self.all
diff --git a/lib/ontologies_linked_data/metrics/metrics.rb b/lib/ontologies_linked_data/metrics/metrics.rb
index c74f9e2b..95cd5e87 100644
--- a/lib/ontologies_linked_data/metrics/metrics.rb
+++ b/lib/ontologies_linked_data/metrics/metrics.rb
@@ -2,50 +2,6 @@
module LinkedData
module Metrics
- def self.metrics_for_submission(submission, logger)
- metrics = nil
- logger.info("metrics_for_submission start")
- logger.flush
- begin
- submission.bring(:submissionStatus) if submission.bring?(:submissionStatus)
- cls_metrics = class_metrics(submission, logger)
- logger.info("class_metrics finished")
- logger.flush
- metrics = LinkedData::Models::Metric.new
-
- cls_metrics.each do |k,v|
- unless v.instance_of?(Integer)
- begin
- v = Integer(v)
- rescue ArgumentError
- v = 0
- rescue TypeError
- v = 0
- end
- end
- metrics.send("#{k}=",v)
- end
- indiv_count = number_individuals(logger, submission)
- metrics.individuals = indiv_count
- logger.info("individuals finished")
- logger.flush
- prop_count = number_properties(logger, submission)
- metrics.properties = prop_count
- logger.info("properties finished")
- logger.flush
- # re-generate metrics file
- submission.generate_metrics_file(cls_metrics[:classes], indiv_count, prop_count)
- logger.info("generation of metrics file finished")
- logger.flush
- rescue Exception => e
- logger.error(e.message)
- logger.error(e)
- logger.flush
- metrics = nil
- end
- metrics
- end
-
def self.class_metrics(submission, logger)
t00 = Time.now
submission.ontology.bring(:flat) if submission.ontology.bring?(:flat)
@@ -54,9 +10,9 @@ def self.class_metrics(submission, logger)
roots = submission.roots
max_depth = 0
+ rdfsSC = Goo.namespaces[:rdfs][:subClassOf]
unless is_flat
depths = []
- rdfsSC = Goo.namespaces[:rdfs][:subClassOf]
roots.each do |root|
ok = true
n=1
@@ -97,7 +53,7 @@ def self.class_metrics(submission, logger)
logger.flush
children_counts = []
groupby_children.each do |cls,count|
- unless cls.start_with?("http")
+ unless cls.start_with?('http')
next
end
unless is_flat
@@ -178,7 +134,7 @@ def self.number_individuals(logger, submission)
else
logger.info("Unable to find metrics in file for submission #{submission.id.to_s}. Performing a COUNT of type query to get the total individual count...")
logger.flush
- indiv_count = count_owl_type(submission.id, "NamedIndividual")
+ indiv_count = count_owl_type(submission.id, 'NamedIndividual')
end
indiv_count
end
@@ -192,8 +148,8 @@ def self.number_properties(logger, submission)
else
logger.info("Unable to find metrics in file for submission #{submission.id.to_s}. Performing a COUNT of type query to get the total property count...")
logger.flush
- prop_count = count_owl_type(submission.id, "DatatypeProperty")
- prop_count += count_owl_type(submission.id, "ObjectProperty")
+ prop_count = count_owl_type(submission.id, 'DatatypeProperty')
+ prop_count += count_owl_type(submission.id, 'ObjectProperty')
end
prop_count
end
@@ -203,17 +159,17 @@ def self.hierarchy_depth?(graph,root,n,treeProp)
hops = []
vars = []
n.times do |i|
- hop = sTemplate.sub("children","?x#{i}")
+ hop = sTemplate.sub('children',"?x#{i}")
if i == 0
- hop = hop.sub("parent", "<#{root.to_s}>")
+ hop = hop.sub('parent', "<#{root.to_s}>")
else
- hop = hop.sub("parent", "?x#{i-1}")
+ hop = hop.sub('parent', "?x#{i-1}")
end
hops << hop
vars << "?x#{i}"
end
joins = hops.join(".\n")
- vars = vars.join(" ")
+ vars = vars.join(' ')
 query = <<-eos
 SELECT #{vars} WHERE {
 GRAPH <#{graph}> {
@@ -238,7 +194,7 @@ def self.hierarchy_depth?(graph,root,n,treeProp)
def self.query_count_definitions(subId,defProps)
propFilter = defProps.map { |x| "?p = <#{x.to_s}>" }
- propFilter = propFilter.join " || "
+ propFilter = propFilter.join ' || '
query = <<-eos
SELECT (count(DISTINCT ?s) as ?c) WHERE {
GRAPH <#{subId.to_s}> {
@@ -249,7 +205,7 @@ def self.query_count_definitions(subId,defProps)
FILTER (?s != <#{Goo.namespaces[:owl][:Thing]}>)
}}
eos
- query = query.sub("properties", propFilter)
+ query = query.sub('properties', propFilter)
rs = Goo.sparql_query_client.query(query)
rs.each do |sol|
return sol[:c].object
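
The depth probe builds an n-hop chain by repeatedly substituting into a single-hop template. A standalone sketch of that expansion (the template string here is an assumption patterned on rdfs:subClassOf; the real sTemplate comes from the surrounding query code):

```ruby
s_template = 'children <http://www.w3.org/2000/01/rdf-schema#subClassOf> parent'
root = 'http://example.org/Root'

hops = []
vars = []
3.times do |i|
  hop = s_template.sub('children', "?x#{i}")
  # The first hop is anchored at the root; later hops chain off the previous variable.
  hop = i.zero? ? hop.sub('parent', "<#{root}>") : hop.sub('parent', "?x#{i - 1}")
  hops << hop
  vars << "?x#{i}"
end

puts vars.join(' ') # => ?x0 ?x1 ?x2
puts hops.join(".\n")
# ?x0 <...#subClassOf> <http://example.org/Root>.
# ?x1 <...#subClassOf> ?x0.
# ?x2 <...#subClassOf> ?x1
```
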
diff --git a/lib/ontologies_linked_data/models/agents/agent.rb b/lib/ontologies_linked_data/models/agents/agent.rb
index f9574a0c..24601748 100644
--- a/lib/ontologies_linked_data/models/agents/agent.rb
+++ b/lib/ontologies_linked_data/models/agents/agent.rb
@@ -7,22 +7,55 @@ class Agent < LinkedData::Models::Base
model :Agent, namespace: :foaf, name_with: lambda { |cc| uuid_uri_generator(cc) }
attribute :agentType, enforce: [:existence], enforcedValues: %w[person organization]
- attribute :name, namespace: :foaf, enforce: %i[existence]
+ attribute :name, namespace: :foaf, enforce: %i[existence], fuzzy_search: true
attribute :homepage, namespace: :foaf
- attribute :acronym, namespace: :skos, property: :altLabel
- attribute :email, namespace: :foaf, property: :mbox, enforce: %i[email unique]
+ attribute :acronym, namespace: :skos, property: :altLabel, fuzzy_search: true
+ attribute :email, namespace: :foaf, property: :mbox, enforce: %i[email unique], fuzzy_search: true
- attribute :identifiers, namespace: :adms, property: :identifier, enforce: %i[Identifier list unique_identifiers]
+ attribute :identifiers, namespace: :adms, property: :identifier, enforce: %i[Identifier list unique_identifiers], fuzzy_search: true
attribute :affiliations, enforce: %i[Agent list is_organization], namespace: :org, property: :memberOf
attribute :creator, type: :user, enforce: [:existence]
-
embed :identifiers, :affiliations
embed_values affiliations: LinkedData::Models::Agent.goo_attrs_to_load + [identifiers: LinkedData::Models::AgentIdentifier.goo_attrs_to_load]
+ serialize_methods :usages
write_access :creator
access_control_load :creator
+ enable_indexing(:agents_metadata)
+
+ def embedded_doc
+ "#{self.name} #{self.acronym} #{self.email} #{self.agentType}"
+ end
+
+ def self.load_agents_usages(agents = [], agent_attributes = OntologySubmission.agents_attr_uris)
+ q = Goo.sparql_query_client.select(:id, :property, :agent, :status).distinct.from(LinkedData::Models::OntologySubmission.uri_type).where([:id,LinkedData::Models::OntologySubmission.attribute_uri(:submissionStatus),:status], [:id, :property, :agent])
+ q = q.filter("?status = <#{RDF::URI.new(LinkedData::Models::SubmissionStatus.id_prefix + 'RDF')}> || ?status = <#{RDF::URI.new(LinkedData::Models::SubmissionStatus.id_prefix + 'UPLOADED')}>")
+ q = q.filter(agent_attributes.map{|attr| "?property = <#{attr}>"}.join(' || '))
+
+ data = q.each_solution.group_by{|x| x[:agent]}
+
+ agents_usages = data.transform_values do |values|
+ r = values.select { |value| value[:status]['RDF'] }
+ r = values.select { |value| value[:status]['UPLOADED'] } if r.empty?
+ r.reject{|x| x[:property].nil? }.map{|x| [x[:id], x[:property]]}
+ end
+
+ agents.each do |agent|
+ usages = agents_usages[agent.id]
+ usages = usages ? usages.group_by(&:shift) : {}
+ usages = usages.transform_values{|x| x.flatten.map(&:to_s)}
+
+ agent.instance_variable_set("@usages", usages)
+ agent.loaded_attributes.add(:usages)
+ end
+ end
+
+ def usages(force_update: false)
+ self.class.load_agents_usages([self]) if !instance_variable_defined?("@usages") || force_update
+ @usages
+ end
def unique_identifiers(inst, attr)
inst.bring(attr) if inst.bring?(attr)
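
load_agents_usages precomputes, in one SPARQL pass, which submissions reference each agent and through which metadata properties; the per-instance usages reader falls back to loading on demand. Usage sketch (the agent id and returned URIs are illustrative):

```ruby
agent = LinkedData::Models::Agent.find(RDF::URI.new('http://example.org/Agents/42')).first # hypothetical id

agent.usages
# => { "http://data.example.org/ontologies/STY/submissions/2" =>
#      ["http://omv.ontoware.org/2005/05/ontology#hasCreator"] }

agent.usages(force_update: true) # re-runs the aggregation query
```
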
diff --git a/lib/ontologies_linked_data/models/agents/identifier.rb b/lib/ontologies_linked_data/models/agents/identifier.rb
index 7f504456..5e7d77cc 100644
--- a/lib/ontologies_linked_data/models/agents/identifier.rb
+++ b/lib/ontologies_linked_data/models/agents/identifier.rb
@@ -21,6 +21,10 @@ def self.generate_identifier(notation, schema_agency)
return RDF::URI.new(Goo.id_prefix + 'Identifiers/' + out.join(':')) if out.size.eql?(2)
end
+ def embedded_doc
+ "#{self.id.split('/').last}"
+ end
+
def no_url(inst,attr)
inst.bring(attr) if inst.bring?(attr)
notation = inst.send(attr)
diff --git a/lib/ontologies_linked_data/models/class.rb b/lib/ontologies_linked_data/models/class.rb
index 7a3a5864..2687c683 100644
--- a/lib/ontologies_linked_data/models/class.rb
+++ b/lib/ontologies_linked_data/models/class.rb
@@ -57,33 +57,33 @@ def self.urn_id(acronym,classId)
attribute :parents, namespace: :rdfs,
property: lambda {|x| self.tree_view_property(x) },
- enforce: [:list, :class]
+ enforce: [:list, :class]
#transitive parent
attribute :ancestors, namespace: :rdfs,
- property: :subClassOf,
- enforce: [:list, :class],
- transitive: true
+ property: :subClassOf,
+ enforce: [:list, :class],
+ transitive: true
attribute :children, namespace: :rdfs,
property: lambda {|x| self.tree_view_property(x) },
inverse: { on: :class , :attribute => :parents }
attribute :subClassOf, namespace: :rdfs,
- enforce: [:list, :uri]
+ enforce: [:list, :uri]
attribute :ancestors, namespace: :rdfs, property: :subClassOf, handler: :retrieve_ancestors
attribute :descendants, namespace: :rdfs, property: :subClassOf,
- handler: :retrieve_descendants
+ handler: :retrieve_descendants
attribute :semanticType, enforce: [:list], :namespace => :umls, :property => :hasSTY
attribute :cui, enforce: [:list], :namespace => :umls, alias: true
attribute :xref, :namespace => :oboinowl_gen, alias: true,
- :property => :hasDbXref
+ :property => :hasDbXref
attribute :notes,
- inverse: { on: :note, attribute: :relatedClass }
+ inverse: { on: :note, attribute: :relatedClass }
attribute :inScheme, enforce: [:list, :uri], namespace: :skos
attribute :memberOf, namespace: :uneskos, inverse: { on: :collection , :attribute => :member }
attribute :created, namespace: :dcterms
@@ -116,6 +116,66 @@ def self.urn_id(acronym,classId)
cache_segment_keys [:class]
cache_load submission: [ontology: [:acronym]]
+ # Index settings
+ def self.index_schema(schema_generator)
+ schema_generator.add_field(:prefLabel, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:synonym, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:notation, 'text_general', indexed: true, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:definition, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:submissionAcronym, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:parents, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:ontologyType, 'string', indexed: true, stored: true, multi_valued: false)
+ # schema_generator.add_field(:ontologyType, 'ontologyType', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:ontologyId, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:submissionId, 'pint', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:childCount, 'pint', indexed: true, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:cui, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:semanticType, 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ schema_generator.add_field(:property, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:propertyRaw, 'text_general', indexed: false, stored: true, multi_valued: false)
+
+ schema_generator.add_field(:obsolete, 'boolean', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:provisional, 'boolean', indexed: true, stored: true, multi_valued: false)
+
+ # Copy fields for term search
+ schema_generator.add_copy_field('notation', '_text_')
+
+ %w[prefLabel synonym].each do |field|
+
+ schema_generator.add_field("#{field}Exact", 'string', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_field("#{field}Suggest", 'text_suggest', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+ schema_generator.add_field("#{field}SuggestEdge", 'text_suggest_edge', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_field("#{field}SuggestNgram", 'text_suggest_ngram', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+
+ schema_generator.add_copy_field(field, '_text_')
+ schema_generator.add_copy_field(field, "#{field}Exact")
+ schema_generator.add_copy_field(field, "#{field}Suggest")
+ schema_generator.add_copy_field(field, "#{field}SuggestEdge")
+ schema_generator.add_copy_field(field, "#{field}SuggestNgram")
+
+ schema_generator.add_dynamic_field("#{field}_*", 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}Exact_*", 'string', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}Suggest_*", 'text_suggest', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+ schema_generator.add_dynamic_field("#{field}SuggestEdge_*", 'text_suggest_edge', indexed: true, stored: false, multi_valued: true)
+ schema_generator.add_dynamic_field("#{field}SuggestNgram_*", 'text_suggest_ngram', indexed: true, stored: false, multi_valued: true, omit_norms: true)
+
+ schema_generator.add_copy_field("#{field}_*", "#{field}Exact_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}Suggest_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}SuggestEdge_*")
+ schema_generator.add_copy_field("#{field}_*", "#{field}SuggestNgram_*")
+ end
+
+ schema_generator.add_dynamic_field('definition_*', 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ end
+
+ enable_indexing(:term_search_core1) do |schema_generator|
+ index_schema(schema_generator)
+ end
+
def self.tree_view_property(*args)
submission = args.first
unless submission.loaded_attributes.include?(:hasOntologyLanguage)
@@ -146,6 +206,31 @@ def index_id()
"#{self.id.to_s}_#{self.submission.ontology.acronym}_#{self.submission.submissionId}"
end
+ def to_hash(include_languages: false)
+ attr_hash = {}
+ self.class.attributes.each do |attr|
+ v = self.instance_variable_get("@#{attr}")
+ attr_hash[attr] = v unless v.nil?
+ end
+ properties_values = properties(include_languages: include_languages)
+ if properties_values
+ all_attr_uris = Set.new
+ self.class.attributes.each do |attr|
+ if self.class.collection_opts
+ all_attr_uris << self.class.attribute_uri(attr, self.collection)
+ else
+ all_attr_uris << self.class.attribute_uri(attr)
+ end
+ end
+ properties_values.each do |attr, values|
+ values = values.values.flatten if values.is_a?(Hash)
+ attr_hash[attr] = values.map { |v| v.to_s } unless all_attr_uris.include?(attr)
+ end
+ end
+ attr_hash[:id] = @id
+ attr_hash
+ end
+
# to_set is an optional array that allows passing specific
# field names that require updating
# if to_set is nil, it's assumed to be a new document for insert
@@ -179,6 +264,8 @@ def index_doc(to_set=nil)
puts "Exception getting paths to root for search for #{self.id.to_s}: #{e.class}: #{e.message}\n#{e.backtrace.join("\n")}"
end
+ self.submission.ontology.bring(:ontologyType) if self.submission.ontology.bring?(:ontologyType)
+
doc[:ontologyId] = self.submission.id.to_s
doc[:submissionAcronym] = self.submission.ontology.acronym
doc[:submissionId] = self.submission.submissionId
@@ -187,20 +274,29 @@ def index_doc(to_set=nil)
all_attrs = self.to_hash
std = [:id, :prefLabel, :notation, :synonym, :definition, :cui]
-
+ multi_language_fields = [:prefLabel, :synonym, :definition]
std.each do |att|
cur_val = all_attrs[att]
# don't store empty values
next if cur_val.nil? || (cur_val.respond_to?('empty?') && cur_val.empty?)
+ if cur_val.is_a?(Hash) # Multi language
+ if multi_language_fields.include?(att)
+ doc[att] = cur_val.values.flatten # index all values of each language
+ cur_val.each { |lang, values| doc["#{att}_#{lang}".to_sym] = values } # index values per language
+ else
+ doc[att] = cur_val.values.flatten.first
+ end
+ end
+
if cur_val.is_a?(Array)
# don't store empty values
cur_val = cur_val.reject { |c| c.respond_to?('empty?') && c.empty? }
doc[att] = []
cur_val = cur_val.uniq
cur_val.map { |val| doc[att] << (val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip) }
- else
+ elsif doc[att].nil?
doc[att] = cur_val.to_s.strip
end
end
@@ -234,28 +330,28 @@ def properties_for_indexing()
self_props.each do |attr_key, attr_val|
# unless doc.include?(attr_key)
- if attr_val.is_a?(Array)
- props[attr_key] = []
- attr_val = attr_val.uniq
-
- attr_val.map { |val|
- real_val = val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip
+ if attr_val.is_a?(Array)
+ props[attr_key] = []
+ attr_val = attr_val.uniq
- # don't store empty values
- unless real_val.respond_to?('empty?') && real_val.empty?
- prop_vals << real_val
- props[attr_key] << real_val
- end
- }
- else
- real_val = attr_val.to_s.strip
+ attr_val.map { |val|
+ real_val = val.kind_of?(Goo::Base::Resource) ? val.id.to_s : val.to_s.strip
# don't store empty values
unless real_val.respond_to?('empty?') && real_val.empty?
prop_vals << real_val
- props[attr_key] = real_val
+ props[attr_key] << real_val
end
+ }
+ else
+ real_val = attr_val.to_s.strip
+
+ # don't store empty values
+ unless real_val.respond_to?('empty?') && real_val.empty?
+ prop_vals << real_val
+ props[attr_key] = real_val
end
+ end
# end
end
@@ -283,9 +379,9 @@ def childrenCount()
BAD_PROPERTY_URIS = LinkedData::Mappings.mapping_predicates.values.flatten + ['http://bioportal.bioontology.org/metadata/def/prefLabel']
EXCEPTION_URIS = ["http://bioportal.bioontology.org/ontologies/umls/cui"]
BLACKLIST_URIS = BAD_PROPERTY_URIS - EXCEPTION_URIS
- def properties
- return nil if self.unmapped.nil?
- properties = self.unmapped
+ def properties(*args)
+ return nil if self.unmapped(*args).nil?
+ properties = self.unmapped(*args)
BLACKLIST_URIS.each {|bad_iri| properties.delete(RDF::URI.new(bad_iri))}
properties
end
@@ -372,7 +468,7 @@ def hasChildren()
- def load_has_children()
+ def load_has_children()
if !instance_variable_get("@intlHasChildren").nil?
return
end
@@ -381,7 +477,7 @@ def load_has_children()
has_c = false
Goo.sparql_query_client.query(query,
query_options: {rules: :NONE }, graphs: graphs)
- .each do |sol|
+ .each do |sol|
has_c = true
end
@intlHasChildren = has_c
@@ -404,7 +500,7 @@ def retrieve_hierarchy_ids(direction=:ancestors)
next_level_thread = Set.new
query = hierarchy_query(direction,ids_slice)
Goo.sparql_query_client.query(query,query_options: {rules: :NONE }, graphs: graphs)
- .each do |sol|
+ .each do |sol|
parent = sol[:node].to_s
next if !parent.start_with?("http")
ontology = sol[:graph].to_s
@@ -443,7 +539,7 @@ def has_children_query(class_id, submission_id)
}
LIMIT 1
eos
- return query
+ return query
end
def hierarchy_query(direction, class_ids)
@@ -464,7 +560,7 @@ def hierarchy_query(direction, class_ids)
FILTER (#{filter_ids})
}
eos
- return query
+ return query
end
def append_if_not_there_already(path, r)
@@ -488,7 +584,7 @@ def traverse_path_to_root(parents, paths, path_i, tree = false, roots = nil)
parents.each_index do |i|
rec_i = recursions[i]
recurse_on_path[i] = recurse_on_path[i] ||
- !append_if_not_there_already(paths[rec_i], parents[i]).nil?
+ !append_if_not_there_already(paths[rec_i], parents[i]).nil?
end
else
path = paths[path_i]
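
For multi-language values, index_doc now receives a hash keyed by language and fans it out into both the plain field and per-language dynamic fields (the prefLabel_* family declared in index_schema above). A self-contained sketch of that fan-out, with the hash shape assumed from the branch added in index_doc:

```ruby
all_attrs = { prefLabel: { en: ['Heart'], fr: ['Coeur'] } }

doc = {}
cur_val = all_attrs[:prefLabel]
doc[:prefLabel] = cur_val.values.flatten                           # index all languages together
cur_val.each { |lang, values| doc[:"prefLabel_#{lang}"] = values } # plus one field per language

p doc
# => {:prefLabel=>["Heart", "Coeur"], :prefLabel_en=>["Heart"], :prefLabel_fr=>["Coeur"]}
```
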
diff --git a/lib/ontologies_linked_data/models/concerns/submission_process.rb b/lib/ontologies_linked_data/models/concerns/submission_process.rb
new file mode 100644
index 00000000..c5b03983
--- /dev/null
+++ b/lib/ontologies_linked_data/models/concerns/submission_process.rb
@@ -0,0 +1,56 @@
+module LinkedData
+ module Concerns
+ module SubmissionProcessable
+
+ def process_submission(logger, options = {})
+ LinkedData::Services::OntologyProcessor.new(self).process(logger, options)
+ end
+
+ def generate_missing_labels(logger)
+ LinkedData::Services::GenerateMissingLabels.new(self).process(logger, file_path: self.master_file_path)
+ end
+
+ def generate_obsolete_classes(logger)
+ LinkedData::Services::ObsoleteClassesGenerator.new(self).process(logger, file_path: self.master_file_path)
+ end
+
+ def extract_metadata(logger, options = {})
+ LinkedData::Services::SubmissionMetadataExtractor.new(self).process(logger, options)
+ end
+
+ def diff(logger, older)
+ LinkedData::Services::SubmissionDiffGenerator.new(self).diff(logger, older)
+ end
+
+ def generate_diff(logger)
+ LinkedData::Services::SubmissionDiffGenerator.new(self).process(logger)
+ end
+
+ def index_all(logger, commit: true)
+ LinkedData::Services::OntologySubmissionAllDataIndexer.new(self).process(logger, commit: commit)
+ end
+
+ def index_terms(logger, commit: true, optimize: true)
+ LinkedData::Services::OntologySubmissionIndexer.new(self).process(logger, commit: commit, optimize: optimize)
+ end
+
+ def index_properties(logger, commit: true, optimize: true)
+ LinkedData::Services::SubmissionPropertiesIndexer.new(self).process(logger, commit: commit, optimize: optimize)
+ end
+
+ def archive
+ LinkedData::Services::OntologySubmissionArchiver.new(self).process
+ end
+
+ def generate_rdf(logger, reasoning: true)
+ LinkedData::Services::SubmissionRDFGenerator.new(self).process(logger, reasoning: reasoning)
+ end
+
+ def generate_metrics(logger)
+ LinkedData::Services::SubmissionMetricsCalculator.new(self).process(logger)
+ end
+
+ end
+ end
+end
+
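
Each lifecycle step is now a thin delegation to a service object, so callers keep the old entry points. Usage sketch, assuming submission is a loaded OntologySubmission; the option keys passed to process_submission are assumptions rather than a checked API:

```ruby
require 'logger'

logger = Logger.new($stdout)

submission.process_submission(logger, process_rdf: true, index_search: true)
submission.generate_diff(logger) # LinkedData::Services::SubmissionDiffGenerator
submission.archive               # LinkedData::Services::OntologySubmissionArchiver
```
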
diff --git a/lib/ontologies_linked_data/models/contact.rb b/lib/ontologies_linked_data/models/contact.rb
index 9af31a95..ccb3d5bf 100644
--- a/lib/ontologies_linked_data/models/contact.rb
+++ b/lib/ontologies_linked_data/models/contact.rb
@@ -6,6 +6,14 @@ class Contact < LinkedData::Models::Base
attribute :email, enforce: [:existence]
embedded true
+
+ def embedded_doc
+ bring(:name) if bring?(:name)
+ bring(:email) if bring?(:email)
+
+ "#{self.name} | #{self.email}"
+ end
+
end
end
end
diff --git a/lib/ontologies_linked_data/models/metric.rb b/lib/ontologies_linked_data/models/metric.rb
index 84ee0305..2d39be66 100644
--- a/lib/ontologies_linked_data/models/metric.rb
+++ b/lib/ontologies_linked_data/models/metric.rb
@@ -53,6 +53,14 @@ def self.metrics_id_generator(m)
raise ArgumentError, "Metrics id needs to be set"
#return RDF::URI.new(m.submission.id.to_s + "/metrics")
end
+
+ def embedded_doc
+ doc = indexable_object
+ doc.delete(:resource_model)
+ doc.delete(:resource_id)
+ doc.delete(:id)
+ doc
+ end
end
end
end
diff --git a/lib/ontologies_linked_data/models/ontology.rb b/lib/ontologies_linked_data/models/ontology.rb
index 1651bf5d..f01497ae 100644
--- a/lib/ontologies_linked_data/models/ontology.rb
+++ b/lib/ontologies_linked_data/models/ontology.rb
@@ -18,6 +18,7 @@ module Models
class Ontology < LinkedData::Models::Base
class ParsedSubmissionError < StandardError; end
class OntologyAnalyticsError < StandardError; end
+ include LinkedData::Concerns::Analytics
ONTOLOGY_ANALYTICS_REDIS_FIELD = "ontology_analytics"
ONTOLOGY_RANK_REDIS_FIELD = "ontology_rank"
@@ -26,8 +27,8 @@ class OntologyAnalyticsError < StandardError; end
model :ontology, :name_with => :acronym
attribute :acronym, namespace: :omv,
- enforce: [:unique, :existence, lambda { |inst,attr| validate_acronym(inst,attr) } ]
- attribute :name, :namespace => :omv, enforce: [:unique, :existence]
+ enforce: [:unique, :existence, lambda { |inst,attr| validate_acronym(inst,attr) } ], fuzzy_search: true
+ attribute :name, :namespace => :omv, enforce: [:unique, :existence], fuzzy_search: true
attribute :submissions, inverse: { on: :ontology_submission, attribute: :ontology },
metadataMappings: ["dct:hasVersion", "pav:hasCurrentVersion", "pav:hasVersion", "prov:generalizationOf", "adms:next"]
attribute :projects,
@@ -89,6 +90,10 @@ class OntologyAnalyticsError < StandardError; end
# Cache
cache_timeout 3600
+ enable_indexing(:ontology_metadata)
+
+ after_save :index_latest_submission
+
def self.validate_acronym(inst, attr)
inst.bring(attr) if inst.bring?(attr)
acronym = inst.send(attr)
@@ -156,7 +161,7 @@ def update_submissions_has_part(inst, attr)
sub.bring_remaining
sub.hasPart = parts
- sub.save
+ sub.save if sub.valid?
return unless changed && action.eql?(:remove)
@@ -336,17 +341,8 @@ def rank(weight_analytics=DEFAULT_RANK_WEIGHT_ANALYTICS, weight_umls=DEFAULT_RAN
# A static method for retrieving Analytics for a combination of ontologies, year, month
def self.analytics(year=nil, month=nil, acronyms=nil)
- analytics = self.load_analytics_data
-
- unless analytics.empty?
- analytics.delete_if { |acronym, _| !acronyms.include? acronym } unless acronyms.nil?
- analytics.values.each do |ont_analytics|
- ont_analytics.delete_if { |key, _| key != year } unless year.nil?
- ont_analytics.each { |_, val| val.delete_if { |key, __| key != month } } unless month.nil?
- end
- # sort results by the highest traffic values
- analytics = Hash[analytics.sort_by {|_, v| v[year][month]}.reverse] if year && month
- end
+ analytics = retrieve_analytics(year, month)
+ analytics.delete_if { |acronym, _| !acronyms.include? acronym } unless acronyms.nil?
analytics
end
@@ -363,22 +359,14 @@ def self.rank(weight_analytics=DEFAULT_RANK_WEIGHT_ANALYTICS, weight_umls=DEFAUL
ranking
end
- def self.load_analytics_data
- self.load_data(ONTOLOGY_ANALYTICS_REDIS_FIELD)
+ def self.analytics_redis_key
+ ONTOLOGY_ANALYTICS_REDIS_FIELD
end
def self.load_ranking_data
self.load_data(ONTOLOGY_RANK_REDIS_FIELD)
end
- def self.load_data(field_name)
- @@redis ||= Redis.new(:host => LinkedData.settings.ontology_analytics_redis_host,
- :port => LinkedData.settings.ontology_analytics_redis_port,
- :timeout => 30)
- raw_data = @@redis.get(field_name)
- return raw_data.nil? ? Hash.new : Marshal.load(raw_data)
- end
-
##
# Delete all artifacts of an ontology
def delete(*args)
@@ -438,9 +426,8 @@ def delete(*args)
end
# remove index entries
- unindex(index_commit)
- unindex_properties(index_commit)
-
+ unindex_all_data(index_commit)
+
# delete all files
ontology_dir = File.join(LinkedData.settings.repository_folder, self.acronym.to_s)
FileUtils.rm_rf(ontology_dir)
@@ -461,19 +448,43 @@ def save(*args)
self
end
- def unindex(commit=true)
+ def index_latest_submission
+ last_s = latest_submission(status: :any)
+ return if last_s.nil?
+
+ last_s.ontology = self
+ last_s.index_update([:ontology])
+ end
+
+ def unindex_all_data(commit=true)
unindex_by_acronym(commit)
+ unindex_properties(commit)
+ end
+
+ def embedded_doc
+ self.administeredBy.map{|x| x.bring_remaining}
+ doc = indexable_object
+ doc.delete(:id)
+ doc.delete(:resource_id)
+ doc.delete('ontology_viewOf_resource_model_t')
+ doc['ontology_viewOf_t'] = self.viewOf.id.to_s unless self.viewOf.nil?
+ doc[:resource_model_t] = doc.delete(:resource_model)
+ doc
end
def unindex_properties(commit=true)
- unindex_by_acronym(commit, :property)
+ self.bring(:acronym) if self.bring?(:acronym)
+ query = "submissionAcronym:#{acronym}"
+ OntologyProperty.unindexByQuery(query)
+ OntologyProperty.indexCommit(nil) if commit
end
- def unindex_by_acronym(commit=true, connection_name=:main)
+ def unindex_by_acronym(commit=true)
self.bring(:acronym) if self.bring?(:acronym)
query = "submissionAcronym:#{acronym}"
- Ontology.unindexByQuery(query, connection_name)
- Ontology.indexCommit(nil, connection_name) if commit
+ Class.unindexByQuery(query)
+ Class.indexCommit(nil) if commit
+ #OntologySubmission.clear_indexed_content(acronym)
end
def restricted?
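
Ontology.analytics keeps only the acronym filter locally; year/month slicing moves into the shared LinkedData::Concerns::Analytics mixin included at the top of the class. Call shape (acronym and counts illustrative):

```ruby
LinkedData::Models::Ontology.analytics(2023, 10, ['STY'])
# => { "STY" => { 2023 => { 10 => 1234 } } }
```
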
diff --git a/lib/ontologies_linked_data/models/ontology_format.rb b/lib/ontologies_linked_data/models/ontology_format.rb
index c582ef90..926c8c60 100644
--- a/lib/ontologies_linked_data/models/ontology_format.rb
+++ b/lib/ontologies_linked_data/models/ontology_format.rb
@@ -39,14 +39,14 @@ def tree_property
return Goo.vocabulary(:metadata)[:treeView]
end
if skos?
- return RDF::SKOS[:broader]
+ return RDF::Vocab::SKOS[:broader]
end
return RDF::RDFS[:subClassOf]
end
def class_type
if skos?
- return RDF::SKOS[:Concept]
+ return RDF::Vocab::SKOS[:Concept]
end
return RDF::OWL[:Class]
end
diff --git a/lib/ontologies_linked_data/models/ontology_submission.rb b/lib/ontologies_linked_data/models/ontology_submission.rb
index eb3c3d74..7dda0062 100644
--- a/lib/ontologies_linked_data/models/ontology_submission.rb
+++ b/lib/ontologies_linked_data/models/ontology_submission.rb
@@ -12,7 +12,7 @@ module Models
class OntologySubmission < LinkedData::Models::Base
- include LinkedData::Concerns::OntologySubmission::MetadataExtractor
+ include LinkedData::Concerns::SubmissionProcessable
include LinkedData::Concerns::OntologySubmission::Validators
include LinkedData::Concerns::OntologySubmission::UpdateCallbacks
extend LinkedData::Concerns::OntologySubmission::DefaultCallbacks
@@ -20,45 +20,43 @@ class OntologySubmission < LinkedData::Models::Base
include SKOS::ConceptSchemes
include SKOS::RootsFetcher
- FILES_TO_DELETE = ['labels.ttl', 'mappings.ttl', 'obsolete.ttl', 'owlapi.xrdf', 'errors.log']
- FOLDERS_TO_DELETE = ['unzipped']
+
FLAT_ROOTS_LIMIT = 1000
- FILE_SIZE_ZIPPING_THRESHOLD = 100 * 1024 * 1024 # 100MB
model :ontology_submission, scheme: File.join(__dir__, '../../../config/schemes/ontology_submission.yml'),
- name_with: ->(s) { submission_id_generator(s) }
+ name_with: ->(s) { submission_id_generator(s) }
attribute :submissionId, type: :integer, enforce: [:existence]
# Object description properties metadata
# Configurable properties for processing
- attribute :prefLabelProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:prefLabel]}
- attribute :definitionProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:definition]}
- attribute :synonymProperty, type: :uri, default: ->(s) {Goo.vocabulary(:skos)[:altLabel]}
- attribute :authorProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:creator]}
+ attribute :prefLabelProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:prefLabel] }
+ attribute :definitionProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:definition] }
+ attribute :synonymProperty, type: :uri, default: ->(s) { Goo.vocabulary(:skos)[:altLabel] }
+ attribute :authorProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:creator] }
attribute :classType, type: :uri
- attribute :hierarchyProperty, type: :uri, default: ->(s) {default_hierarchy_property(s)}
- attribute :obsoleteProperty, type: :uri, default: ->(s) {Goo.vocabulary(:owl)[:deprecated]}
- attribute :obsoleteParent, type: :uri, default: ->(s) {RDF::URI.new("http://www.geneontology.org/formats/oboInOwl#ObsoleteClass")}
- attribute :createdProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:created]}
- attribute :modifiedProperty, type: :uri, default: ->(s) {Goo.vocabulary(:dc)[:modified]}
+ attribute :hierarchyProperty, type: :uri, default: ->(s) { default_hierarchy_property(s) }
+ attribute :obsoleteProperty, type: :uri, default: ->(s) { Goo.vocabulary(:owl)[:deprecated] }
+ attribute :obsoleteParent, type: :uri, default: ->(s) { RDF::URI.new("http://www.geneontology.org/formats/oboInOwl#ObsoleteClass") }
+ attribute :createdProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:created] }
+ attribute :modifiedProperty, type: :uri, default: ->(s) { Goo.vocabulary(:dc)[:modified] }
# Ontology metadata
# General metadata
- attribute :URI, namespace: :omv, enforce: %i[existence distinct_of_identifier]
+ attribute :URI, namespace: :omv, type: :uri, enforce: %i[existence distinct_of_identifier], fuzzy_search: true
attribute :versionIRI, namespace: :owl, type: :uri, enforce: [:distinct_of_URI]
attribute :version, namespace: :omv
attribute :status, namespace: :omv, enforce: %i[existence], default: ->(x) { 'production' }
- attribute :deprecated, namespace: :owl, type: :boolean, enforce: [:deprecated_retired_align], default: ->(x) { false }
+ attribute :deprecated, namespace: :owl, type: :boolean, default: ->(x) { false }
attribute :hasOntologyLanguage, namespace: :omv, type: :ontology_format, enforce: [:existence]
attribute :hasFormalityLevel, namespace: :omv, type: :uri
- attribute :hasOntologySyntax, namespace: :omv, type: :uri, default: ->(s) {ontology_syntax_default(s)}
+ attribute :hasOntologySyntax, namespace: :omv, type: :uri, default: ->(s) { ontology_syntax_default(s) }
attribute :naturalLanguage, namespace: :omv, type: %i[list uri], enforce: [:lexvo_language]
attribute :isOfType, namespace: :omv, type: :uri
attribute :identifier, namespace: :dct, type: %i[list uri], enforce: [:distinct_of_URI]
# Description metadata
- attribute :description, namespace: :omv, enforce: %i[concatenate existence]
+ attribute :description, namespace: :omv, enforce: %i[concatenate existence], fuzzy_search: true
attribute :homepage, namespace: :foaf, type: :uri
attribute :documentation, namespace: :omv, type: :uri
attribute :notes, namespace: :omv, type: :list
@@ -76,11 +74,10 @@ class OntologySubmission < LinkedData::Models::Base
# Date metadata
attribute :released, type: :date_time, enforce: [:existence]
- attribute :valid, namespace: :dct, type: :date_time, enforce: [:validity_date_retired_align]
- attribute :curatedOn, namespace: :pav, type: %i[date_time list], enforce: [:superior_equal_to_creationDate]
+ attribute :valid, namespace: :dct, type: :date_time
+ attribute :curatedOn, namespace: :pav, type: %i[date_time list]
attribute :creationDate, namespace: :omv, type: :date_time, default: ->(x) { Date.today.to_datetime }
- attribute :modificationDate, namespace: :omv, type: :date_time,
- enforce: %i[superior_equal_to_creationDate modification_date_previous_align]
+ attribute :modificationDate, namespace: :omv, type: :date_time
# Person and organizations metadata
attribute :contact, type: %i[contact list], enforce: [:existence]
@@ -89,29 +86,29 @@ class OntologySubmission < LinkedData::Models::Base
attribute :curatedBy, namespace: :pav, type: %i[list Agent]
attribute :publisher, namespace: :dct, type: %i[list Agent]
attribute :fundedBy, namespace: :foaf, type: %i[list Agent], enforce: [:is_organization]
- attribute :endorsedBy, namespace: :omv, type: :list, enforce: [:is_organization]
+ attribute :endorsedBy, namespace: :omv, type: %i[list Agent], enforce: [:is_organization]
attribute :translator, namespace: :schema, type: %i[list Agent]
# Community metadata
attribute :audience, namespace: :dct
attribute :repository, namespace: :doap, type: :uri
attribute :bugDatabase, namespace: :doap, type: :uri
- attribute :mailingList, namespace: :doap, enforce: [:email]
+ attribute :mailingList, namespace: :doap
attribute :toDoList, namespace: :voaf, type: :list
attribute :award, namespace: :schema, type: :list
# Usage metadata
attribute :knownUsage, namespace: :omv, type: :list
attribute :designedForOntologyTask, namespace: :omv, type: %i[list uri]
- attribute :hasDomain, namespace: :omv, type: :list, default: ->(s) {ontology_has_domain(s)}
+ attribute :hasDomain, namespace: :omv, type: :list, default: ->(s) { ontology_has_domain(s) }
attribute :coverage, namespace: :dct
attribute :example, namespace: :vann, type: :list
# Methodology metadata
attribute :conformsToKnowledgeRepresentationParadigm, namespace: :omv
attribute :usedOntologyEngineeringMethodology, namespace: :omv
- attribute :usedOntologyEngineeringTool, namespace: :omv, type: %i[list uri]
- attribute :accrualMethod, namespace: :dct, type: %i[list uri]
+ attribute :usedOntologyEngineeringTool, namespace: :omv, type: %i[list]
+ attribute :accrualMethod, namespace: :dct, type: %i[list]
attribute :accrualPeriodicity, namespace: :dct
attribute :accrualPolicy, namespace: :dct
attribute :competencyQuestion, namespace: :mod, type: :list
@@ -122,13 +119,13 @@ class OntologySubmission < LinkedData::Models::Base
attribute :pullLocation, type: :uri # URI for pulling ontology
attribute :isFormatOf, namespace: :dct, type: :uri
attribute :hasFormat, namespace: :dct, type: %i[uri list]
- attribute :dataDump, namespace: :void, type: :uri, default: -> (s) {data_dump_default(s)}
- attribute :csvDump, type: :uri, default: -> (s) {csv_dump_default(s)}
- attribute :uriLookupEndpoint, namespace: :void, type: :uri, default: -> (s) {uri_lookup_default(s)}
- attribute :openSearchDescription, namespace: :void, type: :uri, default: -> (s) {open_search_default(s)}
+ attribute :dataDump, namespace: :void, type: :uri, default: -> (s) { data_dump_default(s) }
+ attribute :csvDump, type: :uri, default: -> (s) { csv_dump_default(s) }
+ attribute :uriLookupEndpoint, namespace: :void, type: :uri, default: -> (s) { uri_lookup_default(s) }
+ attribute :openSearchDescription, namespace: :void, type: :uri, default: -> (s) { open_search_default(s) }
attribute :source, namespace: :dct, type: :list
attribute :endpoint, namespace: :sd, type: %i[uri list],
- default: ->(s) {[RDF::URI.new(LinkedData.settings.sparql_endpoint_url)]}
+ default: ->(s) { default_sparql_endpoint(s)}
attribute :includedInDataCatalog, namespace: :schema, type: %i[list uri]
# Relations
@@ -150,11 +147,11 @@ class OntologySubmission < LinkedData::Models::Base
attribute :translationOfWork, namespace: :schema, type: %i[uri list]
# Content metadata
- attribute :uriRegexPattern, namespace: :void, type: :uri
+ attribute :uriRegexPattern, namespace: :void
attribute :preferredNamespaceUri, namespace: :vann, type: :uri
attribute :preferredNamespacePrefix, namespace: :vann
- attribute :exampleIdentifier, namespace: :idot, type: :class, default: ->(s) { LinkedData::Models::Class.in(s).first }
- attribute :keyClasses, namespace: :omv, type: %i[list class]
+ attribute :exampleIdentifier, namespace: :idot
+ attribute :keyClasses, namespace: :omv, type: %i[list]
attribute :metadataVoc, namespace: :voaf, type: %i[uri list]
attribute :uploadFilePath
attribute :diffFilePath
@@ -179,27 +176,27 @@ class OntologySubmission < LinkedData::Models::Base
attribute :identifierRequests, inverse: {on: :identifier_request, attribute: :submission}
attribute :ontology, type: :ontology, enforce: [:existence]
-
def self.agents_attrs
- [:hasCreator, :publisher, :copyrightHolder, :hasContributor,
- :translator, :endorsedBy, :fundedBy, :publisher, :curatedBy ]
+ %i[hasCreator publisher copyrightHolder hasContributor
+ translator endorsedBy fundedBy curatedBy]
end
+
# Hypermedia settings
- embed *[:contact, :ontology] + agents_attrs
+ embed *%i[contact ontology metrics] + agents_attrs
+
def self.embed_values_hash
out = {
- submissionStatus: [:code], hasOntologyLanguage: [:acronym], metrics: %i[classes individuals properties],
-
+ submissionStatus: [:code], hasOntologyLanguage: [:acronym]
}
agent_attributes = LinkedData::Models::Agent.goo_attrs_to_load +
[identifiers: LinkedData::Models::AgentIdentifier.goo_attrs_to_load, affiliations: LinkedData::Models::Agent.goo_attrs_to_load]
- agents_attrs.each { |k| out[k] = agent_attributes}
+ agents_attrs.each { |k| out[k] = agent_attributes }
out
end
- embed_values self.embed_values_hash
+ embed_values self.embed_values_hash
serialize_default :contact, :ontology, :hasOntologyLanguage, :released, :creationDate, :homepage,
:publication, :documentation, :version, :description, :status, :submissionId
@@ -219,6 +216,8 @@ def self.embed_values_hash
read_restriction_based_on ->(sub) { sub.ontology }
access_control_load ontology: %i[administeredBy acl viewingRestriction]
+ enable_indexing(:ontology_metadata)
+
def initialize(*args)
super(*args)
@mutex = Mutex.new
@@ -228,6 +227,9 @@ def synchronize(&block)
@mutex.synchronize(&block)
end
+ def self.agents_attr_uris
+ agents_attrs.map { |x| self.attribute_uri(x) }
+ end
def self.ontology_link(m)
ontology_link = ""
@@ -270,12 +272,8 @@ def self.segment_instance(sub)
end
def self.submission_id_generator(ss)
- if !ss.ontology.loaded_attributes.include?(:acronym)
- ss.ontology.bring(:acronym)
- end
- if ss.ontology.acronym.nil?
- raise ArgumentError, "Submission cannot be saved if ontology does not have acronym"
- end
+ ss.ontology.bring(:acronym) if !ss.ontology.loaded_attributes.include?(:acronym)
+ raise ArgumentError, "Submission cannot be saved if ontology does not have acronym" if ss.ontology.acronym.nil?
return RDF::URI.new(
"#{(Goo.id_prefix)}ontologies/#{CGI.escape(ss.ontology.acronym.to_s)}/submissions/#{ss.submissionId.to_s}"
)
@@ -294,12 +292,20 @@ def self.copy_file_repository(acronym, submissionId, src, filename = nil)
dst = File.join([path_to_repo, name])
FileUtils.copy(src, dst)
logger.debug("File created #{dst} | #{"%o" % File.stat(dst).mode} | umask: #{File.umask}") # NCBO-795
- if not File.exist? dst
- raise Exception, "Unable to copy #{src} to #{dst}"
- end
+ raise Exception, "Unable to copy #{src} to #{dst}" if not File.exist? dst
return dst
end
+ def self.clear_indexed_content(ontology)
+ conn = Goo.init_search_connection(:ontology_data)
+ begin
+ conn.delete_by_query("ontology_t:\"#{ontology}\"")
+ rescue StandardError => e
+ #puts e.message
+ end
+ conn
+ end
+
def valid?
valid_result = super
return false unless valid_result
@@ -345,9 +351,7 @@ def sanity_check
rescue Exception => e1
sum_only = nil
- if i == num_calls
- raise $!, "#{$!} after retrying #{i} times...", $!.backtrace
- end
+ raise $!, "#{$!} after retrying #{i} times...", $!.backtrace if i == num_calls
end
end
end
@@ -359,9 +363,7 @@ def sanity_check
return false
elsif self.pullLocation
self.errors[:pullLocation] = ["File at #{self.pullLocation.to_s} does not exist"]
- if self.uploadFilePath.nil?
- return remote_file_exists?(self.pullLocation.to_s)
- end
+ return remote_file_exists?(self.pullLocation.to_s) if self.uploadFilePath.nil?
return true
end
@@ -377,12 +379,10 @@ def sanity_check
self.masterFileName = LinkedData::Utils::FileHelpers.automaster(self.uploadFilePath, self.hasOntologyLanguage.file_extension)
return true
elsif zip and self.masterFileName.nil?
- #zip and masterFileName not set. The user has to choose.
- if self.errors[:uploadFilePath].nil?
- self.errors[:uploadFilePath] = []
- end
+ # zip and masterFileName not set. The user has to choose.
+ self.errors[:uploadFilePath] = [] if self.errors[:uploadFilePath].nil?
- #check for duplicated names
+ # check for duplicated names
repeated_names = LinkedData::Utils::FileHelpers.repeated_names_in_file_list(files)
if repeated_names.length > 0
names = repeated_names.keys.to_s
@@ -391,13 +391,13 @@ def sanity_check
return false
end
- #error message with options to choose from.
+ # error message with options to choose from.
self.errors[:uploadFilePath] << {
:message => "Zip file detected, choose the master file.", :options => files }
return false
elsif zip and not self.masterFileName.nil?
- #if zip and the user chose a file then we make sure the file is in the list.
+ # if zip and the user chose a file then we make sure the file is in the list.
files = LinkedData::Utils::FileHelpers.files_from_zip(self.uploadFilePath)
if not files.include? self.masterFileName
if self.errors[:uploadFilePath].nil?
@@ -463,9 +463,7 @@ def unzip_submission(logger)
if zipped?
zip_dst = self.zip_folder
- if Dir.exist? zip_dst
- FileUtils.rm_r [zip_dst]
- end
+ FileUtils.rm_r [zip_dst] if Dir.exist? zip_dst
FileUtils.mkdir_p zip_dst
extracted = LinkedData::Utils::FileHelpers.unzip(self.uploadFilePath, zip_dst)
@@ -483,56 +481,8 @@ def unzip_submission(logger)
zip_dst
end
- def delete_old_submission_files
- path_to_repo = data_folder
- submission_files = FILES_TO_DELETE.map { |f| File.join(path_to_repo, f) }
- submission_files.push(csv_path)
- submission_files.push(parsing_log_path) unless parsing_log_path.nil?
- FileUtils.rm(submission_files, force: true)
-
- submission_folders = FOLDERS_TO_DELETE.map { |f| File.join(path_to_repo, f) }
- submission_folders.each {|d| FileUtils.remove_dir(d) if File.directory?(d)}
- end
- def zip_submission_uploaded_file
- self.bring(:uploadFilePath) if self.bring?(:uploadFilePath)
- return self.uploadFilePath if zipped?
- return self.uploadFilePath if self.uploadFilePath.nil? || self.uploadFilePath.empty?
-
-
- return self.uploadFilePath if File.size(self.uploadFilePath) < FILE_SIZE_ZIPPING_THRESHOLD
-
-
- old_path = self.uploadFilePath
- new_path = Utils::FileHelpers.zip_file(old_path)
- FileUtils.rm(old_path, force: true)
- new_path
- end
-
- # accepts another submission in 'older' (it should be an 'older' ontology version)
- def diff(logger, older)
- begin
- bring_remaining
- bring :diffFilePath if bring? :diffFilePath
- older.bring :uploadFilePath if older.bring? :uploadFilePath
-
- LinkedData::Diff.logger = logger
- bubastis = LinkedData::Diff::BubastisDiffCommand.new(
- File.expand_path(older.master_file_path),
- File.expand_path(self.master_file_path),
- data_folder
- )
- self.diffFilePath = bubastis.diff
- save
- logger.info("Bubastis diff generated successfully for #{self.id}")
- logger.flush
- rescue Exception => e
- logger.error("Bubastis diff for #{self.id} failed - #{e.class}: #{e.message}")
- logger.flush
- raise e
- end
- end
def class_count(logger = nil)
logger ||= LinkedData::Parser.logger || Logger.new($stderr)
@@ -565,9 +515,7 @@ def class_count(logger = nil)
unless mx.empty?
count = mx[1][0].to_i
- if self.hasOntologyLanguage.skos?
- count += mx[1][1].to_i
- end
+ count += mx[1][1].to_i if self.hasOntologyLanguage.skos?
count_set = true
end
end
@@ -593,383 +541,12 @@ def metrics_from_file(logger = nil)
metrics
end
- def generate_metrics_file(class_count, indiv_count, prop_count)
- CSV.open(self.metrics_path, "wb") do |csv|
- csv << ["Class Count", "Individual Count", "Property Count"]
- csv << [class_count, indiv_count, prop_count]
- end
- end
-
- def generate_metrics_file2(class_count, indiv_count, prop_count, max_depth)
- CSV.open(self.metrics_path, "wb") do |csv|
- csv << ["Class Count", "Individual Count", "Property Count", "Max Depth"]
- csv << [class_count, indiv_count, prop_count, max_depth]
- end
- end
-
- def generate_umls_metrics_file(tr_file_path = nil)
- tr_file_path ||= self.triples_file_path
- class_count = 0
- indiv_count = 0
- prop_count = 0
-
- File.foreach(tr_file_path) do |line|
- class_count += 1 if line =~ /owl:Class/
- indiv_count += 1 if line =~ /owl:NamedIndividual/
- prop_count += 1 if line =~ /owl:ObjectProperty/
- prop_count += 1 if line =~ /owl:DatatypeProperty/
- end
- self.generate_metrics_file(class_count, indiv_count, prop_count)
- end
-
- def generate_rdf(logger, reasoning: true)
- mime_type = nil
-
- if self.hasOntologyLanguage.umls?
- triples_file_path = self.triples_file_path
- logger.info("Using UMLS turtle file found, skipping OWLAPI parse")
- logger.flush
- mime_type = LinkedData::MediaTypes.media_type_from_base(LinkedData::MediaTypes::TURTLE)
- generate_umls_metrics_file(triples_file_path)
- else
- output_rdf = self.rdf_path
-
- if File.exist?(output_rdf)
- logger.info("deleting old owlapi.xrdf ..")
- deleted = FileUtils.rm(output_rdf)
-
- if deleted.length > 0
- logger.info("deleted")
- else
- logger.info("error deleting owlapi.rdf")
- end
- end
- owlapi = owlapi_parser(logger: nil)
-
- if !reasoning
- owlapi.disable_reasoner
- end
- triples_file_path, missing_imports = owlapi.parse
-
- if missing_imports && missing_imports.length > 0
- self.missingImports = missing_imports
-
- missing_imports.each do |imp|
- logger.info("OWL_IMPORT_MISSING: #{imp}")
- end
- else
- self.missingImports = nil
- end
- logger.flush
- end
- delete_and_append(triples_file_path, logger, mime_type)
- end
-
- def process_callbacks(logger, callbacks, action_name, &block)
- callbacks.delete_if do |_, callback|
- begin
- if callback[action_name]
- callable = self.method(callback[action_name])
- yield(callable, callback)
- end
- false
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
-
- if callback[:status]
- add_submission_status(callback[:status].get_error_status)
- self.save
- end
-
- # halt the entire processing if :required is set to true
- raise e if callback[:required]
- # continue processing of other callbacks, but not this one
- true
- end
- end
- end
-
- def loop_classes(logger, raw_paging, callbacks)
- page = 1
- size = 2500
- count_classes = 0
- acr = self.id.to_s.split("/")[-1]
- operations = callbacks.values.map { |v| v[:op_name] }.join(", ")
-
- time = Benchmark.realtime do
- paging = raw_paging.page(page, size)
- cls_count_set = false
- cls_count = class_count(logger)
-
- if cls_count > -1
- # prevent a COUNT SPARQL query if possible
- paging.page_count_set(cls_count)
- cls_count_set = true
- else
- cls_count = 0
- end
-
- iterate_classes = false
- # 1. init artifacts hash if not explicitly passed in the callback
- # 2. determine if class-level iteration is required
- callbacks.each { |_, callback| callback[:artifacts] ||= {}; iterate_classes = true if callback[:caller_on_each] }
-
- process_callbacks(logger, callbacks, :caller_on_pre) {
- |callable, callback| callable.call(callback[:artifacts], logger, paging) }
-
- page_len = -1
- prev_page_len = -1
-
- begin
- t0 = Time.now
- page_classes = paging.page(page, size).all
- total_pages = page_classes.total_pages
- page_len = page_classes.length
-
- # nothing retrieved even though we're expecting more records
- if total_pages > 0 && page_classes.empty? && (prev_page_len == -1 || prev_page_len == size)
- j = 0
- num_calls = LinkedData.settings.num_retries_4store
-
- while page_classes.empty? && j < num_calls do
- j += 1
- logger.error("Empty page encountered. Retrying #{j} times...")
- sleep(2)
- page_classes = paging.page(page, size).all
- logger.info("Success retrieving a page of #{page_classes.length} classes after retrying #{j} times...") unless page_classes.empty?
- end
-
- if page_classes.empty?
- msg = "Empty page #{page} of #{total_pages} persisted after retrying #{j} times. #{operations} of #{acr} aborted..."
- logger.error(msg)
- raise msg
- end
- end
-
- if page_classes.empty?
- if total_pages > 0
- logger.info("The number of pages reported for #{acr} - #{total_pages} is higher than expected #{page - 1}. Completing #{operations}...")
- else
- logger.info("Ontology #{acr} contains #{total_pages} pages...")
- end
- break
- end
-
- prev_page_len = page_len
- logger.info("#{acr}: page #{page} of #{total_pages} - #{page_len} ontology terms retrieved in #{Time.now - t0} sec.")
- logger.flush
- count_classes += page_classes.length
-
- process_callbacks(logger, callbacks, :caller_on_pre_page) {
- |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }
-
- page_classes.each { |c|
- # Invokes each callback's :caller_on_each handler (here, generate_missing_labels_each) for every class on the page
- process_callbacks(logger, callbacks, :caller_on_each) {
- |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page, c) }
- } if iterate_classes
-
- process_callbacks(logger, callbacks, :caller_on_post_page) {
- |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }
- cls_count += page_classes.length unless cls_count_set
-
- page = page_classes.next? ? page + 1 : nil
- end while !page.nil?
-
- callbacks.each { |_, callback| callback[:artifacts][:count_classes] = cls_count }
- process_callbacks(logger, callbacks, :caller_on_post) {
- |callable, callback| callable.call(callback[:artifacts], logger, paging) }
- end
-
- logger.info("Completed #{operations}: #{acr} in #{time} sec. #{count_classes} classes.")
- logger.flush
-
- # set the status on actions that have completed successfully
- callbacks.each do |_, callback|
- if callback[:status]
- add_submission_status(callback[:status])
- self.save
- end
- end
- end
-
- def generate_missing_labels_pre(artifacts = {}, logger, paging)
- file_path = artifacts[:file_path]
- artifacts[:save_in_file] = File.join(File.dirname(file_path), "labels.ttl")
- artifacts[:save_in_file_mappings] = File.join(File.dirname(file_path), "mappings.ttl")
- property_triples = LinkedData::Utils::Triples.rdf_for_custom_properties(self)
- Goo.sparql_data_client.append_triples(self.id, property_triples, mime_type = "application/x-turtle")
- fsave = File.open(artifacts[:save_in_file], "w")
- fsave.write(property_triples)
- fsave_mappings = File.open(artifacts[:save_in_file_mappings], "w")
- artifacts[:fsave] = fsave
- artifacts[:fsave_mappings] = fsave_mappings
- end
-
- def generate_missing_labels_pre_page(artifacts = {}, logger, paging, page_classes, page)
- artifacts[:label_triples] = []
- artifacts[:mapping_triples] = []
- end
-
- # Generate labels when no label found in the prefLabel attribute (it checks rdfs:label and take label from the URI if nothing else found)
- def generate_missing_labels_each(artifacts = {}, logger, paging, page_classes, page, c)
- prefLabel = nil
-
- if c.prefLabel.nil?
- begin
- # in case there is no skos:prefLabel or rdfs:label from our main_lang
- rdfs_labels = c.label
-
- if rdfs_labels && rdfs_labels.length > 1 && c.synonym.length > 0
- rdfs_labels = (Set.new(c.label) - Set.new(c.synonym)).to_a.first
-
- if rdfs_labels.nil? || rdfs_labels.length == 0
- rdfs_labels = c.label
- end
- end
-
- if rdfs_labels and not (rdfs_labels.instance_of? Array)
- rdfs_labels = [rdfs_labels]
- end
- label = nil
-
- if rdfs_labels && rdfs_labels.length > 0
- label = rdfs_labels[0]
- else
- # If no label found, we take the last fragment of the URI
- label = LinkedData::Utils::Triples.last_iri_fragment c.id.to_s
- end
- rescue Goo::Base::AttributeNotLoaded => e
- label = LinkedData::Utils::Triples.last_iri_fragment c.id.to_s
- end
- artifacts[:label_triples] << LinkedData::Utils::Triples.label_for_class_triple(
- c.id, Goo.vocabulary(:metadata_def)[:prefLabel], label)
- prefLabel = label
- else
- prefLabel = c.prefLabel
- end
-
- if self.ontology.viewOf.nil?
- loomLabel = OntologySubmission.loom_transform_literal(prefLabel.to_s)
-
- if loomLabel.length > 2
- artifacts[:mapping_triples] << LinkedData::Utils::Triples.loom_mapping_triple(
- c.id, Goo.vocabulary(:metadata_def)[:mappingLoom], loomLabel)
- end
- artifacts[:mapping_triples] << LinkedData::Utils::Triples.uri_mapping_triple(
- c.id, Goo.vocabulary(:metadata_def)[:mappingSameURI], c.id)
- end
- end
-
- def generate_missing_labels_post_page(artifacts = {}, logger, paging, page_classes, page)
- rest_mappings = LinkedData::Mappings.migrate_rest_mappings(self.ontology.acronym)
- artifacts[:mapping_triples].concat(rest_mappings)
-
- if artifacts[:label_triples].length > 0
- logger.info("Asserting #{artifacts[:label_triples].length} labels in " +
- "#{self.id.to_ntriples}")
- logger.flush
- artifacts[:label_triples] = artifacts[:label_triples].join("\n")
- artifacts[:fsave].write(artifacts[:label_triples])
- t0 = Time.now
- Goo.sparql_data_client.append_triples(self.id, artifacts[:label_triples], mime_type = "application/x-turtle")
- t1 = Time.now
- logger.info("Labels asserted in #{t1 - t0} sec.")
- logger.flush
- else
- logger.info("No labels generated in page #{page}.")
- logger.flush
- end
-
- if artifacts[:mapping_triples].length > 0
- logger.info("Asserting #{artifacts[:mapping_triples].length} mappings in " +
- "#{self.id.to_ntriples}")
- logger.flush
- artifacts[:mapping_triples] = artifacts[:mapping_triples].join("\n")
- artifacts[:fsave_mappings].write(artifacts[:mapping_triples])
-
- t0 = Time.now
- Goo.sparql_data_client.append_triples(self.id, artifacts[:mapping_triples], mime_type = "application/x-turtle")
- t1 = Time.now
- logger.info("Mapping labels asserted in #{t1 - t0} sec.")
- logger.flush
- end
- end
-
- def generate_missing_labels_post(artifacts = {}, logger, paging)
- logger.info("end generate_missing_labels traversed #{artifacts[:count_classes]} classes")
- logger.info("Saved generated labels in #{artifacts[:save_in_file]}")
- artifacts[:fsave].close()
- artifacts[:fsave_mappings].close()
- logger.flush
- end
-
- def generate_obsolete_classes(logger, file_path)
- self.bring(:obsoleteProperty) if self.bring?(:obsoleteProperty)
- self.bring(:obsoleteParent) if self.bring?(:obsoleteParent)
- classes_deprecated = []
- if self.obsoleteProperty &&
- self.obsoleteProperty.to_s != "http://www.w3.org/2002/07/owl#deprecated"
-
- predicate_obsolete = RDF::URI.new(self.obsoleteProperty.to_s)
- query_obsolete_predicate = <<eos
- [heredoc SPARQL query over obsoleteProperty and the result-collection code lost in extraction]
- if classes_deprecated.length > 0
- classes_deprecated.uniq!
- logger.info("Asserting owl:deprecated statement for #{classes_deprecated} classes")
- save_in_file = File.join(File.dirname(file_path), "obsolete.ttl")
- fsave = File.open(save_in_file, "w")
- classes_deprecated.each do |class_id|
- fsave.write(LinkedData::Utils::Triples.obselete_class_triple(class_id) + "\n")
- end
- fsave.close()
- result = Goo.sparql_data_client.append_triples_from_file(
- self.id,
- save_in_file,
- mime_type = "application/x-turtle")
- end
- end
def add_submission_status(status)
valid = status.is_a?(LinkedData::Models::SubmissionStatus)
raise ArgumentError, "The status being added is not SubmissionStatus object" unless valid
- #archive removes the other status
+ # archive removes the other status
if status.archived?
self.submissionStatus = [status]
return self.submissionStatus
@@ -981,7 +558,9 @@ def add_submission_status(status)
if (status.error?)
# remove the corresponding non_error status (if exists)
non_error_status = status.get_non_error_status()
- s.reject! { |stat| stat.get_code_from_id() == non_error_status.get_code_from_id() } unless non_error_status.nil?
+ unless non_error_status.nil?
+ s.reject! { |stat| stat.get_code_from_id() == non_error_status.get_code_from_id() }
+ end
else
# remove the corresponding non_error status (if exists)
error_status = status.get_error_status()
@@ -1045,498 +624,8 @@ def archived?
return ready?(status: [:archived])
end
- def archive_submission
- self.submissionStatus = nil
- status = LinkedData::Models::SubmissionStatus.find("ARCHIVED").first
- add_submission_status(status)
-
- # Delete everything except for original ontology file.
- ontology.bring(:submissions)
- submissions = ontology.submissions
- unless submissions.nil?
- submissions.each { |s| s.bring(:submissionId) }
- submission = submissions.sort { |a, b| b.submissionId <=> a.submissionId }[0]
- # Don't perform deletion if this is the most recent submission.
- if self.submissionId < submission.submissionId
- delete_old_submission_files
- self.uploadFilePath = zip_submission_uploaded_file
- end
- end
- end
-
- ################################################################
- # Possible options with their defaults:
- # process_rdf = false
- # index_search = false
- # index_properties = false
- # index_commit = false
- # run_metrics = false
- # reasoning = false
- # diff = false
- # archive = false
- # if no options passed, ALL actions, except for archive = true
- ################################################################
- def process_submission(logger, options = {})
- # Wrap the whole process so we can email results
- begin
- process_rdf = false
- extract_metadata = false
- index_search = false
- index_properties = false
- index_commit = false
- run_metrics = false
- reasoning = false
- diff = false
- archive = false
-
- if options.empty?
- process_rdf = true
- extract_metadata = true
- index_search = true
- index_properties = true
- index_commit = true
- run_metrics = true
- reasoning = true
- diff = true
- archive = false
- else
- process_rdf = options[:process_rdf] == true ? true : false
- extract_metadata = options[:extract_metadata] == true ? true : false
- index_search = options[:index_search] == true ? true : false
- index_properties = options[:index_properties] == true ? true : false
- index_commit = options[:index_commit] == true ? true : false
- run_metrics = options[:run_metrics] == true ? true : false
-
- if !process_rdf || options[:reasoning] == false
- reasoning = false
- else
- reasoning = true
- end
-
- if (!index_search && !index_properties) || options[:index_commit] == false
- index_commit = false
- else
- index_commit = true
- end
-
- diff = options[:diff] == true ? true : false
- archive = options[:archive] == true ? true : false
- end
-
- self.bring_remaining
- self.ontology.bring_remaining
-
- logger.info("Starting to process #{self.ontology.acronym}/submissions/#{self.submissionId}")
- logger.flush
- LinkedData::Parser.logger = logger
- status = nil
-
- if archive
- archive_submission
- else
- if process_rdf
- # Remove processing status types before starting RDF parsing etc.
- self.submissionStatus = nil
- status = LinkedData::Models::SubmissionStatus.find("UPLOADED").first
- add_submission_status(status)
- self.save
-
- # Parse RDF
- begin
- if not self.valid?
- error = "Submission is not valid, it cannot be processed. Check errors."
- raise ArgumentError, error
- end
- if not self.uploadFilePath
- error = "Submission is missing an ontology file, cannot parse."
- raise ArgumentError, error
- end
- status = LinkedData::Models::SubmissionStatus.find("RDF").first
- remove_submission_status(status) #remove RDF status before starting
-
- generate_rdf(logger, reasoning: reasoning)
-
- add_submission_status(status)
- self.save
- rescue Exception => e
- logger.error("#{self.errors}")
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- add_submission_status(status.get_error_status)
- self.save
- # If RDF generation fails, no point of continuing
- raise e
- end
- end
-
- extract_metadata(logger, options[:params]) if extract_metadata || process_rdf
-
- if process_rdf
- file_path = self.uploadFilePath
- callbacks = {
- missing_labels: {
- op_name: "Missing Labels Generation",
- required: true,
- status: LinkedData::Models::SubmissionStatus.find("RDF_LABELS").first,
- artifacts: {
- file_path: file_path
- },
- caller_on_pre: :generate_missing_labels_pre,
- caller_on_pre_page: :generate_missing_labels_pre_page,
- caller_on_each: :generate_missing_labels_each,
- caller_on_post_page: :generate_missing_labels_post_page,
- caller_on_post: :generate_missing_labels_post
- }
- }
-
- raw_paging = LinkedData::Models::Class.in(self).include(:prefLabel, :synonym, :label)
- loop_classes(logger, raw_paging, callbacks)
-
- status = LinkedData::Models::SubmissionStatus.find("OBSOLETE").first
- begin
- generate_obsolete_classes(logger, file_path)
- add_submission_status(status)
- self.save
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- add_submission_status(status.get_error_status)
- self.save
- # if obsolete fails the parsing fails
- raise e
- end
- end
-
- parsed = ready?(status: %i[rdf rdf_labels])
-
- if index_search
- raise Exception, "The submission #{self.ontology.acronym}/submissions/#{self.submissionId} cannot be indexed because it has not been successfully parsed" unless parsed
- status = LinkedData::Models::SubmissionStatus.find("INDEXED").first
- begin
- index(logger, index_commit, false)
- add_submission_status(status)
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- add_submission_status(status.get_error_status)
- if File.file?(self.csv_path)
- FileUtils.rm(self.csv_path)
- end
- ensure
- self.save
- end
- end
-
- if index_properties
- raise Exception, "The properties for the submission #{self.ontology.acronym}/submissions/#{self.submissionId} cannot be indexed because it has not been successfully parsed" unless parsed
- status = LinkedData::Models::SubmissionStatus.find("INDEXED_PROPERTIES").first
- begin
- index_properties(logger, index_commit, false)
- add_submission_status(status)
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- add_submission_status(status.get_error_status)
- ensure
- self.save
- end
- end
-
- if run_metrics
- raise Exception, "Metrics cannot be generated on the submission #{self.ontology.acronym}/submissions/#{self.submissionId} because it has not been successfully parsed" unless parsed
- status = LinkedData::Models::SubmissionStatus.find("METRICS").first
- begin
- process_metrics(logger)
- add_submission_status(status)
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- self.metrics = nil
- add_submission_status(status.get_error_status)
- ensure
- self.save
- end
- end
-
- if diff
- status = LinkedData::Models::SubmissionStatus.find("DIFF").first
- # Get previous submission from ontology.submissions
- self.ontology.bring(:submissions)
- submissions = self.ontology.submissions
-
- unless submissions.nil?
- submissions.each { |s| s.bring(:submissionId, :diffFilePath) }
- # Sort submissions in descending order of submissionId, extract last two submissions
- recent_submissions = submissions.sort { |a, b| b.submissionId <=> a.submissionId }[0..1]
-
- if recent_submissions.length > 1
- # validate that the most recent submission is the current submission
- if self.submissionId == recent_submissions.first.submissionId
- prev = recent_submissions.last
-
- # Ensure that prev is older than the current submission
- if self.submissionId > prev.submissionId
- # generate a diff
- begin
- self.diff(logger, prev)
- add_submission_status(status)
- rescue Exception => e
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- add_submission_status(status.get_error_status)
- ensure
- self.save
- end
- end
- end
- else
- logger.info("Bubastis diff: no older submissions available for #{self.id}.")
- end
- else
- logger.info("Bubastis diff: no submissions available for #{self.id}.")
- end
- end
- end
-
- self.save
- logger.info("Submission processing of #{self.id} completed successfully")
- logger.flush
- ensure
- # make sure results get emailed
- begin
- LinkedData::Utils::Notifications.submission_processed(self)
- rescue Exception => e
- logger.error("Email sending failed: #{e.message}\n#{e.backtrace.join("\n\t")}"); logger.flush
- end
- end
- self
- end
-
- def process_metrics(logger)
- metrics = LinkedData::Metrics.metrics_for_submission(self, logger)
- metrics.id = RDF::URI.new(self.id.to_s + "/metrics")
- exist_metrics = LinkedData::Models::Metric.find(metrics.id).first
- exist_metrics.delete if exist_metrics
- metrics.save
-
- self.metrics = metrics
- self
- end
-
- def index(logger, commit = true, optimize = true)
- page = 0
- size = 1000
- count_classes = 0
-
- time = Benchmark.realtime do
- self.bring(:ontology) if self.bring?(:ontology)
- self.ontology.bring(:acronym) if self.ontology.bring?(:acronym)
- self.ontology.bring(:provisionalClasses) if self.ontology.bring?(:provisionalClasses)
- csv_writer = LinkedData::Utils::OntologyCSVWriter.new
- csv_writer.open(self.ontology, self.csv_path)
-
- begin
- logger.info("Indexing ontology terms: #{self.ontology.acronym}...")
- t0 = Time.now
- self.ontology.unindex(false)
- logger.info("Removed ontology terms index (#{Time.now - t0}s)"); logger.flush
-
- paging = LinkedData::Models::Class.in(self).include(:unmapped).aggregate(:count, :children).page(page, size)
- # a fix for SKOS ontologies, see https://github.com/ncbo/ontologies_api/issues/20
- self.bring(:hasOntologyLanguage) unless self.loaded_attributes.include?(:hasOntologyLanguage)
- cls_count = self.hasOntologyLanguage.skos? ? -1 : class_count(logger)
- paging.page_count_set(cls_count) unless cls_count < 0
- total_pages = paging.page(1, size).all.total_pages
- num_threads = [total_pages, LinkedData.settings.indexing_num_threads].min
- threads = []
- page_classes = nil
-
- num_threads.times do |num|
- threads[num] = Thread.new {
- Thread.current["done"] = false
- Thread.current["page_len"] = -1
- Thread.current["prev_page_len"] = -1
-
- while !Thread.current["done"]
- synchronize do
- page = (page == 0 || page_classes.next?) ? page + 1 : nil
-
- if page.nil?
- Thread.current["done"] = true
- else
- Thread.current["page"] = page || "nil"
- page_classes = paging.page(page, size).all
- count_classes += page_classes.length
- Thread.current["page_classes"] = page_classes
- Thread.current["page_len"] = page_classes.length
- Thread.current["t0"] = Time.now
-
- # nothing retrieved even though we're expecting more records
- if total_pages > 0 && page_classes.empty? && (Thread.current["prev_page_len"] == -1 || Thread.current["prev_page_len"] == size)
- j = 0
- num_calls = LinkedData.settings.num_retries_4store
-
- while page_classes.empty? && j < num_calls do
- j += 1
- logger.error("Thread #{num + 1}: Empty page encountered. Retrying #{j} times...")
- sleep(2)
- page_classes = paging.page(page, size).all
- logger.info("Thread #{num + 1}: Success retrieving a page of #{page_classes.length} classes after retrying #{j} times...") unless page_classes.empty?
- end
-
- if page_classes.empty?
- msg = "Thread #{num + 1}: Empty page #{Thread.current["page"]} of #{total_pages} persisted after retrying #{j} times. Indexing of #{self.id.to_s} aborted..."
- logger.error(msg)
- raise msg
- else
- Thread.current["page_classes"] = page_classes
- end
- end
-
- if page_classes.empty?
- if total_pages > 0
- logger.info("Thread #{num + 1}: The number of pages reported for #{self.id.to_s} - #{total_pages} is higher than expected #{page - 1}. Completing indexing...")
- else
- logger.info("Thread #{num + 1}: Ontology #{self.id.to_s} contains #{total_pages} pages...")
- end
-
- break
- end
-
- Thread.current["prev_page_len"] = Thread.current["page_len"]
- end
- end
-
- break if Thread.current["done"]
-
- logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} - #{Thread.current["page_len"]} ontology terms retrieved in #{Time.now - Thread.current["t0"]} sec.")
- Thread.current["t0"] = Time.now
-
- Thread.current["page_classes"].each do |c|
- begin
- # this call is needed for indexing of properties
- LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates)
- rescue Exception => e
- i = 0
- num_calls = LinkedData.settings.num_retries_4store
- success = nil
-
- while success.nil? && i < num_calls do
- i += 1
- logger.error("Thread #{num + 1}: Exception while mapping attributes for #{c.id.to_s}. Retrying #{i} times...")
- sleep(2)
-
- begin
- LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates)
- logger.info("Thread #{num + 1}: Success mapping attributes for #{c.id.to_s} after retrying #{i} times...")
- success = true
- rescue Exception => e1
- success = nil
-
- if i == num_calls
- logger.error("Thread #{num + 1}: Error mapping attributes for #{c.id.to_s}:")
- logger.error("Thread #{num + 1}: #{e1.class}: #{e1.message} after retrying #{i} times...\n#{e1.backtrace.join("\n\t")}")
- logger.flush
- end
- end
- end
- end
-
- synchronize do
- csv_writer.write_class(c)
- end
- end
- logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} attributes mapped in #{Time.now - Thread.current["t0"]} sec.")
-
- Thread.current["t0"] = Time.now
- LinkedData::Models::Class.indexBatch(Thread.current["page_classes"])
- logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} - #{Thread.current["page_len"]} ontology terms indexed in #{Time.now - Thread.current["t0"]} sec.")
- logger.flush
- end
- }
- end
-
- threads.map { |t| t.join }
- csv_writer.close
-
- begin
- # index provisional classes
- self.ontology.provisionalClasses.each { |pc| pc.index }
- rescue Exception => e
- logger.error("Error while indexing provisional classes for ontology #{self.ontology.acronym}:")
- logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
- logger.flush
- end
-
- if commit
- t0 = Time.now
- LinkedData::Models::Class.indexCommit()
- logger.info("Ontology terms index commit in #{Time.now - t0} sec.")
- end
- rescue StandardError => e
- csv_writer.close
- logger.error("\n\n#{e.class}: #{e.message}\n")
- logger.error(e.backtrace)
- raise e
- end
- end
- logger.info("Completed indexing ontology terms: #{self.ontology.acronym} in #{time} sec. #{count_classes} classes.")
- logger.flush
-
- if optimize
- logger.info("Optimizing ontology terms index...")
- time = Benchmark.realtime do
- LinkedData::Models::Class.indexOptimize()
- end
- logger.info("Completed optimizing ontology terms index in #{time} sec.")
- end
- end
-
- def index_properties(logger, commit = true, optimize = true)
- page = 1
- size = 2500
- count_props = 0
-
- time = Benchmark.realtime do
- self.bring(:ontology) if self.bring?(:ontology)
- self.ontology.bring(:acronym) if self.ontology.bring?(:acronym)
- logger.info("Indexing ontology properties: #{self.ontology.acronym}...")
- t0 = Time.now
- self.ontology.unindex_properties(commit)
- logger.info("Removed ontology properties index in #{Time.now - t0} seconds."); logger.flush
-
- props = self.ontology.properties
- count_props = props.length
- total_pages = (count_props / size.to_f).ceil
- logger.info("Indexing a total of #{total_pages} pages of #{size} properties each.")
-
- props.each_slice(size) do |prop_batch|
- t = Time.now
- LinkedData::Models::Class.indexBatch(prop_batch, :property)
- logger.info("Page #{page} of ontology properties indexed in #{Time.now - t} seconds."); logger.flush
- page += 1
- end
-
- if commit
- t0 = Time.now
- LinkedData::Models::Class.indexCommit(nil, :property)
- logger.info("Ontology properties index commit in #{Time.now - t0} seconds.")
- end
- end
- logger.info("Completed indexing ontology properties of #{self.ontology.acronym} in #{time} sec. Total of #{count_props} properties indexed.")
- logger.flush
-
- if optimize
- logger.info("Optimizing ontology properties index...")
- time = Benchmark.realtime do
- LinkedData::Models::Class.indexOptimize(nil, :property)
- end
- logger.info("Completed optimizing ontology properties index in #{time} seconds.")
- end
- end
-
# Override delete to add removal from the search index
- #TODO: revise this with a better process
+ # TODO: revise this with a better process
def delete(*args)
options = {}
args.each { |e| options.merge!(e) if e.is_a?(Hash) }
@@ -1544,8 +633,7 @@ def delete(*args)
index_commit = options[:index_commit] == false ? false : true
super(*args)
- self.ontology.unindex(index_commit)
- self.ontology.unindex_properties(index_commit)
+ self.ontology.unindex_all_data(index_commit)
self.bring(:metrics) if self.bring?(:metrics)
self.metrics.delete if self.metrics
@@ -1555,10 +643,10 @@ def delete(*args)
self.ontology.bring(:submissions)
if self.ontology.submissions.length > 0
- prev_sub = self.ontology.latest_submission()
+ prev_sub = self.ontology.latest_submission
if prev_sub
- prev_sub.index(LinkedData::Parser.logger || Logger.new($stderr))
+ prev_sub.index_terms(LinkedData::Parser.logger || Logger.new($stderr))
prev_sub.index_properties(LinkedData::Parser.logger || Logger.new($stderr))
end
end
@@ -1638,15 +726,11 @@ def roots(extra_include = [], page = nil, pagesize = nil, concept_schemes: [], c
load_children = [:children]
end
- if extra_include.length > 0
- where.include(extra_include)
- end
+ where.include(extra_include) if extra_include.length > 0
end
where.all
- if load_children.length > 0
- LinkedData::Models::Class.partially_load_children(classes, 99, self)
- end
+ LinkedData::Models::Class.partially_load_children(classes, 99, self) if load_children.length > 0
classes.delete_if { |c|
obs = !c.obsolete.nil? && c.obsolete == true
@@ -1674,7 +758,7 @@ def uri
end
def uri=(uri)
- self.URI = uri
+ self.URI = RDF::URI.new(uri)
end
def roots_sorted(extra_include = nil, concept_schemes: [])
@@ -1734,17 +818,6 @@ def parsable?(logger: Logger.new($stdout))
parsable
end
- private
-
- def owlapi_parser_input
- path = if zipped?
- self.zip_folder
- else
- self.uploadFilePath
- end
- File.expand_path(path)
- end
-
def owlapi_parser(logger: Logger.new($stdout))
unzip_submission(logger)
LinkedData::Parser::OWLAPICommand.new(
@@ -1754,11 +827,15 @@ def owlapi_parser(logger: Logger.new($stdout))
logger: logger)
end
- def delete_and_append(triples_file_path, logger, mime_type = nil)
- Goo.sparql_data_client.delete_graph(self.id)
- Goo.sparql_data_client.put_triples(self.id, triples_file_path, mime_type)
- logger.info("Triples #{triples_file_path} appended in #{self.id.to_ntriples}")
- logger.flush
+ private
+
+ def owlapi_parser_input
+ path = if zipped?
+ self.zip_folder
+ else
+ self.uploadFilePath
+ end
+ File.expand_path(path)
end
def check_http_file(url)
@@ -1791,9 +868,7 @@ def check_ftp_file(uri)
def self.loom_transform_literal(lit)
res = []
lit.each_char do |c|
- if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
- res << c.downcase
- end
+ res << c.downcase if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
end
return res.join ''
end
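`loom_transform_literal` is unchanged in behavior by the one-liner rewrite; a quick sketch of the normalization it performs:

```ruby
LinkedData::Models::OntologySubmission.loom_transform_literal('Myocardial Infarction (MI)')
# => "myocardialinfarctionmi"   # only [A-Za-z0-9] survive, lowercased
```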
diff --git a/lib/ontologies_linked_data/models/properties/annotation_property.rb b/lib/ontologies_linked_data/models/properties/annotation_property.rb
index b071d09f..783e7021 100644
--- a/lib/ontologies_linked_data/models/properties/annotation_property.rb
+++ b/lib/ontologies_linked_data/models/properties/annotation_property.rb
@@ -34,6 +34,10 @@ class AnnotationProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/datatype_property.rb b/lib/ontologies_linked_data/models/properties/datatype_property.rb
index 1974bdb2..13d7b431 100644
--- a/lib/ontologies_linked_data/models/properties/datatype_property.rb
+++ b/lib/ontologies_linked_data/models/properties/datatype_property.rb
@@ -34,6 +34,10 @@ class DatatypeProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/object_property.rb b/lib/ontologies_linked_data/models/properties/object_property.rb
index 8abbc52f..0a85f2da 100644
--- a/lib/ontologies_linked_data/models/properties/object_property.rb
+++ b/lib/ontologies_linked_data/models/properties/object_property.rb
@@ -34,6 +34,10 @@ class ObjectProperty < LinkedData::Models::OntologyProperty
LinkedData::Hypermedia::Link.new("ancestors", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/ancestors"}, self.uri_type),
LinkedData::Hypermedia::Link.new("descendants", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/descendants"}, self.uri_type),
LinkedData::Hypermedia::Link.new("tree", lambda {|m| "#{self.ontology_link(m)}/properties/#{CGI.escape(m.id.to_s)}/tree"}, self.uri_type)
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
end
end
diff --git a/lib/ontologies_linked_data/models/properties/ontology_property.rb b/lib/ontologies_linked_data/models/properties/ontology_property.rb
index 1e9ced84..ac2c1499 100644
--- a/lib/ontologies_linked_data/models/properties/ontology_property.rb
+++ b/lib/ontologies_linked_data/models/properties/ontology_property.rb
@@ -3,6 +3,36 @@ module LinkedData
module Models
class OntologyProperty < LinkedData::Models::Base
+ model :ontology_property, name_with: ->(p) { uuid_uri_generator(p) }
+
+
+ def self.index_schema(schema_generator)
+ schema_generator.add_field(:label, 'text_general', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:labelGenerated, 'text_general', indexed: true, stored: true, multi_valued: true)
+
+ schema_generator.add_field(:definition, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:submissionAcronym, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:parents, 'string', indexed: true, stored: true, multi_valued: true)
+ schema_generator.add_field(:ontologyType, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:propertyType, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:ontologyId, 'string', indexed: true, stored: true, multi_valued: false)
+ schema_generator.add_field(:submissionId, 'pint', indexed: true, stored: true, multi_valued: false)
+
+ %i[label labelGenerated].each do |field|
+ schema_generator.add_copy_field(field, '_text_')
+ schema_generator.add_copy_field(field, "#{field}Exact")
+ schema_generator.add_copy_field(field, "#{field}Suggest")
+ schema_generator.add_copy_field(field, "#{field}SuggestEdge")
+ schema_generator.add_copy_field(field, "#{field}SuggestNgram")
+ end
+ end
+
+
+ enable_indexing(:prop_search_core1, :property) do |schema_generator|
+ index_schema(schema_generator)
+ end
+
+
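For readers new to the schema generator: each `add_field` call declares one Solr field, and `add_copy_field` wires up the exact/suggest/ngram variants. A rough Schema-API equivalent of one declaration (the JSON shape is an assumption, not the generator's verified output):

```ruby
# Hypothetical payload corresponding to the :submissionId declaration above
{ 'add-field' => { 'name' => 'submissionId', 'type' => 'pint',
                   'indexed' => true, 'stored' => true, 'multiValued' => false } }
```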
def retrieve_ancestors
retrieve_ancestors_descendants(:ancestors)
@@ -234,7 +264,7 @@ def index_doc(to_set=nil)
}
all_attrs = self.to_hash
- std = [:id, :label, :definition, :parents]
+ std = %i[id label definition parents]
std.each do |att|
cur_val = all_attrs[att]
@@ -288,7 +318,7 @@ def traverse_path_to_root(parents, paths, path_i, tree=false, top_property=nil)
rec_i = recursions[i]
path = paths[rec_i]
p = path.last
- p.bring(parents: [:label, :definition]) if p.bring?(:parents)
+ p.bring(parents: %i[label definition]) if p.bring?(:parents)
unless p.loaded_attributes.include?(:parents)
# fail safely
@@ -313,7 +343,7 @@ def self.ontology_link(m)
end
def self.partially_load_children(models, threshold, submission)
- ld = [:label, :definition]
+ ld = %i[label definition]
single_load = []
query = self.in(submission).models(models)
query.aggregate(:count, :children).all
diff --git a/lib/ontologies_linked_data/models/provisional_class.rb b/lib/ontologies_linked_data/models/provisional_class.rb
index b6c1a79e..1f4b06c4 100644
--- a/lib/ontologies_linked_data/models/provisional_class.rb
+++ b/lib/ontologies_linked_data/models/provisional_class.rb
@@ -38,6 +38,10 @@ class ProvisionalClass < LinkedData::Models::Base
end
}, Goo.vocabulary["Ontology"])
+ enable_indexing(:term_search_core1) do |schema_generator|
+ Class.index_schema(schema_generator)
+ end
+
def index_id()
self.bring(:ontology) if self.bring?(:ontology)
return nil unless self.ontology
@@ -141,38 +145,6 @@ def append_if_not_there_already(path, r)
true
end
- def index()
- if index_id
- unindex
- super
- LinkedData::Models::Ontology.indexCommit
- end
- end
-
- def unindex()
- ind_id = index_id
-
- if ind_id
- query = "id:#{solr_escape(ind_id)}"
- LinkedData::Models::Ontology.unindexByQuery(query)
- LinkedData::Models::Ontology.indexCommit
- end
- end
-
- ##
- # Override save to allow indexing
- def save(*args)
- super(*args)
- index
- self
- end
-
- def delete(*args)
- # remove index entries
- unindex
- super(*args)
- end
-
def solr_escape(text)
RSolr.solr_escape(text).gsub(/\s+/,"\\ ")
end
diff --git a/lib/ontologies_linked_data/models/resource.rb b/lib/ontologies_linked_data/models/resource.rb
new file mode 100644
index 00000000..9bccc785
--- /dev/null
+++ b/lib/ontologies_linked_data/models/resource.rb
@@ -0,0 +1,187 @@
+require 'rdf/raptor'
+
+module LinkedData
+ module Models
+
+ class Resource
+
+ def initialize(graph, id)
+ @id = id
+ @graph = graph
+ @hash = fetch_related_triples(graph, id)
+ end
+
+ def to_hash
+ @hash.dup
+ end
+
+ def to_object
+ hashes = self.to_hash
+ class_name = "GeneratedModel_#{Time.now.to_i}_#{rand(10000..99999)}"
+ model_schema = ::Class.new(LinkedData::Models::Base)
+ Object.const_set(class_name, model_schema)
+
+ model_schema.model(:resource, name_with: :id, rdf_type: lambda { |*_x| self.to_hash[Goo.namespaces[:rdf][:type].to_s] })
+ values_hash = {}
+ hashes.each do |predicate, value|
+ namespace, attr = namespace_predicate(predicate)
+ next if namespace.nil?
+
+ values = Array(value).map do |v|
+ if v.is_a?(Hash)
+ Struct.new(*v.keys.map { |k| namespace_predicate(k)[1].to_sym }.compact).new(*v.values)
+ else
+ v.is_a?(RDF::URI) ? v.to_s : v.object
+ end
+ end.compact
+
+ model_schema.attribute(attr.to_sym, property: namespace.to_s, enforce: get_type(value))
+ values_hash[attr.to_sym] = value.is_a?(Array) ? values : values.first
+ end
+
+ values_hash[:id] = hashes['id']
+ model_schema.new(values_hash)
+ end
+
+ def to_json
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::JSONLD, namespaces)
+ end
+
+ def to_xml
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::RDF_XML, namespaces)
+ end
+
+ def to_ntriples
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::NTRIPLES, namespaces)
+ end
+
+ def to_turtle
+ LinkedData::Serializers.serialize(to_hash, LinkedData::MediaTypes::TURTLE, namespaces)
+ end
+
+ def namespaces
+ prefixes = {}
+ ns_count = 0
+ hash = to_hash
+ reverse = hash.delete('reverse')
+
+ hash.each do |key, value|
+ uris = [key]
+ uris += Array(value).map { |v| v.is_a?(Hash) ? v.to_a.flatten : v }.flatten
+ prefixes, ns_count = transform_to_prefixes(ns_count, prefixes, uris)
+ end
+
+ reverse.each { |key, uris| prefixes, ns_count = transform_to_prefixes(ns_count, prefixes, [key] + Array(uris)) }
+
+ prefixes
+ end
+
+ private
+
+ def transform_to_prefixes(ns_count, prefixes, uris)
+ uris.each do |uri|
+ namespace, id = namespace_predicate(uri)
+ next if namespace.nil? || prefixes.value?(namespace)
+
+ prefix, prefix_namespace = Goo.namespaces.select { |_k, v| v.to_s.eql?(namespace) }.first
+ if prefix
+ prefixes[prefix] = prefix_namespace.to_s
+ else
+ prefixes["ns#{ns_count}".to_sym] = namespace
+ ns_count += 1
+ end
+ end
+ [prefixes, ns_count]
+ end
+
+ def fetch_related_triples(graph, id)
+ direct_fetch_query = Goo.sparql_query_client.select(:predicate, :object)
+ .from(RDF::URI.new(graph))
+ .where([RDF::URI.new(id), :predicate, :object])
+
+ inverse_fetch_query = Goo.sparql_query_client.select(:subject, :predicate)
+ .from(RDF::URI.new(graph))
+ .where([:subject, :predicate, RDF::URI.new(id)])
+
+ hashes = { 'id' => RDF::URI.new(id) }
+
+ direct_fetch_query.each_solution do |solution|
+ predicate = solution[:predicate].to_s
+ value = solution[:object]
+
+ if value.is_a?(RDF::Node) && Array(hashes[predicate]).none? { |x| x.is_a?(Hash) }
+ value = fetch_b_nodes_triples(graph, id, solution[:predicate])
+ elsif value.is_a?(RDF::Node)
+ next
+ end
+
+ hashes[predicate] = hashes[predicate] ? (Array(hashes[predicate]) + Array(value)) : value
+ end
+
+ hashes['reverse'] = {}
+ inverse_fetch_query.each_solution do |solution|
+ subject = solution[:subject].to_s
+ predicate = solution[:predicate]
+
+ if hashes['reverse'][subject]
+ if hashes['reverse'][subject].is_a?(Array)
+ hashes['reverse'][subject] << predicate
+ else
+ hashes['reverse'][subject] = [predicate, hashes['reverse'][subject]]
+ end
+ else
+ hashes['reverse'][subject] = predicate
+ end
+
+ end
+
+ hashes
+ end
+
+ def fetch_b_nodes_triples(graph, id, predicate)
+ b_node_fetch_query = Goo.sparql_query_client.select(:b, :predicate, :object)
+ .from(RDF::URI.new(graph))
+ .where(
+ [RDF::URI.new(id), predicate, :b],
+ %i[b predicate object]
+ )
+
+ b_nodes_hash = {}
+ b_node_fetch_query.each_solution do |s|
+ b_node_id = s[:b].to_s
+ if b_nodes_hash[b_node_id]
+ b_nodes_hash[b_node_id][s[:predicate].to_s] = s[:object]
+ else
+ b_nodes_hash[b_node_id] = { s[:predicate].to_s => s[:object] }
+ end
+ end
+ b_nodes_hash.values
+ end
+
+ def get_type(value)
+ types = []
+ types << :list if value.is_a?(Array)
+ value = Array(value).first
+ if value.is_a?(RDF::URI)
+ types << :uri
+ elsif value.is_a?(Float)
+ types << :float
+ elsif value.is_a?(Integer)
+ types << :integer
+ elsif value.to_s.eql?('true') || value.to_s.eql?('false')
+ types << :boolean
+ end
+ types
+ end
+
+ def namespace_predicate(property_url)
+ regex = /^(?<namespace>.*[\/#])(?<id>[^\/#]+)$/
+ match = regex.match(property_url.to_s)
+ [match[:namespace], match[:id]] if match
+ end
+
+ end
+ end
+end
\ No newline at end of file
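A usage sketch for the new `Resource` wrapper (the graph and resource IRIs are placeholders). It eagerly fetches direct, inverse, and blank-node triples for the id, then exposes hash, object, and serialization views:

```ruby
res = LinkedData::Models::Resource.new(
  'http://data.bioontology.org/ontologies/STY/submissions/1', # graph
  'http://purl.bioontology.org/ontology/STY/T001'             # resource id
)
puts res.to_turtle        # serialized with the prefixes computed by #namespaces
instance = res.to_object  # ad-hoc Goo model built from the fetched predicates
```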
diff --git a/lib/ontologies_linked_data/models/skos/collection.rb b/lib/ontologies_linked_data/models/skos/collection.rb
index afc5724e..535ace48 100644
--- a/lib/ontologies_linked_data/models/skos/collection.rb
+++ b/lib/ontologies_linked_data/models/skos/collection.rb
@@ -4,7 +4,7 @@ module SKOS
class Collection < LinkedData::Models::Base
model :collection, name_with: :id, collection: :submission,
- namespace: :skos, schemaless: :true, rdf_type: ->(*x) { RDF::SKOS[:Collection] }
+ namespace: :skos, schemaless: :true, rdf_type: ->(*x) { RDF::Vocab::SKOS[:Collection] }
attribute :prefLabel, namespace: :skos, enforce: [:existence]
attribute :member, namespace: :skos, enforce: [:list, :class]
diff --git a/lib/ontologies_linked_data/models/skos/scheme.rb b/lib/ontologies_linked_data/models/skos/scheme.rb
index 37e04189..1aca9393 100644
--- a/lib/ontologies_linked_data/models/skos/scheme.rb
+++ b/lib/ontologies_linked_data/models/skos/scheme.rb
@@ -4,7 +4,7 @@ module SKOS
class Scheme < LinkedData::Models::Base
model :scheme, name_with: :id, collection: :submission,
- namespace: :skos, schemaless: :true, rdf_type: ->(*x) { RDF::SKOS[:ConceptScheme] }
+ namespace: :skos, schemaless: :true, rdf_type: ->(*x) { RDF::Vocab::SKOS[:ConceptScheme] }
attribute :prefLabel, namespace: :skos, enforce: [:existence]
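Context for the namespace change: recent versions of the rdf gem moved the bundled vocabularies into the separate rdf-vocab gem, so `RDF::SKOS` no longer exists and both SKOS models must use `RDF::Vocab::SKOS`:

```ruby
require 'rdf/vocab'

RDF::Vocab::SKOS[:ConceptScheme].to_s
# => "http://www.w3.org/2004/02/skos/core#ConceptScheme"
```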
diff --git a/lib/ontologies_linked_data/models/submission_status.rb b/lib/ontologies_linked_data/models/submission_status.rb
index 2e810277..d7c74363 100644
--- a/lib/ontologies_linked_data/models/submission_status.rb
+++ b/lib/ontologies_linked_data/models/submission_status.rb
@@ -7,6 +7,7 @@ class SubmissionStatus < LinkedData::Models::Base
"RDF_LABELS", "ERROR_RDF_LABELS",
"OBSOLETE", "ERROR_OBSOLETE",
"INDEXED", "ERROR_INDEXED",
+ "INDEXED_ALL_DATA", "ERROR_INDEXED_ALL_DATA",
"INDEXED_PROPERTIES", "ERROR_INDEXED_PROPERTIES",
"METRICS", "ERROR_METRICS",
"ANNOTATOR", "ERROR_ANNOTATOR",
@@ -18,6 +19,8 @@ class SubmissionStatus < LinkedData::Models::Base
"RDF" => "Parsed successfully",
"RDF_ERROR" => "Error parsing",
"INDEXED" => "Indexed terms for search",
+ "INDEXED_ALL_DATA" => "Indexed all the data of the resource",
+ "ERROR_INDEXED_ALL_DATA" => "Error indexeding all the data of the resource",
"ERROR_INDEXED" => "Error indexing terms for search",
"INDEXED_PROPERTIES" => "Indexed properties for search",
"ERROR_INDEXED_PROPERTIES" => "Error indexing properties for search",
diff --git a/lib/ontologies_linked_data/models/users/oauth_authentication.rb b/lib/ontologies_linked_data/models/users/oauth_authentication.rb
new file mode 100644
index 00000000..7d285d2c
--- /dev/null
+++ b/lib/ontologies_linked_data/models/users/oauth_authentication.rb
@@ -0,0 +1,191 @@
+require 'bcrypt'
+require 'openssl'
+require 'base64'
+require 'json'
+require 'jwt'
+require 'faraday'
+
+module LinkedData
+ module Models
+ module Users
+ module OAuthAuthentication
+
+ def self.included(base)
+ base.extend ClassMethods
+ end
+
+ module ClassMethods
+
+ def oauth_providers
+ LinkedData.settings.oauth_providers
+ end
+
+ def oauth_authenticate(token, provider)
+ user_data = case provider.to_sym
+ when :github
+ auth_github(token)
+ when :google
+ auth_google(token)
+ when :orcid
+ auth_orcid(token)
+ when :keycloak
+ auth_keycloak(token)
+ else
+ nil
+ end
+
+ create_if_not_exists(user_data) if user_data
+ end
+
+ private
+
+ def create_if_not_exists(user_data)
+ user = user_by_email(user_data[:email])
+ if user.nil?
+ auth_create_user(user_data)
+ else
+ sync_providers_id(user, user_data[:githubId], user_data[:orcidId])
+ end
+ end
+
+ def sync_providers_id(user, github_id, orcid_id)
+ user.bring_remaining
+
+ user.githubId = github_id if (user.githubId.nil? || user.githubId.empty?) && !github_id.to_s.empty?
+ user.orcidId = orcid_id if (user.orcidId.nil? || user.orcidId.empty?) && !orcid_id.to_s.empty?
+
+
+ user.save(override_security: true) if user.valid?
+ user
+ end
+
+ def auth_create_user(user_data)
+ user = User.new(user_data)
+ user.password = SecureRandom.hex(16)
+
+ return nil unless user.valid?
+
+ user.save(send_notifications: true)
+ user
+ end
+
+ def user_by_email(email)
+ LinkedData::Models::User.where(email: email).first
+ end
+
+ def user_from_orcid_data(user_data)
+ {
+ email: user_data['email'],
+ firstName: user_data['name']['given-names'],
+ lastName: user_data['name']['family-name'],
+ username: user_data['email'].split('@').first,
+ orcidId: user_data['orcid']
+ }
+ end
+
+ def auth_orcid(token)
+ user_data = token_check(token, :orcid)
+
+ return nil if user_data.nil?
+
+ user_from_orcid_data user_data
+
+ end
+
+ def user_from_google_data(user_data)
+ {
+ email: user_data['email'],
+ firstName: user_data['given_name'],
+ lastName: user_data['family_name'],
+ username: user_data['email'].split('@').first
+ }
+ end
+
+ def auth_google(token)
+ user_data = token_check(token, :google)
+
+ return nil if user_data.nil?
+
+ user_from_google_data user_data
+ end
+
+ def user_from_github_data(user_data)
+ {
+ email: user_data['email'],
+ username: user_data['login'],
+ firstName: user_data['name'].split(' ').first,
+ lastName: user_data['name'].split(' ').drop(1).join(' '),
+ githubId: user_data['login']
+ }
+
+ end
+
+ def auth_github(token)
+
+ user_data = token_check(token, :github)
+
+ return nil if user_data.nil?
+
+ user_from_github_data user_data
+
+ end
+
+ def user_from_keycloak_data(user_data)
+ {
+ email: user_data['email'],
+ username: user_data['preferred_username'],
+ firstName: user_data['given_name'],
+ lastName: user_data['family_name']
+ }
+ end
+
+ def auth_keycloak(token)
+ user_data = token_check(token, :keycloak)
+
+ return nil if user_data.nil?
+
+ user_from_keycloak_data user_data
+ end
+
+ def token_check(token, provider)
+ provider_config = oauth_providers[provider.to_sym]
+
+ return nil unless provider_config
+
+ if provider_config[:check].eql?(:access_token)
+ access_token_check(token, provider_config[:link])
+ elsif provider_config[:check].eql?(:jwt_token)
+ jwt_token_check(token, provider_config[:cert])
+ end
+ end
+
+ def jwt_token_check(jwt_token, cert)
+ decode_cert = Base64.decode64(cert)
+ rsa_public = OpenSSL::X509::Certificate.new(decode_cert).public_key
+ begin
+ JWT.decode(jwt_token, rsa_public, true, { algorithm: 'RS256' }) # RSA public keys verify RS*, not HS*, signatures
+ rescue JWT::DecodeError
+ nil
+ end
+ end
+
+ def access_token_check(token, link)
+ response = Faraday.new(url: link) do |faraday|
+ faraday.headers['Authorization'] = "Bearer #{token}"
+ faraday.adapter Faraday.default_adapter
+ end.get
+
+ return nil unless response.success?
+
+ JSON.parse(response.body)
+ end
+ end
+
+ end
+
+ end
+
+ end
+end
+
+
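A minimal sketch of the new entry point, assuming `LinkedData.settings.oauth_providers` maps each provider to a `:check` strategy (`:access_token` or `:jwt_token`) plus a `:link` or `:cert`, as `token_check` reads it; the token value is a placeholder:

```ruby
# Verifies the token with the provider, then finds or creates the matching user
user = LinkedData::Models::User.oauth_authenticate('gho_placeholder_token', :github)
raise 'OAuth authentication failed' if user.nil?
```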
diff --git a/lib/ontologies_linked_data/models/users/user.rb b/lib/ontologies_linked_data/models/users/user.rb
index 12dc043d..f7cdca58 100644
--- a/lib/ontologies_linked_data/models/users/user.rb
+++ b/lib/ontologies_linked_data/models/users/user.rb
@@ -1,6 +1,7 @@
require 'bcrypt'
require 'securerandom'
require 'ontologies_linked_data/models/users/authentication'
+require 'ontologies_linked_data/models/users/oauth_authentication'
require 'ontologies_linked_data/models/users/role'
require 'ontologies_linked_data/models/users/subscription'
@@ -9,12 +10,17 @@ module Models
class User < LinkedData::Models::Base
include BCrypt
include LinkedData::Models::Users::Authentication
+ include LinkedData::Models::Users::OAuthAuthentication
+ include LinkedData::Concerns::Analytics
+
+ ANALYTICS_REDIS_FIELD = "user_analytics"
+ PAGES_ANALYTICS_REDIS_FIELD = "pages_analytics"
attr_accessor :show_apikey
model :user, name_with: :username
attribute :username, enforce: [:unique, :existence]
- attribute :email, enforce: [:existence]
+ attribute :email, enforce: [:unique, :existence]
attribute :role, enforce: [:role, :list], :default => lambda {|x| [LinkedData::Models::Users::Role.default]}
attribute :firstName
attribute :lastName
@@ -52,6 +58,10 @@ def self.show_apikey?(inst)
end
end
+ def embedded_doc
+ "#{self.firstName} #{self.lastName} #{self.username}"
+ end
+
def initialize(attributes = {})
# Don't allow passwordHash to be set here
attributes.delete(:passwordHash)
@@ -73,6 +83,14 @@ def save(*args)
Ontology.cache_collection_invalidate
OntologySubmission.cache_collection_invalidate
end
+
+ options = args.last.is_a?(Hash) ? args.last : {}
+ if options[:send_notifications]
+ begin
+ LinkedData::Utils::Notifications.new_user(self)
+ rescue StandardError
+ # notification failure should not block saving the user
+ end
+ end
+
super
end
@@ -99,6 +117,13 @@ def to_s
self.username.to_s
end
end
+ def self.analytics_redis_key
+ ANALYTICS_REDIS_FIELD
+ end
+
+ def self.page_visits_analytics
+ load_data(PAGES_ANALYTICS_REDIS_FIELD)
+ end
private
diff --git a/lib/ontologies_linked_data/monkeypatches/object.rb b/lib/ontologies_linked_data/monkeypatches/object.rb
index b9b97fd2..45599d1b 100644
--- a/lib/ontologies_linked_data/monkeypatches/object.rb
+++ b/lib/ontologies_linked_data/monkeypatches/object.rb
@@ -54,7 +54,7 @@ def to_flex_hash(options = {}, &block)
# Add methods
methods = methods - do_not_serialize_nested(options)
methods.each do |method|
- hash[method] = self.send(method.to_s) if self.respond_to?(method) rescue next
+ populate_attribute(hash, method) if self.respond_to?(method) rescue next
end
# Get rid of everything except the 'only'
@@ -244,7 +244,7 @@ def populate_attributes(hash, all = false, only = [], options = {})
attributes.each do |attribute|
next unless self.respond_to?(attribute)
- hash[attribute] = self.send(attribute)
+ populate_attribute(hash, attribute)
end
elsif !only.empty?
# Only get stuff we need
@@ -256,13 +256,22 @@ def populate_attributes(hash, all = false, only = [], options = {})
hash
end
+ def populate_attribute(hash, attribute)
+ if self.method(attribute).parameters.eql?([[:rest, :args]])
+ hash[attribute] = self.send(attribute, include_languages: true)
+ else
+ # a serialized method
+ hash[attribute] = self.send(attribute)
+ end
+ end
+
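Why the `parameters` check works: Goo-generated attribute readers take a `*args` splat (which is how `include_languages: true` can be forwarded), so their parameter list is exactly `[[:rest, :args]]`, while a plain serialized method is arity-zero. A quick illustration:

```ruby
def plain_reader; end
def goo_style_reader(*args); end

method(:plain_reader).parameters      # => []
method(:goo_style_reader).parameters  # => [[:rest, :args]]
```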
def populate_hash_from_list(hash, attributes)
attributes.each do |attribute|
attribute = attribute.to_sym
next unless self.respond_to?(attribute)
begin
- hash[attribute] = self.send(attribute)
+ populate_attribute(hash, attribute)
rescue Goo::Base::AttributeNotLoaded
next
rescue ArgumentError
diff --git a/lib/ontologies_linked_data/sample_data/ontology.rb b/lib/ontologies_linked_data/sample_data/ontology.rb
index 380b3bfb..61dcc04d 100644
--- a/lib/ontologies_linked_data/sample_data/ontology.rb
+++ b/lib/ontologies_linked_data/sample_data/ontology.rb
@@ -18,6 +18,9 @@ def self.create_ontologies_and_submissions(options = {})
submission_count = options[:submission_count] || 5
random_submission_count = options[:random_submission_count] || false
process_submission = options[:process_submission] || false
+ process_options = options[:process_options] || { process_rdf: true, index_search: true, index_properties: true,
+ run_metrics: true, reasoning: true }
+
submissions_to_process = options[:submissions_to_process]
acronym = options[:acronym] || "TEST-ONT"
name = options[:name]
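With the new `process_options` hook, callers can scope processing per test instead of always running the full pipeline; for example:

```ruby
LinkedData::SampleData::Ontology.create_ontologies_and_submissions(
  process_submission: true,
  process_options: { process_rdf: true, extract_metadata: true } # skip indexing and metrics
)
```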
@@ -40,12 +43,12 @@ def self.create_ontologies_and_submissions(options = {})
ont_acronyms << acronym_count
o = LinkedData::Models::Ontology.new({
- acronym: acronym_count,
- name: name || "#{acronym_count} Ontology",
- administeredBy: [u],
- summaryOnly: false,
- ontologyType: ontology_type
- })
+ acronym: acronym_count,
+ name: name ? "#{name}#{count > 0 ? count : ''}" : "#{acronym_count} Ontology",
+ administeredBy: [u],
+ summaryOnly: false,
+ ontologyType: ontology_type
+ })
if o.exist?
o = LinkedData::Models::Ontology.find(acronym_count).include(LinkedData::Models::Ontology.attributes(:all)).first
@@ -61,16 +64,16 @@ def self.create_ontologies_and_submissions(options = {})
#refresh submission to get new next submission ID after saving in a loop
o.bring(:submissions)
os = LinkedData::Models::OntologySubmission.new({
- ontology: o,
- hasOntologyLanguage: of,
- submissionId: o.next_submission_id,
- definitionProperty: (RDF::IRI.new "http://bioontology.org/ontologies/biositemap.owl#definition"),
- contact: [contact],
- released: DateTime.now - 3,
- URI: RDF::URI.new("https://test-#{o.next_submission_id}.com"),
- description: "Description #{o.next_submission_id}",
- status: 'production'
- })
+ ontology: o,
+ hasOntologyLanguage: of,
+ submissionId: o.next_submission_id,
+ definitionProperty: (RDF::IRI.new "http://bioontology.org/ontologies/biositemap.owl#definition"),
+ contact: [contact],
+ released: DateTime.now - 3,
+ URI: RDF::URI.new("https://test-#{o.next_submission_id}.com"),
+ description: "Description #{o.next_submission_id}",
+ status: 'production'
+ })
if (submissions_to_process.nil? || submissions_to_process.include?(os.submissionId))
file_path = options[:file_path]
@@ -107,14 +110,12 @@ def self.create_ontologies_and_submissions(options = {})
o.submissions.each do |ss|
ss.bring(:submissionId) if ss.bring?(:submissionId)
next if (!submissions_to_process.nil? && !submissions_to_process.include?(ss.submissionId))
-
+
test_log_file = TestLogFile.new
tmp_log = Logger.new(test_log_file)
-
+
begin
- ss.process_submission(tmp_log,
- process_rdf: true, index_search: true, index_properties: true,
- run_metrics: true, reasoning: true)
+ ss.process_submission(tmp_log, process_options)
rescue Exception => e
puts "Error processing submission: #{ss.id.to_s}"
puts "See test log for errors: #{test_log_file.path}"
@@ -132,15 +133,15 @@ def self.load_semantic_types_ontology(options = {})
file_path = "../../../../test/data/ontology_files/umls_semantictypes.ttl" if file_path.nil?
count, acronyms, sty = create_ontologies_and_submissions({
- ont_count: 1,
- submission_count: 1,
- process_submission: true,
- acronym: "STY",
- ontology_format: "UMLS",
- name: "Semantic Types Ontology",
- acronym_suffix: "",
- file_path: file_path
- })
+ ont_count: 1,
+ submission_count: 1,
+ process_submission: true,
+ acronym: "STY",
+ ontology_format: "UMLS",
+ name: "Semantic Types Ontology",
+ acronym_suffix: "",
+ file_path: file_path
+ })
sty
end
@@ -173,35 +174,39 @@ def self.delete_ontologies_and_submissions
u.delete unless u.nil?
end
- def self.sample_owl_ontologies
+ def self.sample_owl_ontologies(process_submission: false, process_options: nil)
+ process_options ||= {process_rdf: true, extract_metadata: false, index_search: false}
count, acronyms, bro = create_ontologies_and_submissions({
- process_submission: true,
- acronym: "BROTEST",
- name: "ontTEST Bla",
- file_path: "../../../../test/data/ontology_files/BRO_v3.2.owl",
- ont_count: 1,
- submission_count: 1
- })
+ process_submission: process_submission,
+ process_options: process_options,
+ acronym: "BROTEST",
+ name: "ontTEST Bla",
+ file_path: "../../../../test/data/ontology_files/BRO_v3.2.owl",
+ ont_count: 1,
+ submission_count: 1
+ })
# This one has some nasty looking IRIS with slashes in the anchor
count, acronyms, mccl = create_ontologies_and_submissions({
- process_submission: true,
- acronym: "MCCLTEST",
- name: "MCCLS TEST",
- file_path: "../../../../test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
- ont_count: 1,
- submission_count: 1
- })
+ process_submission: process_submission,
+ process_options: process_options,
+ acronym: "MCCLTEST",
+ name: "MCCLS TEST",
+ file_path: "../../../../test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl",
+ ont_count: 1,
+ submission_count: 1
+ })
# This one has resources wih accents.
count, acronyms, onto_matest = create_ontologies_and_submissions({
- process_submission: true,
- acronym: "ONTOMATEST",
- name: "OntoMA TEST",
- file_path: "../../../../test/data/ontology_files/OntoMA.1.1_vVersion_1.1_Date__11-2011.OWL",
- ont_count: 1,
- submission_count: 1
- })
+ process_submission: process_submission,
+ process_options: process_options,
+ acronym: "ONTOMATEST",
+ name: "OntoMA TEST",
+ file_path: "../../../../test/data/ontology_files/OntoMA.1.1_vVersion_1.1_Date__11-2011.OWL",
+ ont_count: 1,
+ submission_count: 1
+ })
return bro.concat(mccl).concat(onto_matest)
end
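+
+ # Usage sketch: load the three OWL samples while only parsing RDF:
+ #   LinkedData::SampleData::Ontology.sample_owl_ontologies(
+ #     process_submission: true,
+ #     process_options: { process_rdf: true, extract_metadata: false, index_search: false })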
diff --git a/lib/ontologies_linked_data/security/authorization.rb b/lib/ontologies_linked_data/security/authorization.rb
index 188e1207..fc16ea67 100644
--- a/lib/ontologies_linked_data/security/authorization.rb
+++ b/lib/ontologies_linked_data/security/authorization.rb
@@ -19,67 +19,52 @@ def self.decodeJWT(encodedToken)
end
APIKEYS_FOR_AUTHORIZATION = {}
+ USER_APIKEY_PARAM = 'userapikey'.freeze
+ API_KEY_PARAM = 'apikey'.freeze
def initialize(app = nil)
@app = app
end
ROUTES_THAT_BYPASS_SECURITY = Set.new([
- "/",
- "/documentation",
- "/jsonview/jsonview.css",
- "/jsonview/jsonview.js"
- ])
+ "/",
+ "/documentation",
+ "/jsonview/jsonview.css",
+ "/jsonview/jsonview.js"
+ ])
def call(env)
req = Rack::Request.new(env)
params = req.params
- access_token = nil
- apikey = nil
-
- access_token = find_access_token(env, params) if LinkedData.settings.oauth2_enabled
- apikey = find_apikey(env, params) unless access_token
-
- if apikey
- unless authorized?(apikey, env)
- status = 401
- response = {
- status: status,
- error: "You must provide a valid API Key. " + \
- "Your API Key can be obtained by logging in at #{LinkedData.settings.ui_host}/account"
- }
- end
- elsif access_token
- begin
- Authorization::decodeJWT(access_token)
- rescue JWT::DecodeError => e
- LOGGER.debug(e.message)
- status = 401
- response = {
- status: status,
- error: "Failed to decode JWT token: " + e.message
- }
- end
- else
- status = 401
- error_message = "You must provide an API Key either using the query-string parameter `apikey` or the `Authorization` header: `Authorization: apikey token=my_apikey`. " + \
- "Your API Key can be obtained by logging in at #{LinkedData.settings.ui_host}/account"
- if LinkedData.settings.oauth2_enabled
- error_message = error_message + "Alternatively, you must supply an OAuth2 access token in the `Authorization` header: `Authorization: Bearer oauth2-access-token`."
- end
+ apikey = find_apikey(env, params)
+ status = 200
+ error_message = ''
- response = {
- status: status,
- error: error_message
- }
+ if !apikey
+ status = 401
+ error_message = <<-MESSAGE
+ You must provide an API Key either using the query-string parameter `apikey` or the `Authorization` header: `Authorization: apikey token=my_apikey`.
+ Your API Key can be obtained by logging in at #{LinkedData.settings.ui_host}/account
+ MESSAGE
+ elsif !authorized?(apikey, env)
+ status = 401
+ error_message = "You must provide a valid API Key. Your API Key can be obtained by logging in at #{LinkedData.settings.ui_host}/account"
end
- if status == 401 && !bypass?(env)
+ response = {
+ status: status,
+ error: error_message
+ }
+
+ if status.eql?(401) && !bypass?(env)
LinkedData::Serializer.build_response(env, status: status, body: response)
else
+ # Unfreeze request params so Rack can re-encode them (needed after updating the RDF gem to v3.0)
+ env["rack.request.form_hash"]&.transform_values!(&:dup)
+ env["rack.request.query_hash"]&.transform_values!(&:dup)
status, headers, response = @app.call(env)
- apikey_cookie(env, headers, apikey, params)
+ save_apikey_in_cookie(env, headers, apikey, params)
[status, headers, response]
end
end
@@ -93,40 +78,37 @@ def bypass?(env)
##
# Inject a cookie with the API Key if it is present and we're in HTML content type
- def apikey_cookie(env, headers, apikey, params)
+ COOKIE_APIKEY_PARAM = "ncbo_apikey".freeze
+
+ def save_apikey_in_cookie(env, headers, apikey, params)
# If we're using HTML, inject the apikey in a cookie (ignores bad accept headers)
+ best = nil
begin
best = LinkedData::Serializer.best_response_type(env, params)
- rescue LinkedData::Serializer::AcceptHeaderError; end
- if best == LinkedData::MediaTypes::HTML
- Rack::Utils.set_cookie_header!(headers, "ncbo_apikey", {:value => apikey, :path => "/", :expires => Time.now+90*24*60*60})
+ rescue LinkedData::Serializer::AcceptHeaderError
+ # Ignored
end
+
+ return unless best == LinkedData::MediaTypes::HTML
+
+ Rack::Utils.set_cookie_header!(headers, COOKIE_APIKEY_PARAM, {
+ value: apikey,
+ path: '/',
+ expires: Time.now + 90 * 24 * 60 * 60
+ })
end
def find_apikey(env, params)
- apikey = nil
- header_auth = env["HTTP_AUTHORIZATION"] || env["Authorization"]
- if params["apikey"] && params["userapikey"]
- apikey_authed = authorized?(params["apikey"], env)
- return unless apikey_authed
- apikey = params["userapikey"]
- elsif params["apikey"]
- apikey = params["apikey"]
- elsif apikey.nil? && header_auth && !header_auth.empty?
- token = Rack::Utils.parse_query(header_auth.split(" ")[1])
- # Strip spaces from start and end of string
- apikey = token["token"].gsub(/\"/, "")
- # If the user apikey is passed, use that instead
- if token["userapikey"] && !token["userapikey"].empty?
- apikey_authed = authorized?(apikey, env)
- return unless apikey_authed
- apikey = token["userapikey"].gsub(/\"/, "")
- end
- elsif apikey.nil? && env["HTTP_COOKIE"] && env["HTTP_COOKIE"].include?("ncbo_apikey")
- cookie = Rack::Utils.parse_query(env["HTTP_COOKIE"])
- apikey = cookie["ncbo_apikey"] if cookie["ncbo_apikey"]
- end
- apikey
+ apikey = user_apikey(env, params)
+ return apikey if apikey
+
+ apikey = params[API_KEY_PARAM]
+ return apikey if apikey
+
+ apikey = request_header_apikey(env)
+ return apikey if apikey
+
+ cookie_apikey(env)
end
def find_access_token(env, params)
@@ -141,17 +123,21 @@ def find_access_token(env, params)
def authorized?(apikey, env)
return false if apikey.nil?
+
if APIKEYS_FOR_AUTHORIZATION.key?(apikey)
store_user(APIKEYS_FOR_AUTHORIZATION[apikey], env)
else
- users = LinkedData::Models::User.where(apikey: apikey).include(LinkedData::Models::User.attributes(:all)).to_a
- return false if users.empty?
+ user = LinkedData::Models::User.where(apikey: apikey)
+ .include(LinkedData::Models::User.attributes(:all))
+ .first
+ return false if user.nil?
+
# This will kind-of break if multiple apikeys exist
# Though it is also kind-of ok since we just want to know if a user with corresponding key exists
- user = users.first
+
store_user(user, env)
end
- return true
+ true
end
def store_user(user, env)
@@ -159,6 +145,46 @@ def store_user(user, env)
env.update("REMOTE_USER" => user)
end
+ private
+
+ def request_header_apikey(env)
+ header_auth = get_header_auth(env)
+ return if header_auth.empty?
+
+ token = Rack::Utils.parse_query(header_auth.split(' ').last)
+ # Strip surrounding double quotes from the token value
+ apikey = token['token']&.gsub(/\"/, '')
+ # If the user apikey is passed, use that instead
+ if token[USER_APIKEY_PARAM] && !token[USER_APIKEY_PARAM].empty?
+ apikey_authed = authorized?(apikey, env)
+ return unless apikey_authed
+
+ apikey = token[USER_APIKEY_PARAM].gsub(/\"/, "")
+ end
+ apikey
+ end
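+
+ # Expected header shape (values hypothetical):
+ #   Authorization: apikey token="my_apikey"
+ # or, to act on behalf of another user:
+ #   Authorization: apikey token="admin_key"&userapikey="user_key"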
+
+ def cookie_apikey(env)
+ return unless env["HTTP_COOKIE"]
+
+ cookie = Rack::Utils.parse_query(env['HTTP_COOKIE'])
+ cookie[COOKIE_APIKEY_PARAM]
+ end
+
+ def get_header_auth(env)
+ env["HTTP_AUTHORIZATION"] || env["Authorization"] || ''
+ end
+
+ def user_apikey(env, params)
+ return unless params[API_KEY_PARAM] && params[USER_APIKEY_PARAM]
+
+ apikey_authed = authorized?(params[API_KEY_PARAM], env)
+
+ return unless apikey_authed
+
+ params[USER_APIKEY_PARAM]
+ end
+
end
end
end
diff --git a/lib/ontologies_linked_data/serializers/json.rb b/lib/ontologies_linked_data/serializers/json.rb
index 4bcf1d46..d5247822 100644
--- a/lib/ontologies_linked_data/serializers/json.rb
+++ b/lib/ontologies_linked_data/serializers/json.rb
@@ -16,7 +16,7 @@ def self.serialize(obj, options = {})
end
# Add the type
- hash["@type"] = type(current_cls, hashed_obj) if hash["@id"]
+ hash["@type"] = type(current_cls, hashed_obj) if hash["@id"]
# Generate links
# NOTE: If this logic changes, also change in xml.rb
@@ -36,8 +36,8 @@ def self.serialize(obj, options = {})
end
elsif (hashed_obj.instance_of?(LinkedData::Models::ExternalClass) || hashed_obj.instance_of?(LinkedData::Models::InterportalClass)) && !current_cls.embedded?
# Add context for ExternalClass
- context_hash = {"@vocab" => Goo.vocabulary.to_s, "prefLabel" => "http://data.bioontology.org/metadata/skosprefLabel"}
- context = {"@context" => context_hash}
+ context_hash = { "@vocab" => Goo.vocabulary.to_s, "prefLabel" => "http://data.bioontology.org/metadata/skosprefLabel" }
+ context = { "@context" => context_hash }
hash.merge!(context)
end
@@ -52,6 +52,29 @@ def self.serialize(obj, options = {})
private
+ def self.get_object_submission(obj)
+ obj.class.respond_to?(:attributes) && obj.class.attributes.include?(:submission) ? obj.submission : nil
+ end
+
+ def self.get_languages(submission, user_languages)
+ result_lang = user_languages
+
+ if submission
+ submission.bring :naturalLanguage
+ languages = get_submission_languages(submission.naturalLanguage)
+ # intersection of the two arrays, unless the requested language is :all
+ result_lang = user_languages == :all ? languages : Array(user_languages) & languages
+ result_lang = result_lang.first if result_lang.length == 1
+ end
+
+ result_lang
+ end
+
+ def self.get_submission_languages(submission_natural_language = [])
+ submission_natural_language = submission_natural_language.values.flatten if submission_natural_language.is_a?(Hash)
+ submission_natural_language.map { |natural_language| natural_language.to_s['iso639'] && natural_language.to_s.split('/').last[0..1].to_sym }.compact
+ end
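+
+ # e.g. naturalLanguage values ['http://lexvo.org/id/iso639-3/eng',
+ # 'http://lexvo.org/id/iso639-3/fra'] map to [:en, :fr]; a request for :fr
+ # then keeps only the French values (sketch).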
+
def self.type(current_cls, hashed_obj)
if current_cls.respond_to?(:type_uri)
# For internal class
@@ -82,17 +105,17 @@ def self.generate_context(object, serialized_attrs = [], options = {})
if linked_model && linked_model.ancestors.include?(Goo::Base::Resource) && !embedded?(object, attr)
# linked object
- predicate = {"@id" => linked_model.type_uri.to_s, "@type" => "@id"}
+ predicate = { "@id" => linked_model.type_uri.to_s, "@type" => "@id" }
else
# use the original predicate property if set
- predicate_attr = current_cls.model_settings[:attributes][attr][:property] || attr
+ predicate_attr = current_cls.model_settings[:attributes][attr][:property] || attr
# predicate with custom namespace
# if the namespace can be resolved by the namespaces added in Goo then it will be resolved.
predicate = "#{Goo.vocabulary(current_cls.model_settings[:attributes][attr][:namespace])&.to_s}#{predicate_attr}"
end
hash[attr] = predicate unless predicate.nil?
end
- context = {"@context" => hash}
+ context = { "@context" => hash }
CONTEXTS[object.hash] = context
context = remove_unused_attrs(context, serialized_attrs) unless options[:params] && options[:params]["full_context"].eql?("true")
context
@@ -105,12 +128,12 @@ def self.generate_links_context(object)
links.each do |link|
links_context[link.type] = link.type_uri.to_s
end
- return {"@context" => links_context}
+ return { "@context" => links_context }
end
def self.remove_unused_attrs(context, serialized_attrs = [])
- new_context = context["@context"].reject {|k,v| !serialized_attrs.include?(k) && !k.to_s.start_with?("@")}
- {"@context" => new_context}
+ new_context = context["@context"].reject { |k, v| !serialized_attrs.include?(k) && !k.to_s.start_with?("@") }
+ { "@context" => new_context }
end
def self.embedded?(object, attribute)
@@ -127,20 +150,19 @@ def self.generate_context?(options)
params = options[:params]
params.nil? ||
(params["no_context"].nil? ||
- !params["no_context"].eql?("true")) &&
- (params["display_context"].nil? ||
- !params["display_context"].eql?("false"))
+ !params["no_context"].eql?("true")) &&
+ (params["display_context"].nil? ||
+ !params["display_context"].eql?("false"))
end
def self.generate_links?(options)
params = options[:params]
params.nil? ||
(params["no_links"].nil? ||
- !params["no_links"].eql?("true")) &&
- (params["display_links"].nil? ||
- !params["display_links"].eql?("false"))
+ !params["no_links"].eql?("true")) &&
+ (params["display_links"].nil? ||
+ !params["display_links"].eql?("false"))
end
end
end
-end
-
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/jsonld.rb b/lib/ontologies_linked_data/serializers/jsonld.rb
new file mode 100644
index 00000000..22e6b7d6
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/jsonld.rb
@@ -0,0 +1,40 @@
+require 'multi_json'
+require 'json/ld'
+
+module LinkedData
+ module Serializers
+ class JSONLD
+
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ graph = RDF::Graph.new
+
+ hashes.each do |property_url, val|
+ Array(val).each do |v|
+ if v.is_a?(Hash)
+ blank_node = RDF::Node.new
+ v.each do |blank_predicate, blank_value|
+ graph << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ v = blank_node
+ end
+ graph << RDF::Statement.new(subject, RDF::URI.new(property_url), v)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ graph << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
+ context = { '@context' => options.transform_keys(&:to_s) }
+ compacted = ::JSON::LD::API.compact(::JSON::LD::API.fromRdf(graph), context['@context'])
+ MultiJson.dump(compacted)
+ end
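+
+ # Usage sketch (hypothetical data): `hashes` maps predicate IRIs to values and
+ # carries its subject under 'id' (plus optional inverse triples under 'reverse'):
+ #   JSONLD.serialize(
+ #     { 'id' => 'http://example.org/ontology/C1',
+ #       'http://www.w3.org/2004/02/skos/core#prefLabel' => 'Melanoma' },
+ #     skos: 'http://www.w3.org/2004/02/skos/core#')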
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/ntriples.rb b/lib/ontologies_linked_data/serializers/ntriples.rb
new file mode 100644
index 00000000..c96795a7
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/ntriples.rb
@@ -0,0 +1,37 @@
+module LinkedData
+ module Serializers
+ class NTRIPLES
+
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ RDF::Writer.for(:ntriples).buffer(prefixes: options) do |writer|
+ hashes.each do |p, o|
+ predicate = RDF::URI.new(p)
+ Array(o).each do |item|
+ if item.is_a?(Hash)
+ blank_node = RDF::Node.new
+ item.each do |blank_predicate, blank_value|
+ writer << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ item = blank_node
+ end
+ writer << RDF::Statement.new(subject, predicate, item)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ writer << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+ end
+ end
+
+ end
+ end
+end
+
+
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/rdf_xml.rb b/lib/ontologies_linked_data/serializers/rdf_xml.rb
new file mode 100644
index 00000000..e06590f0
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/rdf_xml.rb
@@ -0,0 +1,43 @@
+module LinkedData
+ module Serializers
+ class RDF_XML
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes["id"])
+ reverse = hashes["reverse"] || {}
+ hashes.delete("id")
+ hashes.delete("reverse")
+ graph = RDF::Graph.new
+
+ hashes.each do |property_url, val|
+ Array(val).each do |v|
+ if v.is_a?(Hash)
+ blank_node = RDF::Node.new
+ v.each do |blank_predicate, blank_value|
+ graph << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ v = blank_node
+ end
+ graph << RDF::Statement.new(subject, RDF::URI.new(property_url), v)
+ end
+ end
+
+ inverse_graph = RDF::Graph.new
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ inverse_graph << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
+ a = RDF::RDFXML::Writer.buffer(prefixes: options) do |writer|
+ writer << graph
+ end
+
+ b = RDF::RDFXML::Writer.buffer(prefixes: options) do |writer|
+ writer << inverse_graph
+ end
+ xml_result = "#{a.chomp("</rdf:RDF>\n")}\n#{b.sub(/^<\?xml[^>]*>\n<rdf:RDF[^>]*>/, '').gsub(/^$\n/, '')}"
+ xml_result.gsub(/^$\n/, '')
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/serializers.rb b/lib/ontologies_linked_data/serializers/serializers.rb
index b6006280..1603c1db 100644
--- a/lib/ontologies_linked_data/serializers/serializers.rb
+++ b/lib/ontologies_linked_data/serializers/serializers.rb
@@ -1,8 +1,12 @@
require 'ontologies_linked_data/media_types'
require 'ontologies_linked_data/serializers/xml'
+require 'ontologies_linked_data/serializers/rdf_xml'
require 'ontologies_linked_data/serializers/json'
require 'ontologies_linked_data/serializers/jsonp'
+require 'ontologies_linked_data/serializers/jsonld'
require 'ontologies_linked_data/serializers/html'
+require 'ontologies_linked_data/serializers/ntriples'
+require 'ontologies_linked_data/serializers/turtle'
module LinkedData
module Serializers
@@ -10,17 +14,15 @@ def self.serialize(obj, type, options = {})
SERIALIZERS[type].serialize(obj, options)
end
- class Turtle
- def self.serialize(obj, options)
- end
- end
-
SERIALIZERS = {
LinkedData::MediaTypes::HTML => HTML,
LinkedData::MediaTypes::JSON => JSON,
LinkedData::MediaTypes::JSONP => JSONP,
+ LinkedData::MediaTypes::JSONLD => JSONLD,
LinkedData::MediaTypes::XML => XML,
- LinkedData::MediaTypes::TURTLE => JSON
+ LinkedData::MediaTypes::RDF_XML => RDF_XML,
+ LinkedData::MediaTypes::TURTLE => TURTLE,
+ LinkedData::MediaTypes::NTRIPLES => NTRIPLES
}
end
end
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/serializers/turtle.rb b/lib/ontologies_linked_data/serializers/turtle.rb
new file mode 100644
index 00000000..b0cc9ecf
--- /dev/null
+++ b/lib/ontologies_linked_data/serializers/turtle.rb
@@ -0,0 +1,38 @@
+module LinkedData
+ module Serializers
+ class TURTLE
+ def self.serialize(hashes, options = {})
+ subject = RDF::URI.new(hashes['id'])
+ reverse = hashes['reverse'] || {}
+ hashes.delete('id')
+ hashes.delete('reverse')
+ options.delete(:rdf)
+
+ RDF::Writer.for(:turtle).buffer(prefixes: options) do |writer|
+ hashes.each do |p, o|
+ predicate = RDF::URI.new(p)
+ Array(o).each do |item|
+ if item.is_a?(Hash)
+ blank_node = RDF::Node.new
+ item.each do |blank_predicate, blank_value|
+ writer << RDF::Statement.new(blank_node, RDF::URI.new(blank_predicate), blank_value)
+ end
+ item = blank_node
+ end
+ writer << RDF::Statement.new(subject, predicate, item)
+ end
+ end
+
+ reverse.each do |reverse_subject, reverse_property|
+ Array(reverse_property).each do |s|
+ writer << RDF::Statement.new(RDF::URI.new(reverse_subject), RDF::URI.new(s), subject)
+ end
+ end
+
+ end
+ end
+ end
+ end
+end
+
+
\ No newline at end of file
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_all_data_indexer.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_all_data_indexer.rb
new file mode 100644
index 00000000..0a3e46eb
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_all_data_indexer.rb
@@ -0,0 +1,154 @@
+require 'parallel'
+module LinkedData
+ module Services
+ class OntologySubmissionAllDataIndexer < OntologySubmissionProcess
+
+ def process(logger, options = nil)
+ status = LinkedData::Models::SubmissionStatus.find('INDEXED_ALL_DATA').first
+ commit = options.is_a?(Hash) ? options.fetch(:commit, true) : true
+ begin
+ index_all_data(logger, commit: commit)
+ @submission.add_submission_status(status)
+ rescue StandardError => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ @submission.add_submission_status(status.get_error_status)
+ ensure
+ @submission.save
+ end
+ end
+
+ private
+
+ def index_sorted_ids(ids, ontology, conn, logger, commit = true)
+ total_triples = Parallel.map(ids.each_slice(1000), in_threads: 10) do |ids_slice|
+ index_ids = 0
+ triples_count = 0
+ documents = {}
+ time = Benchmark.realtime do
+ documents, triples_count = fetch_triples(ids_slice, ontology)
+ end
+
+ # use `next` (not `return`) so an empty slice doesn't abort the whole Parallel.map
+ next 0 if documents.empty?
+
+ logger.info("Worker #{Parallel.worker_number} > Fetched #{triples_count} triples of #{@submission.id} in #{time} sec.") if triples_count.positive?
+
+ time = Benchmark.realtime do
+ conn.index_document(documents.values, commit: false)
+ conn.index_commit if commit
+ index_ids = documents.size
+ documents = {}
+ end
+ logger.info("Worker #{Parallel.worker_number} > Indexed #{index_ids} ids of #{@submission.id} in #{time} sec.")
+ triples_count
+ end
+ total_triples.sum
+ end
+
+ def index_all_data(logger, commit: true)
+ page = 1
+ size = 10_000
+ count_ids = 0
+ total_time = 0
+ total_triples = 0
+ old_count = -1
+
+ ontology = @submission.bring(:ontology).ontology
+ .bring(:acronym).acronym
+ conn = init_search_collection(ontology)
+
+ ids = {}
+
+ while count_ids != old_count
+ old_count = count_ids
+ count = 0
+ time = Benchmark.realtime do
+ ids = fetch_sorted_ids(size, page)
+ count = ids.size
+ end
+
+ count_ids += count
+ total_time += time
+ page += 1
+
+ next unless count.positive?
+
+ logger.info("Fetched #{count} ids of #{@submission.id} page: #{page} in #{time} sec.")
+
+ time = Benchmark.realtime do
+ total_triples += index_sorted_ids(ids, ontology, conn, logger, commit)
+ end
+ logger.info("Indexed #{total_triples} triples of #{@submission.id} page: #{page} in #{time} sec.")
+
+ total_time += time
+ end
+ logger.info("Completed indexing all ontology data: #{@submission.id} in #{total_time} sec. (#{count_ids} ids / #{total_triples} triples)")
+ logger.flush
+ end
+
+ def fetch_sorted_ids(size, page)
+ query = Goo.sparql_query_client.select(:id)
+ .distinct
+ .from(RDF::URI.new(@submission.id))
+ .where(%i[id p v])
+ .limit(size)
+ .offset((page - 1) * size)
+
+ query.each_solution.map(&:id).sort
+ end
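+
+ # Roughly the generated query (sketch):
+ #   SELECT DISTINCT ?id FROM <submission-graph-iri> WHERE { ?id ?p ?v }
+ #   LIMIT 10000 OFFSET 0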
+
+ def update_doc(doc, property, new_val)
+ unescaped_prop = property.gsub('___', '://')
+
+ unescaped_prop = unescaped_prop.gsub('_', '/')
+ existent_val = doc["#{unescaped_prop}_t"] || doc["#{unescaped_prop}_txt"]
+
+ if !existent_val && !property['#']
+ unescaped_prop = unescaped_prop.sub(%r{/([^/]+)$}, '#\1') # replace the last '/' with '#'
+ existent_val = doc["#{unescaped_prop}_t"] || doc["#{unescaped_prop}_txt"]
+ end
+
+ if (existent_val && new_val) || new_val.is_a?(Array)
+ doc.delete("#{unescaped_prop}_t")
+ doc["#{unescaped_prop}_txt"] = Array(existent_val) + Array(new_val).map(&:to_s)
+ elsif existent_val.nil? && new_val
+ doc["#{unescaped_prop}_t"] = new_val.to_s
+ end
+ doc
+ end
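+
+ # e.g. the escaped Solr field 'http___www.w3.org_2000_01_rdf-schema#label'
+ # unescapes to 'http://www.w3.org/2000/01/rdf-schema#label'; single values go
+ # into '<property>_t', multi-valued ones are merged into '<property>_txt' (sketch).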
+
+ def init_search_collection(ontology)
+ @submission.class.clear_indexed_content(ontology)
+ end
+
+ def fetch_triples(ids_slice, ontology)
+ documents = {}
+ count = 0
+ filter = ids_slice.map { |x| "?id = <#{x}>" }.join(' || ')
+ query = Goo.sparql_query_client.select(:id, :p, :v)
+ .from(RDF::URI.new(@submission.id))
+ .where(%i[id p v])
+ .filter(filter)
+ query.each_solution do |sol|
+ count += 1
+ doc = documents[sol[:id].to_s]
+ doc ||= {
+ id: "#{sol[:id]}_#{ontology}", submission_id_t: @submission.id.to_s,
+ ontology_t: ontology, resource_model: @submission.class.model_name,
+ resource_id: sol[:id].to_s
+ }
+ property = sol[:p].to_s
+ value = sol[:v]
+
+ if property.to_s.eql?(RDF.type.to_s)
+ update_doc(doc, 'type', value)
+ else
+ update_doc(doc, property, value)
+ end
+ documents[sol[:id].to_s] = doc
+ end
+ [documents, count]
+ end
+
+ end
+ end
+end
+
+
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_archiver.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_archiver.rb
new file mode 100644
index 00000000..f5e18e34
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_archiver.rb
@@ -0,0 +1,63 @@
+module LinkedData
+ module Services
+ class OntologySubmissionArchiver < OntologySubmissionProcess
+
+ FILES_TO_DELETE = ['labels.ttl', 'mappings.ttl', 'obsolete.ttl', 'owlapi.xrdf', 'errors.log']
+ FOLDERS_TO_DELETE = ['unzipped']
+ FILE_SIZE_ZIPPING_THRESHOLD = 100 * 1024 * 1024 # 100MB
+
+ def process
+ archive_submission
+ end
+
+ private
+
+ def archive_submission
+ @submission.ontology.bring(:submissions)
+ submissions = @submission.ontology.submissions
+ return if submissions.nil?
+
+ submissions.each { |s| s.bring(:submissionId) }
+ latest_submission = submissions.max_by(&:submissionId)
+
+ return unless @submission.submissionId < latest_submission.submissionId
+
+ @submission.submissionStatus = nil
+ status = LinkedData::Models::SubmissionStatus.find("ARCHIVED").first
+ @submission.add_submission_status(status)
+
+ @submission.unindex
+
+ # Delete everything except for original ontology file.
+ delete_old_submission_files
+ @submission.uploadFilePath = zip_submission_uploaded_file
+ end
+
+ def zip_submission_uploaded_file
+ @submission.bring(:uploadFilePath) if @submission.bring?(:uploadFilePath)
+ return @submission.uploadFilePath if @submission.zipped?
+
+ return @submission.uploadFilePath if @submission.uploadFilePath.nil? || @submission.uploadFilePath.empty?
+
+ return @submission.uploadFilePath if File.size(@submission.uploadFilePath) < FILE_SIZE_ZIPPING_THRESHOLD
+
+ old_path = @submission.uploadFilePath
+ zip_file = Utils::FileHelpers.zip_file(old_path)
+ FileUtils.rm(old_path, force: true)
+ zip_file
+ end
+
+ def delete_old_submission_files
+ path_to_repo = @submission.data_folder
+ submission_files = FILES_TO_DELETE.map { |f| File.join(path_to_repo, f) }
+ submission_files.push(@submission.csv_path)
+ submission_files.push(@submission.parsing_log_path) unless @submission.parsing_log_path.nil?
+ FileUtils.rm(submission_files, force: true)
+ submission_folders = FOLDERS_TO_DELETE.map { |f| File.join(path_to_repo, f) }
+ submission_folders.each { |d| FileUtils.remove_dir(d) if File.directory?(d) }
+ end
+
+ end
+
+ end
+end
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_diff_generator.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_diff_generator.rb
new file mode 100644
index 00000000..b6dda351
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_diff_generator.rb
@@ -0,0 +1,86 @@
+module LinkedData
+ module Services
+ class SubmissionDiffGenerator < OntologySubmissionProcess
+
+ def process(logger, options = nil)
+ process_diff(logger)
+ end
+
+ def diff(logger, older)
+ generate_diff(logger, init_diff_tool(older))
+ end
+
+ private
+
+ # accepts another submission in 'older' (it should be an earlier version of the same ontology)
+ def init_diff_tool(older)
+ @submission.bring(:uploadFilePath)
+ older.bring(:uploadFilePath)
+
+ LinkedData::Diff::BubastisDiffCommand.new(
+ File.expand_path(older.uploadFilePath),
+ File.expand_path(@submission.uploadFilePath))
+ end
+
+ def process_diff(logger)
+ status = LinkedData::Models::SubmissionStatus.find('DIFF').first
+ # Get previous submission from ontology.submissions
+ @submission.ontology.bring(:submissions)
+ submissions = @submission.ontology.submissions
+
+ if submissions.nil?
+ logger.info("Diff process: no submissions available for #{@submission.id}.")
+ else
+ submissions.each { |s| s.bring(:submissionId, :diffFilePath) }
+ # Sort submissions by submissionId in descending order and keep the two most recent
+ recent_submissions = submissions.sort { |a, b| b.submissionId <=> a.submissionId }[0..1]
+
+ if recent_submissions.length > 1
+ # validate that the most recent submission is the current submission
+ if @submission.submissionId == recent_submissions.first.submissionId
+ prev = recent_submissions.last
+
+ # Ensure that prev is older than the current submission
+ if @submission.submissionId > prev.submissionId
+ # generate a diff
+ begin
+ diff(logger, prev)
+ @submission.add_submission_status(status)
+ rescue Exception => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.add_submission_status(status.get_error_status)
+ ensure
+ @submission.save
+ end
+ end
+ end
+ else
+ logger.info("Diff process: no older submissions available for #{@submission.id}.")
+ end
+ end
+ end
+
+
+ def generate_diff(logger, diff_tool)
+ begin
+ @submission.bring_remaining
+ @submission.bring(:diffFilePath)
+
+ LinkedData::Diff.logger = logger
+ @submission.diffFilePath = diff_tool.diff
+ @submission.save
+ logger.info("Diff generated successfully for #{@submission.id}")
+ logger.flush
+ rescue StoreError => e
+ logger.error("Diff process for #{@submission.id} failed - #{e.class}: #{e.message}")
+ logger.flush
+ raise e
+ end
+ end
+
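+ # Usage sketch (assuming OntologySubmissionProcess keeps the submission passed
+ # to its constructor):
+ #   SubmissionDiffGenerator.new(submission).diff(logger, older_submission)
+ # runs Bubastis on the two upload files and stores the result in diffFilePath.
+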
+ end
+ end
+end
+
+
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_extract_metadata.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_extract_metadata.rb
new file mode 100644
index 00000000..0fb8fe0a
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_extract_metadata.rb
@@ -0,0 +1,262 @@
+module LinkedData
+ module Services
+ class SubmissionMetadataExtractor < OntologySubmissionProcess
+
+ def process(logger, options = nil)
+ options ||= {}
+ extract_metadata(logger, options[:user_params], heavy_extraction: options[:heavy_extraction].eql?(true))
+ end
+
+ private
+
+ def extract_metadata(logger, user_params, heavy_extraction: true)
+ version_info = extract_version
+ ontology_iri = extract_ontology_iri
+ @submission.version = version_info if version_info
+ @submission.uri = ontology_iri if ontology_iri
+ if heavy_extraction
+ begin
+ # Extract metadata directly from the ontology
+ extract_ontology_metadata(logger, user_params)
+ logger.info('Additional metadata extracted.')
+ rescue StandardError => e
+ logger.error("Error while extracting additional metadata: #{e}\n#{e.backtrace.join("\n\t")}")
+ end
+ end
+ @submission.save
+ end
+
+ def extract_version
+
+ query = Goo.sparql_query_client.select(:versionInfo).distinct
+ .from(@submission.id)
+ .where([RDF::URI.new('http://bioportal.bioontology.org/ontologies/versionSubject'),
+ RDF::URI.new('http://www.w3.org/2002/07/owl#versionInfo'),
+ :versionInfo])
+
+ sol = query.each_solution.first || {}
+ sol[:versionInfo]&.to_s
+ end
+
+ def extract_ontology_iri
+ query = Goo.sparql_query_client.select(:uri).distinct
+ .from(@submission.id)
+ .where([:uri,
+ RDF::URI.new('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ RDF::URI.new('http://www.w3.org/2002/07/owl#Ontology')])
+ sol = query.each_solution.first || {}
+ RDF::URI.new(sol[:uri]) if sol[:uri]
+ end
+
+ # Extract additional metadata about the ontology
+ # First it extracts the main metadata, then the mapped metadata
+ def extract_ontology_metadata(logger, user_params)
+ user_params ||= {}
+ ontology_uri = @submission.uri
+ logger.info("Extraction metadata from ontology #{ontology_uri}")
+
+ # go through all OntologySubmission attributes. Returns symbols
+ LinkedData::Models::OntologySubmission.attributes(:all).each do |attr|
+ # for attributes that have the :extractedMetadata setting on and that were not set by the user
+ attr_settings = LinkedData::Models::OntologySubmission.attribute_settings(attr)
+
+ attr_not_excluded = user_params && !(user_params.key?(attr) && !user_params[attr].nil? && !user_params[attr].empty?)
+
+ next unless attr_settings[:extractedMetadata] && attr_not_excluded
+
+ # a boolean to check if a value that should be single has already been extracted
+ single_extracted = false
+ type = enforce?(attr, :list) ? :list : :string
+ old_value = value(attr, type)
+
+ unless attr_settings[:namespace].nil?
+ property_to_extract = "#{attr_settings[:namespace].to_s}:#{attr.to_s}"
+ hash_results = extract_each_metadata(ontology_uri, attr, property_to_extract, logger)
+ single_extracted = send_value(attr, hash_results) unless hash_results.empty?
+ end
+
+ # extracts attribute value from metadata mappings
+ attr_settings[:metadataMappings] ||= []
+
+ attr_settings[:metadataMappings].each do |mapping|
+ break if single_extracted
+
+ hash_mapping_results = extract_each_metadata(ontology_uri, attr, mapping.to_s, logger)
+ single_extracted = send_value(attr, hash_mapping_results) unless hash_mapping_results.empty?
+ end
+
+ new_value = value(attr, type)
+
+ send_value(attr, old_value) if empty_value?(new_value) && !empty_value?(old_value)
+ end
+ end
+
+ # Set some metadata to default values if nothing extracted
+ def set_default_metadata
+
+ end
+
+ def empty_value?(value)
+ value.nil? || (value.is_a?(Array) && value.empty?) || value.to_s.strip.empty?
+ end
+
+ def value(attr, type)
+ val = @submission.send(attr.to_s)
+ type.eql?(:list) ? Array(val) : val || ''
+ end
+
+ def send_value(attr, value)
+
+ if enforce?(attr, :list)
+ # Add the retrieved value(s) to the attribute if the attribute takes a list of objects
+ metadata_values = value(attr, :list)
+ metadata_values = metadata_values.dup
+
+ metadata_values.push(*value.values)
+
+ @submission.send("#{attr}=", metadata_values.uniq)
+ elsif enforce?(attr, :concatenate)
+ # if there are multiple values for this attribute, concatenate them
+ # Add the concat at the very end, to easily join the content of the array
+ metadata_values = value(attr, :string)
+ metadata_values = metadata_values.split(', ')
+ new_values = value.values.map { |x| x.to_s.split(', ') }.flatten
+
+ @submission.send("#{attr}=", (metadata_values + new_values).uniq.join(', '))
+ else
+ # If there are multiple values for a metadata field that should be single-valued, take the first one in the hash
+
+ @submission.send("#{attr}=", value.values.first)
+ return true
+ end
+ false
+ end
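+
+ # e.g. for a :list attribute values are appended and de-duplicated; for a
+ # :concatenate attribute 'a, b' merged with ['b', 'c'] yields 'a, b, c';
+ # single-valued attributes keep only the first extracted value (sketch).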
+
+ # Return a hash with the best literal value for a URI
+ # it selects the literal by language preference: no language > English > French > other languages
+ def select_metadata_literal(metadata_uri, metadata_literal, hash)
+ return unless metadata_literal.is_a?(RDF::Literal)
+
+ if hash.key?(metadata_uri)
+ if metadata_literal.has_language?
+ if !hash[metadata_uri].has_language?
+ return hash
+ else
+ case metadata_literal.language
+ when :en, :eng
+ # Take the value with english language over other languages
+ hash[metadata_uri] = metadata_literal
+ return hash
+ when :fr, :fre
+ # If no english, take french
+ if hash[metadata_uri].language == :en || hash[metadata_uri].language == :eng
+ return hash
+ else
+ hash[metadata_uri] = metadata_literal
+ return hash
+ end
+ else
+ return hash
+ end
+ end
+ else
+ # Take the value with no language in priority (considered as a default)
+ hash[metadata_uri] = metadata_literal
+ return hash
+ end
+ else
+ hash[metadata_uri] = metadata_literal
+ hash
+ end
+ end
+
+ # A function to extract additional metadata
+ # Takes the literal data if the property points to a literal
+ # If it points to a URI: first take the "omv:name" of the referenced object, falling back to "rdfs:label",
+ # then to "omv:firstName + omv:lastName" (for "omv:Person"), and finally to the URI itself
+ # hash_results maps each object the property points to (URI or literal) to the value extracted from it
+ def extract_each_metadata(ontology_uri, attr, prop_to_extract, logger)
+
+ query_metadata = <<eos
+SELECT DISTINCT ?extractedObject ?omvname ?omvfirstname ?omvlastname ?rdfslabel
+FROM <#{@submission.id}>
+WHERE {
+ <#{ontology_uri}> #{prop_to_extract} ?extractedObject .
+ OPTIONAL { ?extractedObject omv:name ?omvname } .
+ OPTIONAL { ?extractedObject omv:firstName ?omvfirstname } .
+ OPTIONAL { ?extractedObject omv:lastName ?omvlastname } .
+ OPTIONAL { ?extractedObject rdfs:label ?rdfslabel } .
+}
+eos
+ Goo.namespaces.each do |prefix, uri|
+ query_metadata = "PREFIX #{prefix}: <#{uri}>\n" + query_metadata
+ end
+
+ # logger.info(query_metadata)
+ # This hash will contain the "literal" metadata for each object (uri or literal) pointed by the metadata predicate
+ hash_results = {}
+ Goo.sparql_query_client.query(query_metadata).each_solution do |sol|
+ value = sol[:extractedObject]
+ if enforce?(attr, :uri)
+ # If the attr is enforced as URI then it directly takes the URI
+ uri_value = value ? RDF::URI.new(value.to_s.strip) : nil
+ hash_results[value] = uri_value if uri_value&.valid?
+ elsif enforce?(attr, :date_time)
+ begin
+ hash_results[value] = DateTime.iso8601(value.to_s)
+ rescue StandardError => e
+ logger.error("Impossible to extract DateTime metadata for #{attr}: #{value}. It should follow iso8601 standards. Error message: #{e}")
+ end
+ elsif enforce?(attr, :integer)
+ begin
+ hash_results[value] = value.to_s.to_i
+ rescue StandardError => e
+ logger.error("Impossible to extract integer metadata for #{attr}: #{value}. Error message: #{e}")
+ end
+ elsif enforce?(attr, :boolean)
+ case value.to_s.downcase
+ when 'true'
+ hash_results[value] = true
+ when 'false'
+ hash_results[value] = false
+ else
+ logger.error("Impossible to extract boolean metadata for #{attr}: #{value}. Error message: #{e}")
+ end
+ elsif value.is_a?(RDF::URI)
+ hash_results = find_object_label(hash_results, sol, value)
+ else
+ # If this is directly a literal
+ hash_results = select_metadata_literal(value, value, hash_results)
+ end
+ end
+ hash_results
+ end
+
+ def find_object_label(hash_results, sol, value)
+ if !sol[:omvname].nil?
+ hash_results = select_metadata_literal(value, sol[:omvname], hash_results)
+ elsif !sol[:rdfslabel].nil?
+ hash_results = select_metadata_literal(value, sol[:rdfslabel], hash_results)
+ elsif !sol[:omvfirstname].nil?
+ hash_results = select_metadata_literal(value, sol[:omvfirstname], hash_results)
+ # if first and last name are defined (for omv:Person)
+ hash_results[value] = "#{hash_results[value]} #{sol[:omvlastname]}" unless sol[:omvlastname].nil?
+ elsif !sol[:omvlastname].nil?
+ # if only last name is defined
+ hash_results = select_metadata_literal(value, sol[:omvlastname], hash_results)
+ else
+ # if the object is a URI but we are requesting a String
+ hash_results[value] = value.to_s
+ end
+ hash_results
+ end
+
+ def enforce?(attr, type)
+ LinkedData::Models::OntologySubmission.attribute_settings(attr)[:enforce].include?(type)
+ end
+
+ end
+ end
+end
+
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_indexer.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_indexer.rb
new file mode 100644
index 00000000..ac8b22f4
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_indexer.rb
@@ -0,0 +1,198 @@
+module LinkedData
+ module Services
+ class OntologySubmissionIndexer < OntologySubmissionProcess
+
+ def process(logger, options = nil)
+ process_indexation(logger, options)
+ end
+
+ private
+
+ def process_indexation(logger, options)
+
+ status = LinkedData::Models::SubmissionStatus.find('INDEXED').first
+ begin
+ index(logger, options[:commit], false)
+ @submission.add_submission_status(status)
+ rescue StandardError => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.add_submission_status(status.get_error_status)
+ FileUtils.rm(@submission.csv_path) if File.file?(@submission.csv_path)
+ ensure
+ @submission.save
+ end
+ end
+
+ def index(logger, commit = true, optimize = true)
+ page = 0
+ size = 1000
+ count_classes = 0
+ time = Benchmark.realtime do
+ @submission.bring(:ontology) if @submission.bring?(:ontology)
+ @submission.ontology.bring(:acronym) if @submission.ontology.bring?(:acronym)
+ @submission.ontology.bring(:provisionalClasses) if @submission.ontology.bring?(:provisionalClasses)
+ csv_writer = LinkedData::Utils::OntologyCSVWriter.new
+ csv_writer.open(@submission.ontology, @submission.csv_path)
+
+ begin
+ logger.info("Indexing ontology terms: #{@submission.ontology.acronym}...")
+ t0 = Time.now
+ @submission.ontology.unindex_all_data(false)
+ logger.info("Removed ontology terms index (#{Time.now - t0}s)"); logger.flush
+
+ paging = LinkedData::Models::Class.in(@submission).include(:unmapped).aggregate(:count, :children).page(page, size)
+ # a fix for SKOS ontologies, see https://github.com/ncbo/ontologies_api/issues/20
+ @submission.bring(:hasOntologyLanguage) unless @submission.loaded_attributes.include?(:hasOntologyLanguage)
+ cls_count = @submission.hasOntologyLanguage.skos? ? -1 : @submission.class_count(logger)
+ paging.page_count_set(cls_count) unless cls_count < 0
+ total_pages = paging.page(1, size).all.total_pages
+ num_threads = [total_pages, LinkedData.settings.indexing_num_threads].min
+ threads = []
+ page_classes = nil
+
+ num_threads.times do |num|
+ threads[num] = Thread.new {
+ Thread.current["done"] = false
+ Thread.current["page_len"] = -1
+ Thread.current["prev_page_len"] = -1
+
+ while !Thread.current["done"]
+ @submission.synchronize do
+ page = (page == 0 || page_classes.next?) ? page + 1 : nil
+
+ if page.nil?
+ Thread.current["done"] = true
+ else
+ Thread.current["page"] = page || "nil"
+ RequestStore.store[:requested_lang] = :ALL
+ page_classes = paging.page(page, size).all
+ count_classes += page_classes.length
+ Thread.current["page_classes"] = page_classes
+ Thread.current["page_len"] = page_classes.length
+ Thread.current["t0"] = Time.now
+
+ # nothing retrieved even though we're expecting more records
+ if total_pages > 0 && page_classes.empty? && (Thread.current["prev_page_len"] == -1 || Thread.current["prev_page_len"] == size)
+ j = 0
+ num_calls = LinkedData.settings.num_retries_4store
+
+ while page_classes.empty? && j < num_calls do
+ j += 1
+ logger.error("Thread #{num + 1}: Empty page encountered. Retrying #{j} times...")
+ sleep(2)
+ page_classes = paging.page(page, size).all
+ logger.info("Thread #{num + 1}: Success retrieving a page of #{page_classes.length} classes after retrying #{j} times...") unless page_classes.empty?
+ end
+
+ if page_classes.empty?
+ msg = "Thread #{num + 1}: Empty page #{Thread.current["page"]} of #{total_pages} persisted after retrying #{j} times. Indexing of #{@submission.id.to_s} aborted..."
+ logger.error(msg)
+ raise msg
+ else
+ Thread.current["page_classes"] = page_classes
+ end
+ end
+
+ if page_classes.empty?
+ if total_pages > 0
+ logger.info("Thread #{num + 1}: The number of pages reported for #{@submission.id.to_s} - #{total_pages} is higher than expected #{page - 1}. Completing indexing...")
+ else
+ logger.info("Thread #{num + 1}: Ontology #{@submission.id.to_s} contains #{total_pages} pages...")
+ end
+
+ break
+ end
+
+ Thread.current["prev_page_len"] = Thread.current["page_len"]
+ end
+ end
+
+ break if Thread.current["done"]
+
+ logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} - #{Thread.current["page_len"]} ontology terms retrieved in #{Time.now - Thread.current["t0"]} sec.")
+ Thread.current["t0"] = Time.now
+
+ Thread.current["page_classes"].each do |c|
+ begin
+ # this call is needed for indexing of properties
+ LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates, include_languages: true)
+ rescue Exception => e
+ i = 0
+ num_calls = LinkedData.settings.num_retries_4store
+ success = nil
+
+ while success.nil? && i < num_calls do
+ i += 1
+ logger.error("Thread #{num + 1}: Exception while mapping attributes for #{c.id.to_s}. Retrying #{i} times...")
+ sleep(2)
+
+ begin
+ LinkedData::Models::Class.map_attributes(c, paging.equivalent_predicates, include_languages: true)
+ logger.info("Thread #{num + 1}: Success mapping attributes for #{c.id.to_s} after retrying #{i} times...")
+ success = true
+ rescue Exception => e1
+ success = nil
+
+ if i == num_calls
+ logger.error("Thread #{num + 1}: Error mapping attributes for #{c.id.to_s}:")
+ logger.error("Thread #{num + 1}: #{e1.class}: #{e1.message} after retrying #{i} times...\n#{e1.backtrace.join("\n\t")}")
+ logger.flush
+ end
+ end
+ end
+ end
+
+ @submission.synchronize do
+ csv_writer.write_class(c)
+ end
+ end
+ logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} attributes mapped in #{Time.now - Thread.current["t0"]} sec.")
+
+ Thread.current["t0"] = Time.now
+ LinkedData::Models::Class.indexBatch(Thread.current["page_classes"])
+ logger.info("Thread #{num + 1}: Page #{Thread.current["page"]} of #{total_pages} - #{Thread.current["page_len"]} ontology terms indexed in #{Time.now - Thread.current["t0"]} sec.")
+ logger.flush
+ end
+ }
+ end
+
+ threads.map { |t| t.join }
+ csv_writer.close
+
+ begin
+ # index provisional classes
+ @submission.ontology.provisionalClasses.each { |pc| pc.index }
+ rescue Exception => e
+ logger.error("Error while indexing provisional classes for ontology #{@submission.ontology.acronym}:")
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ end
+
+ if commit
+ t0 = Time.now
+ LinkedData::Models::Class.indexCommit()
+ logger.info("Ontology terms index commit in #{Time.now - t0} sec.")
+ end
+ rescue StandardError => e
+ csv_writer.close
+ logger.error("\n\n#{e.class}: #{e.message}\n")
+ logger.error(e.backtrace)
+ raise e
+ end
+ end
+ logger.info("Completed indexing ontology terms: #{@submission.ontology.acronym} in #{time} sec. #{count_classes} classes.")
+ logger.flush
+
+ if optimize
+ logger.info("Optimizing ontology terms index...")
+ time = Benchmark.realtime do
+ LinkedData::Models::Class.indexOptimize()
+ end
+ logger.info("Completed optimizing ontology terms index in #{time} sec.")
+ end
+ end
+ end
+ end
+end
+
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_mertrics_calculator.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_mertrics_calculator.rb
new file mode 100644
index 00000000..b41c06f9
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_mertrics_calculator.rb
@@ -0,0 +1,110 @@
+module LinkedData
+ module Services
+ class SubmissionMetricsCalculator < OntologySubmissionProcess
+ def process(logger, options = nil)
+ process_metrics(logger)
+ end
+
+ def generate_umls_metrics_file(tr_file_path=nil)
+ tr_file_path ||= @submission.triples_file_path
+ class_count = 0
+ indiv_count = 0
+ prop_count = 0
+
+ File.foreach(tr_file_path) do |line|
+ class_count += 1 if line =~ /owl:Class/
+ indiv_count += 1 if line =~ /owl:NamedIndividual/
+ prop_count += 1 if line =~ /owl:ObjectProperty/
+ prop_count += 1 if line =~ /owl:DatatypeProperty/
+ end
+ generate_metrics_file(class_count, indiv_count, prop_count)
+ end
+
+ private
+
+ def process_metrics(logger)
+ status = LinkedData::Models::SubmissionStatus.find('METRICS').first
+ begin
+ compute_metrics(logger)
+ @submission.add_submission_status(status)
+ rescue StandardError => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.metrics = nil
+ @submission.add_submission_status(status.get_error_status)
+ ensure
+ @submission.save
+ end
+ end
+
+ def compute_metrics(logger)
+ metrics = metrics_for_submission(logger)
+ metrics.id = RDF::URI.new(@submission.id.to_s + '/metrics')
+ exist_metrics = LinkedData::Models::Metric.find(metrics.id).first
+ exist_metrics.delete if exist_metrics
+ metrics.save
+ @submission.metrics = metrics
+ @submission
+ end
+
+ def metrics_for_submission(logger)
+ logger.info('metrics_for_submission start')
+ logger.flush
+ begin
+ @submission.bring(:submissionStatus) if @submission.bring?(:submissionStatus)
+ cls_metrics = LinkedData::Metrics.class_metrics(@submission, logger)
+ logger.info('class_metrics finished')
+ logger.flush
+ metrics = LinkedData::Models::Metric.new
+
+ cls_metrics.each do |k,v|
+ unless v.instance_of?(Integer)
+ begin
+ v = Integer(v)
+ rescue ArgumentError
+ v = 0
+ rescue TypeError
+ v = 0
+ end
+ end
+ metrics.send("#{k}=",v)
+ end
+ indiv_count = LinkedData::Metrics.number_individuals(logger, @submission)
+ metrics.individuals = indiv_count
+ logger.info('individuals finished')
+ logger.flush
+ prop_count = LinkedData::Metrics.number_properties(logger, @submission)
+ metrics.properties = prop_count
+ logger.info('properties finished')
+ logger.flush
+ # re-generate metrics file
+ generate_metrics_file(cls_metrics[:classes], indiv_count, prop_count)
+ logger.info('generation of metrics file finished')
+ logger.flush
+ rescue StandardError => e
+ logger.error(e.message)
+ logger.error(e)
+ logger.flush
+ metrics = nil
+ end
+ metrics
+ end
+
+ def generate_metrics_file(class_count, indiv_count, prop_count)
+ CSV.open(@submission.metrics_path, 'wb') do |csv|
+ csv << ['Class Count', 'Individual Count', 'Property Count']
+ csv << [class_count, indiv_count, prop_count]
+ end
+ end
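+
+ # Example CSV written to metrics_path (values illustrative):
+ #   Class Count,Individual Count,Property Count
+ #   10234,57,112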
+
+ # TODO: find where this is used in NCBO code
+ def generate_metrics_file2(class_count, indiv_count, prop_count, max_depth)
+ CSV.open(@submission.metrics_path, 'wb') do |csv|
+ csv << ['Class Count', 'Individual Count', 'Property Count', 'Max Depth']
+ csv << [class_count, indiv_count, prop_count, max_depth]
+ end
+ end
+
+ end
+ end
+end
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_missing_labels.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_missing_labels.rb
new file mode 100644
index 00000000..7da7d127
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_missing_labels.rb
@@ -0,0 +1,269 @@
+module LinkedData
+ module Services
+
+ class GenerateMissingLabels < OntologySubmissionProcess
+ def process(logger, options = {})
+ handle_missing_labels(options[:file_path], logger)
+ end
+
+ private
+
+ def handle_missing_labels(file_path, logger)
+ callbacks = {
+ missing_labels: {
+ op_name: 'Missing Labels Generation',
+ required: true,
+ status: LinkedData::Models::SubmissionStatus.find('RDF_LABELS').first,
+ artifacts: {
+ file_path: file_path
+ },
+ caller_on_pre: :generate_missing_labels_pre,
+ caller_on_pre_page: :generate_missing_labels_pre_page,
+ caller_on_each: :generate_missing_labels_each,
+ caller_on_post_page: :generate_missing_labels_post_page,
+ caller_on_post: :generate_missing_labels_post
+ }
+ }
+
+ raw_paging = LinkedData::Models::Class.in(@submission).include(:prefLabel, :synonym, :label)
+ loop_classes(logger, raw_paging, @submission, callbacks)
+ end
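+
+ # Callback contract (sketch): loop_classes invokes, per callback entry,
+ # caller_on_pre(artifacts, logger, paging); per page, caller_on_pre_page and
+ # caller_on_post_page(artifacts, logger, paging, page_classes, page); per
+ # class, caller_on_each(..., c); and finally caller_on_post(artifacts, logger, paging).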
+
+ def process_callbacks(logger, callbacks, action_name)
+ callbacks.delete_if do |_, callback|
+ begin
+ if callback[action_name]
+ callable = self.method(callback[action_name])
+ yield(callable, callback)
+ end
+ false
+ rescue Exception => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+
+ if callback[:status]
+ @submission.add_submission_status(callback[:status].get_error_status)
+ @submission.save
+ end
+
+ # halt the entire processing if :required is set to true
+ raise e if callback[:required]
+ # continue processing of other callbacks, but not this one
+ true
+ end
+ end
+ end
+
+ def loop_classes(logger, raw_paging, submission, callbacks)
+ page = 1
+ size = 2500
+ count_classes = 0
+ acr = submission.id.to_s.split("/")[-1]
+ operations = callbacks.values.map { |v| v[:op_name] }.join(", ")
+
+ time = Benchmark.realtime do
+ paging = raw_paging.page(page, size)
+ cls_count_set = false
+ cls_count = submission.class_count(logger)
+
+ if cls_count > -1
+ # prevent a COUNT SPARQL query if possible
+ paging.page_count_set(cls_count)
+ cls_count_set = true
+ else
+ cls_count = 0
+ end
+
+ iterate_classes = false
+ # 1. init artifacts hash if not explicitly passed in the callback
+ # 2. determine if class-level iteration is required
+ callbacks.each do |_, callback|
+ callback[:artifacts] ||= {}
+ iterate_classes = true if callback[:caller_on_each]
+ end
+
+ process_callbacks(logger, callbacks, :caller_on_pre) do |callable, callback|
+ callable.call(callback[:artifacts], logger, paging)
+ end
+
+ page_len = -1
+ prev_page_len = -1
+
+ begin
+ t0 = Time.now
+ page_classes = paging.page(page, size).all
+ total_pages = page_classes.total_pages
+ page_len = page_classes.length
+
+ # nothing retrieved even though we're expecting more records
+ if total_pages > 0 && page_classes.empty? && (prev_page_len == -1 || prev_page_len == size)
+ j = 0
+ num_calls = LinkedData.settings.num_retries_4store
+
+ while page_classes.empty? && j < num_calls do
+ j += 1
+ logger.error("Empty page encountered. Retrying #{j} times...")
+ sleep(2)
+ page_classes = paging.page(page, size).all
+ unless page_classes.empty?
+ logger.info("Success retrieving a page of #{page_classes.length} classes after retrying #{j} times...")
+ end
+ end
+
+ if page_classes.empty?
+ msg = "Empty page #{page} of #{total_pages} persisted after retrying #{j} times. #{operations} of #{acr} aborted..."
+ logger.error(msg)
+ raise msg
+ end
+ end
+
+ if page_classes.empty?
+ if total_pages > 0
+ logger.info("The number of pages reported for #{acr} - #{total_pages} is higher than expected #{page - 1}. Completing #{operations}...")
+ else
+ logger.info("Ontology #{acr} contains #{total_pages} pages...")
+ end
+ break
+ end
+
+ prev_page_len = page_len
+ logger.info("#{acr}: page #{page} of #{total_pages} - #{page_len} ontology terms retrieved in #{Time.now - t0} sec.")
+ logger.flush
+ count_classes += page_classes.length
+
+ process_callbacks(logger, callbacks, :caller_on_pre_page) {
+ |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }
+
+ page_classes.each { |c|
+ process_callbacks(logger, callbacks, :caller_on_each) {
+ |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page, c) }
+ } if iterate_classes
+
+ process_callbacks(logger, callbacks, :caller_on_post_page) {
+ |callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }
+ cls_count += page_classes.length unless cls_count_set
+
+ page = page_classes.next? ? page + 1 : nil
+ end while !page.nil?
+
+ callbacks.each { |_, callback| callback[:artifacts][:count_classes] = cls_count }
+ process_callbacks(logger, callbacks, :caller_on_post) {
+ |callable, callback| callable.call(callback[:artifacts], logger, paging) }
+ end
+
+ logger.info("Completed #{operations}: #{acr} in #{time} sec. #{count_classes} classes.")
+ logger.flush
+
+ # set the status on actions that have completed successfully
+ callbacks.each do |_, callback|
+ if callback[:status]
+ @submission.add_submission_status(callback[:status])
+ @submission.save
+ end
+ end
+ end
+
+ def generate_missing_labels_pre(artifacts = {}, logger, paging)
+ file_path = artifacts[:file_path]
+ artifacts[:save_in_file] = File.join(File.dirname(file_path), "labels.ttl")
+ artifacts[:save_in_file_mappings] = File.join(File.dirname(file_path), "mappings.ttl")
+ property_triples = LinkedData::Utils::Triples.rdf_for_custom_properties(@submission)
+ Goo.sparql_data_client.append_triples(@submission.id, property_triples, mime_type = "application/x-turtle")
+ fsave = File.open(artifacts[:save_in_file], "w")
+ fsave.write(property_triples)
+ fsave_mappings = File.open(artifacts[:save_in_file_mappings], "w")
+ artifacts[:fsave] = fsave
+ artifacts[:fsave_mappings] = fsave_mappings
+ end
+
+ def generate_missing_labels_pre_page(artifacts = {}, logger, paging, page_classes, page)
+ artifacts[:label_triples] = []
+ artifacts[:mapping_triples] = []
+ end
+
+ def generate_missing_labels_each(artifacts = {}, logger, paging, page_classes, page, c)
+ prefLabel = nil
+
+ if c.prefLabel.nil?
+ rdfs_labels = c.label
+
+ if rdfs_labels && rdfs_labels.length > 1 && c.synonym.length > 0
+ rdfs_labels = (Set.new(c.label) - Set.new(c.synonym)).to_a.first
+
+ rdfs_labels = c.label if rdfs_labels.nil? || rdfs_labels.length == 0
+ end
+
+ rdfs_labels = [rdfs_labels] if rdfs_labels && !rdfs_labels.is_a?(Array)
+ label = nil
+
+ if rdfs_labels && rdfs_labels.length > 0
+ label = rdfs_labels[0]
+ else
+ label = LinkedData::Utils::Triples.last_iri_fragment c.id.to_s
+ end
+ artifacts[:label_triples] << LinkedData::Utils::Triples.label_for_class_triple(
+ c.id, Goo.vocabulary(:metadata_def)[:prefLabel], label)
+ prefLabel = label
+ else
+ prefLabel = c.prefLabel
+ end
+
+ if @submission.ontology.viewOf.nil?
+ loomLabel = LinkedData::Models::OntologySubmission.loom_transform_literal(prefLabel.to_s)
+
+ if loomLabel.length > 2
+ artifacts[:mapping_triples] << LinkedData::Utils::Triples.loom_mapping_triple(
+ c.id, Goo.vocabulary(:metadata_def)[:mappingLoom], loomLabel)
+ end
+ artifacts[:mapping_triples] << LinkedData::Utils::Triples.uri_mapping_triple(
+ c.id, Goo.vocabulary(:metadata_def)[:mappingSameURI], c.id)
+ end
+ end
+
+ def generate_missing_labels_post_page(artifacts = {}, logger, paging, page_classes, page)
+ rest_mappings = LinkedData::Mappings.migrate_rest_mappings(@submission.ontology.acronym)
+ artifacts[:mapping_triples].concat(rest_mappings)
+
+ if artifacts[:label_triples].length > 0
+ logger.info("Asserting #{artifacts[:label_triples].length} labels in " +
+ "#{@submission.id.to_ntriples}")
+ logger.flush
+ artifacts[:label_triples] = artifacts[:label_triples].join("\n")
+ artifacts[:fsave].write(artifacts[:label_triples])
+ t0 = Time.now
+ Goo.sparql_data_client.append_triples(@submission.id, artifacts[:label_triples], mime_type = "application/x-turtle")
+ t1 = Time.now
+ logger.info("Labels asserted in #{t1 - t0} sec.")
+ logger.flush
+ else
+ logger.info("No labels generated in page #{page}.")
+ logger.flush
+ end
+
+ if artifacts[:mapping_triples].length > 0
+ logger.info("Asserting #{artifacts[:mapping_triples].length} mappings in " +
+ "#{@submission.id.to_ntriples}")
+ logger.flush
+ artifacts[:mapping_triples] = artifacts[:mapping_triples].join("\n")
+ artifacts[:fsave_mappings].write(artifacts[:mapping_triples])
+
+ t0 = Time.now
+ Goo.sparql_data_client.append_triples(@submission.id, artifacts[:mapping_triples], mime_type = "application/x-turtle")
+ t1 = Time.now
+ logger.info("Mapping labels asserted in #{t1 - t0} sec.")
+ logger.flush
+ end
+ end
+
+ def generate_missing_labels_post(artifacts = {}, logger, paging)
+ logger.info("Finished generate_missing_labels: traversed #{artifacts[:count_classes]} classes")
+ logger.info("Saved generated labels in #{artifacts[:save_in_file]}")
+ artifacts[:fsave].close()
+ artifacts[:fsave_mappings].close()
+ logger.flush
+ end
+
+ end
+
+ end
+end
+
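The callbacks table in `handle_missing_labels` is the extension point of `loop_classes`: an operation registers `caller_on_pre`, `caller_on_pre_page`, `caller_on_each`, `caller_on_post_page` and `caller_on_post` hooks, and the paging, retry and status bookkeeping are handled centrally. A minimal sketch of a hypothetical extra operation wired into the same contract (the `synonym_count` names are illustrative, not part of this change):

# A hypothetical operation reusing the loop_classes callback contract.
# The :synonym_count key and method names below are illustrative only.
def handle_synonym_count(logger)
  callbacks = {
    synonym_count: {
      op_name: 'Synonym Counting',
      required: false, # a failure here is logged but does not abort processing
      artifacts: { synonym_count: 0 },
      caller_on_each: :count_synonyms_each # triggers class-level iteration
    }
  }
  raw_paging = LinkedData::Models::Class.in(@submission).include(:synonym)
  loop_classes(logger, raw_paging, @submission, callbacks)
end

def count_synonyms_each(artifacts, logger, paging, page_classes, page, c)
  artifacts[:synonym_count] += (c.synonym || []).length
end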
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_obsolete_classes.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_obsolete_classes.rb
new file mode 100644
index 00000000..aedb70a4
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_obsolete_classes.rb
@@ -0,0 +1,82 @@
+module LinkedData
+ module Services
+
+ class ObsoleteClassesGenerator < OntologySubmissionProcess
+
+ def process(logger, options)
+ status = LinkedData::Models::SubmissionStatus.find('OBSOLETE').first
+ begin
+ generate_obsolete_classes(logger, options[:file_path])
+ @submission.add_submission_status(status)
+ @submission.save
+ rescue Exception => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.add_submission_status(status.get_error_status)
+ @submission.save
+ # if obsolete fails the parsing fails
+ raise e
+ end
+ @submission
+ end
+
+ private
+
+ def generate_obsolete_classes(logger, file_path)
+ @submission.bring(:obsoleteProperty) if @submission.bring?(:obsoleteProperty)
+ @submission.bring(:obsoleteParent) if @submission.bring?(:obsoleteParent)
+ classes_deprecated = []
+ if @submission.obsoleteProperty &&
+ @submission.obsoleteProperty.to_s != "http://www.w3.org/2002/07/owl#deprecated"
+
+ predicate_obsolete = RDF::URI.new(@submission.obsoleteProperty.to_s)
+ query_obsolete_predicate = <<-eos
+SELECT ?class_id
+FROM #{@submission.id.to_ntriples}
+WHERE { ?class_id #{predicate_obsolete.to_ntriples} ?deprecated . }
+eos
+ Goo.sparql_query_client.query(query_obsolete_predicate).each_solution do |sol|
+ classes_deprecated << sol[:class_id].to_s unless %w[false 0].include?(sol[:deprecated].to_s)
+ end
+ end
+
+ if @submission.obsoleteParent
+ # classes under the configured obsolete root are deprecated as well
+ obsolete_parent = LinkedData::Models::Class.find(@submission.obsoleteParent).in(@submission).first
+ obsolete_parent&.descendants&.each { |obs| classes_deprecated << obs.id.to_s }
+ end
+
+ if classes_deprecated.length > 0
+ classes_deprecated.uniq!
+ logger.info("Asserting owl:deprecated statement for #{classes_deprecated.length} classes")
+ save_in_file = File.join(File.dirname(file_path), "obsolete.ttl")
+ fsave = File.open(save_in_file, "w")
+ classes_deprecated.each do |class_id|
+ fsave.write(LinkedData::Utils::Triples.obselete_class_triple(class_id) + "\n")
+ end
+ fsave.close()
+ result = Goo.sparql_data_client.append_triples_from_file(
+ @submission.id,
+ save_in_file,
+ mime_type = "application/x-turtle")
+ end
+ end
+ end
+ end
+end
+
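Like the other operations in this PR, the generator is driven through `process(logger, options)`. A hedged usage sketch, assuming an already-parsed `submission` and a `Logger`:

# obsolete.ttl is written next to the given file before being appended to the graph
generator = LinkedData::Services::ObsoleteClassesGenerator.new(submission)
generator.process(logger, file_path: submission.uploadFilePath)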
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_properties_indexer.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_properties_indexer.rb
new file mode 100644
index 00000000..61272376
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_properties_indexer.rb
@@ -0,0 +1,70 @@
+module LinkedData
+ module Services
+ class SubmissionPropertiesIndexer < OntologySubmissionProcess
+
+ def process(logger, options = nil)
+ process_indexation(logger, options)
+ end
+
+ private
+
+ def process_indexation(logger, options)
+ status = LinkedData::Models::SubmissionStatus.find('INDEXED_PROPERTIES').first
+ begin
+ index_properties(logger, commit: options[:commit], optimize: false)
+ @submission.add_submission_status(status)
+ rescue StandardError => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.add_submission_status(status.get_error_status)
+ ensure
+ @submission.save
+ end
+ end
+
+ def index_properties(logger, commit: true, optimize: true)
+ page = 1
+ size = 2500
+ count_props = 0
+
+ time = Benchmark.realtime do
+ @submission.bring(:ontology) if @submission.bring?(:ontology)
+ @submission.ontology.bring(:acronym) if @submission.ontology.bring?(:acronym)
+ logger.info("Indexing ontology properties: #{@submission.ontology.acronym}...")
+ t0 = Time.now
+ @submission.ontology.unindex_properties(commit)
+ logger.info("Removed ontology properties index in #{Time.now - t0} seconds."); logger.flush
+
+ props = @submission.ontology.properties
+ count_props = props.length
+ total_pages = (count_props/size.to_f).ceil
+ logger.info("Indexing a total of #{total_pages} pages of #{size} properties each.")
+
+ props.each_slice(size) do |prop_batch|
+ t = Time.now
+ LinkedData::Models::OntologyProperty.indexBatch(prop_batch)
+ logger.info("Page #{page} of ontology properties indexed in #{Time.now - t} seconds."); logger.flush
+ page += 1
+ end
+
+ if commit
+ t0 = Time.now
+ LinkedData::Models::OntologyProperty.indexCommit
+ logger.info("Ontology properties index commit in #{Time.now - t0} seconds.")
+ end
+ end
+ logger.info("Completed indexing ontology properties of #{@submission.ontology.acronym} in #{time} sec. Total of #{count_props} properties indexed.")
+ logger.flush
+
+ if optimize
+ logger.info('Optimizing ontology properties index...')
+ time = Benchmark.realtime do
+ LinkedData::Models::OntologyProperty.indexOptimize
+ end
+ logger.info("Completed optimizing ontology properties index in #{time} seconds.")
+ end
+ end
+ end
+ end
+end
+
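A usage sketch for the properties indexer; `process` forwards `options[:commit]` to `index_properties` and pins `optimize: false` during submission processing:

# hedged sketch, assuming a processed submission and a Logger
indexer = LinkedData::Services::SubmissionPropertiesIndexer.new(submission)
indexer.process(logger, commit: true)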
diff --git a/lib/ontologies_linked_data/services/submission_process/operations/submission_rdf_generator.rb b/lib/ontologies_linked_data/services/submission_process/operations/submission_rdf_generator.rb
new file mode 100644
index 00000000..7976690c
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/operations/submission_rdf_generator.rb
@@ -0,0 +1,93 @@
+module LinkedData
+ module Services
+
+ class SubmissionRDFGenerator < OntologySubmissionProcess
+
+ def process(logger, options)
+ reasoning = options[:reasoning]
+
+ # Remove processing status types before starting RDF parsing etc.
+ @submission.submissionStatus = nil
+ status = LinkedData::Models::SubmissionStatus.find('UPLOADED').first
+ @submission.add_submission_status(status)
+ @submission.save
+
+ # Parse RDF
+ begin
+ unless @submission.valid?
+ error = 'Submission is not valid, it cannot be processed. Check errors.'
+ raise ArgumentError, error
+ end
+ unless @submission.uploadFilePath
+ error = 'Submission is missing an ontology file, cannot parse.'
+ raise ArgumentError, error
+ end
+ status = LinkedData::Models::SubmissionStatus.find('RDF').first
+ @submission.remove_submission_status(status) # remove RDF status before starting
+ generate_rdf(logger, reasoning: reasoning)
+ @submission.add_submission_status(status)
+ @submission.save
+ rescue StandardError => e
+ logger.error("#{e.class}: #{e.message}\n#{e.backtrace.join("\n\t")}")
+ logger.flush
+ @submission.add_submission_status(status.get_error_status)
+ @submission.save
+ # If RDF generation fails, no point of continuing
+ raise e
+ end
+ end
+
+ private
+
+ def generate_rdf(logger, reasoning: true)
+ mime_type = nil
+
+ if @submission.hasOntologyLanguage.umls?
+ triples_file_path = @submission.triples_file_path
+ logger.info("Using UMLS turtle file found, skipping OWLAPI parse")
+ logger.flush
+ mime_type = LinkedData::MediaTypes.media_type_from_base(LinkedData::MediaTypes::TURTLE)
+ SubmissionMetricsCalculator.new(@submission).generate_umls_metrics_file(triples_file_path)
+ else
+ output_rdf = @submission.rdf_path
+
+ if File.exist?(output_rdf)
+ logger.info("deleting old owlapi.xrdf ..")
+ deleted = FileUtils.rm(output_rdf)
+
+ if !deleted.empty?
+ logger.info("deleted")
+ else
+ logger.info("error deleting owlapi.rdf")
+ end
+ end
+
+ owlapi = @submission.owlapi_parser(logger: logger)
+ owlapi.disable_reasoner unless reasoning
+
+ triples_file_path, missing_imports = owlapi.parse
+
+ if missing_imports && !missing_imports.empty?
+ @submission.missingImports = missing_imports
+
+ missing_imports.each do |imp|
+ logger.info("OWL_IMPORT_MISSING: #{imp}")
+ end
+ else
+ @submission.missingImports = nil
+ end
+ logger.flush
+ end
+ delete_and_append(triples_file_path, logger, mime_type)
+ end
+
+ def delete_and_append(triples_file_path, logger, mime_type = nil)
+ Goo.sparql_data_client.delete_graph(@submission.id)
+ Goo.sparql_data_client.put_triples(@submission.id, triples_file_path, mime_type)
+ logger.info("Triples #{triples_file_path} appended in #{@submission.id.to_ntriples}")
+ logger.flush
+ end
+ end
+ end
+end
+
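The RDF generator takes the same shape: the UMLS branch loads the pre-generated turtle file and skips OWLAPI, while the OWL branch parses with OWLAPI and records any missing imports before the graph is replaced. A usage sketch:

# hedged sketch: rebuild the submission graph with the reasoner enabled
LinkedData::Services::SubmissionRDFGenerator.new(submission).process(logger, reasoning: true)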
diff --git a/lib/ontologies_linked_data/services/submission_process/submission_process.rb b/lib/ontologies_linked_data/services/submission_process/submission_process.rb
new file mode 100644
index 00000000..f6c16a1a
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/submission_process.rb
@@ -0,0 +1,18 @@
+module LinkedData
+ module Services
+ class OntologySubmissionProcess
+
+ def initialize(submission)
+ @submission = submission
+ end
+
+ def process(logger, options = {})
+ call
+ end
+
+ def call
+ raise NotImplementedError
+ end
+ end
+ end
+end
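All operations above derive from this base class, so adding a new processing step only means overriding `process`. A minimal sketch of a custom subclass (the class name is illustrative):

class SubmissionNoopStep < LinkedData::Services::OntologySubmissionProcess
  # hypothetical example step: logs and returns the submission unchanged
  def process(logger, options = {})
    logger.info("no-op step for #{@submission.id}")
    @submission
  end
end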
diff --git a/lib/ontologies_linked_data/services/submission_process/submission_processor.rb b/lib/ontologies_linked_data/services/submission_process/submission_processor.rb
new file mode 100644
index 00000000..9fc03020
--- /dev/null
+++ b/lib/ontologies_linked_data/services/submission_process/submission_processor.rb
@@ -0,0 +1,128 @@
+module LinkedData
+ module Services
+ class OntologyProcessor < OntologySubmissionProcess
+
+ ################################################################
+ # Possible options with their defaults:
+ # process_rdf = false
+ # index_search = false
+ # index_properties = false
+ # index_commit = false
+ # run_metrics = false
+ # reasoning = false
+ # diff = false
+ # archive = false
+ # if no options are passed, ALL actions run except archive
+ ################################################################
+ def process(logger, options = nil)
+ process_submission(logger, options)
+ end
+
+ private
+
+ def process_submission(logger, options = {})
+ # Wrap the whole process so we can email results
+ begin
+ @submission.bring_remaining
+ @submission.ontology.bring_remaining
+
+ logger.info("Starting to process #{@submission.ontology.acronym}/submissions/#{@submission.submissionId}")
+ logger.flush
+ LinkedData::Parser.logger = logger
+
+ if process_archive?(options)
+ @submission.archive
+ else
+
+ @submission.generate_rdf(logger, reasoning: process_reasoning?(options)) if process_rdf?(options)
+
+ parsed = @submission.ready?(status: %i[rdf])
+
+ @submission.extract_metadata(logger, user_params: options[:params], heavy_extraction: extract_metadata?(options))
+
+ @submission.generate_missing_labels(logger) if generate_missing_labels?(options)
+
+ @submission.generate_obsolete_classes(logger) if generate_obsolete_classes?(options)
+
+ if !parsed && (index_search?(options) || index_properties?(options) || index_all_data?(options))
+ raise StandardError, "The submission #{@submission.ontology.acronym}/submissions/#{@submission.submissionId} cannot be indexed because it has not been successfully parsed"
+ end
+
+ @submission.index_all(logger, commit: process_index_commit?(options)) if index_all_data?(options)
+
+ @submission.index_terms(logger, commit: process_index_commit?(options)) if index_search?(options)
+
+ @submission.index_properties(logger, commit: process_index_commit?(options)) if index_properties?(options)
+
+ @submission.generate_metrics(logger) if process_metrics?(options)
+
+ @submission.generate_diff(logger) if process_diff?(options)
+ end
+ @submission.save
+ logger.info("Submission processing of #{@submission.id} completed successfully")
+ logger.flush
+ ensure
+ # make sure results get emailed
+ notify_submission_processed(logger)
+ end
+ @submission
+ end
+
+ def notify_submission_processed(logger)
+ LinkedData::Utils::Notifications.submission_processed(@submission)
+ rescue StandardError => e
+ logger.error("Email sending failed: #{e.message}\n#{e.backtrace.join("\n\t")}"); logger.flush
+ end
+
+ def process_archive?(options)
+ options[:archive].eql?(true)
+ end
+
+ def process_rdf?(options)
+ options.empty? || options[:process_rdf].eql?(true)
+ end
+
+ def generate_missing_labels?(options)
+ (options[:generate_missing_labels].nil? && process_rdf?(options)) || options[:generate_missing_labels].eql?(true)
+ end
+
+ def generate_obsolete_classes?(options)
+ (options[:generate_obsolete_classes].nil? && process_rdf?(options)) || options[:generate_obsolete_classes].eql?(true)
+ end
+
+ def index_all_data?(options)
+ options.empty? || options[:index_all_data].eql?(true)
+ end
+
+ def index_search?(options)
+ options.empty? || options[:index_search].eql?(true)
+ end
+
+ def index_properties?(options)
+ options.empty? || options[:index_properties].eql?(true)
+ end
+
+ def process_index_commit?(options)
+ index_search?(options) || index_properties?(options) || index_all_data?(options)
+ end
+
+ def process_diff?(options)
+ options.empty? || options[:diff].eql?(true)
+ end
+
+ def process_metrics?(options)
+ options.empty? || options[:run_metrics].eql?(true)
+ end
+
+ def process_reasoning?(options)
+ options.empty? && options[:reasoning].eql?(true)
+ end
+
+ def extract_metadata?(options)
+ options.empty? || options[:extract_metadata].eql?(true)
+ end
+
+ end
+ end
+end
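Each `*?` predicate above treats an empty options hash as "run everything"; otherwise only explicitly enabled steps run. A usage sketch:

processor = LinkedData::Services::OntologyProcessor.new(submission)

# empty options: all steps run (archive excepted)
processor.process(logger, {})

# explicit flags: only RDF generation (label generation follows process_rdf) and term indexing
processor.process(logger, process_rdf: true, index_search: true)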
diff --git a/lib/ontologies_linked_data/utils/notifications.rb b/lib/ontologies_linked_data/utils/notifications.rb
index e2664fc9..1d37cca0 100644
--- a/lib/ontologies_linked_data/utils/notifications.rb
+++ b/lib/ontologies_linked_data/utils/notifications.rb
@@ -25,6 +25,7 @@ def self.new_note(note)
note.relatedOntology.each do |ont|
Notifier.notify_subscribed_separately subject, body, ont, 'NOTES'
+ Notifier.notify_mails_grouped subject, body, Notifier.support_mails + Notifier.admin_mails(ont)
end
end
@@ -32,15 +33,15 @@ def self.submission_processed(submission)
submission.bring_remaining
ontology = submission.ontology
ontology.bring(:name, :acronym)
- result = submission.ready? ? 'Success' : 'Failure'
+ result = submission.ready? || submission.archived? ? 'Success' : 'Failure'
status = LinkedData::Models::SubmissionStatus.readable_statuses(submission.submissionStatus)
-
+ ontology_location = "#{LinkedData::Hypermedia.generate_links(ontology)['ui']}?invalidate_cache=true"
subject = "[#{LinkedData.settings.ui_name}] #{ontology.name} Parsing #{result}"
body = SUBMISSION_PROCESSED.gsub('%ontology_name%', ontology.name)
.gsub('%ontology_acronym%', ontology.acronym)
.gsub('%statuses%', status.join('
'))
.gsub('%admin_email%', LinkedData.settings.email_sender)
- .gsub('%ontology_location%', LinkedData::Hypermedia.generate_links(ontology)['ui'])
+ .gsub('%ontology_location%', ontology_location)
.gsub('%ui_name%', LinkedData.settings.ui_name)
Notifier.notify_subscribed_separately subject, body, ontology, 'PROCESSING'
diff --git a/lib/ontologies_linked_data/utils/notifier.rb b/lib/ontologies_linked_data/utils/notifier.rb
index c4d9ad71..d6e93a17 100644
--- a/lib/ontologies_linked_data/utils/notifier.rb
+++ b/lib/ontologies_linked_data/utils/notifier.rb
@@ -40,7 +40,7 @@ def self.notify_subscribed_separately(subject, body, ontology, notification_type
end
def self.notify_administrators_grouped(subject, body, ontology)
- notify_support_grouped subject, body, admin_mails(ontology)
+ notify_support_grouped subject, body
end
def self.notify_mails_separately(subject, body, mails)
@@ -111,7 +111,7 @@ def self.mail_options
}
if LinkedData.settings.smtp_auth_type && LinkedData.settings.smtp_auth_type != :none
- options.merge({
+ options.merge!({
user_name: LinkedData.settings.smtp_user,
password: LinkedData.settings.smtp_password,
authentication: LinkedData.settings.smtp_auth_type
diff --git a/rakelib/docker_based_test.rake b/rakelib/docker_based_test.rake
new file mode 100644
index 00000000..1f337023
--- /dev/null
+++ b/rakelib/docker_based_test.rake
@@ -0,0 +1,121 @@
+# Rake tasks for running unit tests with backend services running as docker containers
+
+desc 'Run unit tests with docker based backend'
+namespace :test do
+ namespace :docker do
+ task :up do
+ system("docker compose up -d") || abort("Unable to start docker containers")
+ unless system("curl -sf http://localhost:8983/solr || exit 1")
+ printf("waiting for Solr container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:8983/solr || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ abort(" Solr container hasn't initialized properly")
+ end
+ end
+ printf("\n")
+ end
+ end
+ task :down do
+ #system("docker compose --profile fs --profile ag stop")
+ #system("docker compose --profile fs --profile ag kill")
+ end
+ desc "run tests with docker AG backend"
+ task :ag do
+ ENV["GOO_BACKEND_NAME"]="allegrograph"
+ ENV["GOO_PORT"]="10035"
+ ENV["GOO_PATH_QUERY"]="/repositories/ontoportal_test"
+ ENV["GOO_PATH_DATA"]="/repositories/ontoportal_test/statements"
+ ENV["GOO_PATH_UPDATE"]="/repositories/ontoportal_test/statements"
+ ENV["COMPOSE_PROFILES"]="ag"
+ Rake::Task["test:docker:up"].invoke
+ # AG takes some time to start and create databases/accounts
+ # TODO: replace system curl command with native ruby code
+ unless system("curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1")
+ printf("waiting for AllegroGraph container to initialize")
+ sec = 0
+ until system("curl -sf http://127.0.0.1:10035/repositories/ontoportal_test/status | grep -iqE '(^running|^lingering)' || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ end
+ end
+ puts
+ system("docker compose ps") # TODO: remove after GH actions troubleshooting is complete
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ desc "run tests with docker 4store backend"
+ task :fs do
+ ENV["GOO_PORT"]="9000"
+ ENV["COMPOSE_PROFILES"]='fs'
+ Rake::Task["test:docker:up"].invoke
+
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ desc "run tests with docker Virtuoso backend"
+ task :vo do
+ ENV["GOO_BACKEND_NAME"]="virtuoso"
+ ENV["GOO_PORT"]="8890"
+ ENV["GOO_PATH_QUERY"]="/sparql"
+ ENV["GOO_PATH_DATA"]="/sparql"
+ ENV["GOO_PATH_UPDATE"]="/sparql"
+ ENV["COMPOSE_PROFILES"]="vo"
+ Rake::Task["test:docker:up"].invoke
+ #
+ unless system("curl -sf http://localhost:8890/sparql || exit 1")
+ printf("waiting for Virtuoso container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:8890/sparql || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ system("docker compose logs virtuoso-ut")
+ abort(" Virtuoso container hasn't initialized properly")
+ end
+ end
+ end
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+
+ desc "run tests with docker GraphDb backend"
+ task :gb do
+ ENV["GOO_BACKEND_NAME"]="graphdb"
+ ENV["GOO_PORT"]="7200"
+ ENV["GOO_PATH_QUERY"]="/repositories/ontoportal"
+ ENV["GOO_PATH_DATA"]="/repositories/ontoportal/statements"
+ ENV["GOO_PATH_UPDATE"]="/repositories/ontoportal/statements"
+ ENV["COMPOSE_PROFILES"]="gb"
+ Rake::Task["test:docker:up"].invoke
+
+ #system("docker compose cp ./test/data/graphdb-repo-config.ttl graphdb:/opt/graphdb/dist/configs/templates/graphdb-repo-config.ttl")
+ #system("docker compose cp ./test/data/graphdb-test-load.nt graphdb:/opt/graphdb/dist/configs/templates/graphdb-test-load.nt")
+ #system('docker compose exec graphdb sh -c "importrdf load -f -c /opt/graphdb/dist/configs/templates/graphdb-repo-config.ttl -m parallel /opt/graphdb/dist/configs/templates/graphdb-test-load.nt ;"')
+ unless system("curl -sf http://localhost:7200/repositories || exit 1")
+ printf("waiting for Graphdb container to initialize")
+ sec = 0
+ until system("curl -sf http://localhost:7200/repositories || exit 1") do
+ sleep(1)
+ printf(".")
+ sec += 1
+ if sec > 30
+ system("docker compose logs graphdb")
+ abort(" Graphdb container hasn't initialized properly")
+ end
+ end
+ end
+ Rake::Task["test"].invoke
+ Rake::Task["test:docker:down"].invoke
+ end
+
+ end
+end
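The curl-based polling above (see the TODO in the :ag task) could later be replaced with native Ruby. A hedged sketch of what such a helper might look like; `wait_for_endpoint` is not part of this change:

require 'net/http'
require 'uri'

# Poll an endpoint until it answers with a 2xx, or give up after `timeout` seconds.
def wait_for_endpoint(url, timeout: 30)
  uri = URI(url)
  deadline = Time.now + timeout
  until Time.now > deadline
    begin
      return true if Net::HTTP.get_response(uri).is_a?(Net::HTTPSuccess)
    rescue SystemCallError, Net::OpenTimeout, Net::ReadTimeout
      # backend container is not accepting connections yet
    end
    sleep 1
  end
  false
end

# e.g. abort("Solr didn't start") unless wait_for_endpoint("http://localhost:8983/solr")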
diff --git a/test/data/graphdb-repo-config.ttl b/test/data/graphdb-repo-config.ttl
new file mode 100644
index 00000000..9200da9a
--- /dev/null
+++ b/test/data/graphdb-repo-config.ttl
@@ -0,0 +1,33 @@
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix rep: <http://www.openrdf.org/config/repository#> .
+@prefix sail: <http://www.openrdf.org/config/sail#> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+
+<#ontoportal> a rep:Repository;
+ rep:repositoryID "ontoportal";
+ rep:repositoryImpl [
+ rep:repositoryType "graphdb:SailRepository";
+ <http://www.openrdf.org/config/repository/sail#sailImpl> [
+ <http://www.ontotext.com/trree/owlim#base-URL> "http://example.org/owlim#";
+ <http://www.ontotext.com/trree/owlim#check-for-inconsistencies> "false";
+ <http://www.ontotext.com/trree/owlim#defaultNS> "";
+ <http://www.ontotext.com/trree/owlim#disable-sameAs> "true";
+ <http://www.ontotext.com/trree/owlim#enable-context-index> "false";
+ <http://www.ontotext.com/trree/owlim#enablePredicateList> "true";
+ <http://www.ontotext.com/trree/owlim#enable-literal-index> "true";
+ <http://www.ontotext.com/trree/owlim#entity-id-size> "32";
+ <http://www.ontotext.com/trree/owlim#entity-index-size> "10000000";
+ <http://www.ontotext.com/trree/owlim#imports> "";
+ <http://www.ontotext.com/trree/owlim#in-memory-literal-properties> "true";
+ <http://www.ontotext.com/trree/owlim#owlim-license> "";
+ <http://www.ontotext.com/trree/owlim#query-limit-results> "0";
+ <http://www.ontotext.com/trree/owlim#query-timeout> "0";
+ <http://www.ontotext.com/trree/owlim#read-only> "false";
+ <http://www.ontotext.com/trree/owlim#repository-type> "file-repository";
+ <http://www.ontotext.com/trree/owlim#ruleset> "rdfsplus-optimized";
+ <http://www.ontotext.com/trree/owlim#storage-folder> "storage";
+ <http://www.ontotext.com/trree/owlim#throw-QueryEvaluationException-on-timeout> "false";
+ sail:sailType "owlim:Sail"
+ ]
+ ];
+ rdfs:label "" .
\ No newline at end of file
diff --git a/test/data/graphdb-test-load.nt b/test/data/graphdb-test-load.nt
new file mode 100644
index 00000000..e69de29b
diff --git a/test/data/ontology_files/BRO_v3.5.owl b/test/data/ontology_files/BRO_v3.5.owl
index aee5caa9..33f16c9d 100644
--- a/test/data/ontology_files/BRO_v3.5.owl
+++ b/test/data/ontology_files/BRO_v3.5.owl
@@ -616,6 +616,8 @@
Activity
+ ActivityEnglish
+ Activité
Activity of interest that may be related to a BRO:Resource.
activities
diff --git a/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos b/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
index fef6bbe2..3123a0d9 100644
--- a/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
+++ b/test/data/ontology_files/thesaurusINRAE_nouv_structure.skos
@@ -30,7 +30,7 @@
1331561625299
- aktivite
+ aktivite
2012-03-12T22:13:45Z
2017-09-22T14:09:06Z
@@ -39,7 +39,7 @@
00008d7b
- air-water exchanges
+ air-water exchanges
2019-09-04T12:02:37
diff --git a/test/http_cache/test_http_cache.rb b/test/http_cache/test_http_cache.rb
index 782ea72b..a80012c3 100644
--- a/test/http_cache/test_http_cache.rb
+++ b/test/http_cache/test_http_cache.rb
@@ -24,7 +24,7 @@ def self.after_suite
def _ontology_and_class
results = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
ontology = results[2].first
- cls = LinkedData::Models::Class.where.include(:prefLabel).in(ontology.latest_submission).page(1, 1).first
+ cls = LinkedData::Models::Class.where.include(:prefLabel).in(ontology.latest_submission).first
return ontology, cls
end
diff --git a/test/mappings/test_mappings_bulk_load.rb b/test/mappings/test_mappings_bulk_load.rb
index 176d91b6..d035a71c 100644
--- a/test/mappings/test_mappings_bulk_load.rb
+++ b/test/mappings/test_mappings_bulk_load.rb
@@ -7,24 +7,19 @@ class TestMappingBulkLoad < LinkedData::TestOntologyCommon
ONT_ACR2 = 'MAPPING_TEST2'
def self.before_suite
- LinkedData::TestCase.backend_4s_delete
- self.ontologies_parse
- end
-
- def self.ontologies_parse
helper = LinkedData::TestOntologyCommon.new(self)
+ # indexing is needed
helper.submission_parse(ONT_ACR1,
'MappingOntTest1',
'./test/data/ontology_files/BRO_v3.3.owl', 11,
- process_rdf: true, index_search: true,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false, index_search: true)
helper.submission_parse(ONT_ACR2,
'MappingOntTest2',
'./test/data/ontology_files/CNO_05.owl', 22,
- process_rdf: true, index_search: true,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false, index_search: true)
end
+
def test_mapping_classes_found
ontology_id = 'http://bioontology.org/ontologies/BiomedicalResources.owl'
mapping_hash = {
@@ -43,7 +38,7 @@ def test_mapping_classes_found
"source_contact_info": 'orcid:1234,orcid:5678',
"date": '2020-05-30'
}
- commun_test(mapping_hash, ontology_id)
+ common_test(mapping_hash, ontology_id)
end
def test_mapping_classes_not_found
@@ -174,7 +169,7 @@ def test_mapping_with_no_source_ids
"source_contact_info": 'orcid:1234,orcid:5678',
"date": '2020-05-30'
}
- commun_test(mapping_hash, ontology_id)
+ common_test(mapping_hash, ontology_id)
end
private
@@ -185,7 +180,7 @@ def delete_rest_mappings
end
end
- def commun_test(mapping_hash, ontology_id)
+ def common_test(mapping_hash, ontology_id)
mappings = mapping_load(mapping_hash, ontology_id)
selected = mappings.select do |m|
@@ -210,7 +205,7 @@ def mapping_load(mapping_hash, ontology_id)
user_name = 'test_mappings_user'
user = LinkedData::Models::User.where(username: user_name).include(:username).first
if user.nil?
- user = LinkedData::Models::User.new(username: user_name, email: 'some@email.org')
+ user = LinkedData::Models::User.new(username: user_name, email: "some#{rand}@email.org")
user.passwordHash = 'some random pass hash'
user.save
end
@@ -218,10 +213,8 @@ def mapping_load(mapping_hash, ontology_id)
raise ArgumentError, errors unless errors.empty?
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- ct = LinkedData::Models::MappingCount.where.all.length
- assert ct > 2
- o = LinkedData::Models::Ontology.where(submissions: { URI: ontology_id })
+ assert create_count_mapping > 2
+ o = LinkedData::Models::Ontology.where(submissions: { URI: RDF::URI.new(ontology_id) })
.include(submissions: %i[submissionId submissionStatus])
.first
latest_sub = o.nil? ? nil : o.latest_submission
diff --git a/test/models/notes/test_note.rb b/test/models/notes/test_note.rb
index ecc3cde6..4a53820a 100644
--- a/test/models/notes/test_note.rb
+++ b/test/models/notes/test_note.rb
@@ -19,9 +19,11 @@ def self.after_suite
end
def _ontology_and_class
- count, acronyms, ontologies = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
+ count, acronyms, ontologies = create_ontologies_and_submissions(ont_count: 1, submission_count: 1,
+ process_submission: true,
+ process_options: {process_rdf: true, extract_metadata: false})
ontology = ontologies.first
- cls = LinkedData::Models::Class.where.include(:prefLabel).in(ontology.latest_submission).read_only.page(1, 1).first
+ cls = LinkedData::Models::Class.where.include(:prefLabel).in(ontology.latest_submission).read_only.first
return ontology, cls
end
diff --git a/test/models/skos/test_collections.rb b/test/models/skos/test_collections.rb
index 9a1993a7..b14bbe5a 100644
--- a/test/models/skos/test_collections.rb
+++ b/test/models/skos/test_collections.rb
@@ -6,14 +6,13 @@ class TestCollections < LinkedData::TestOntologyCommon
def self.before_suite
LinkedData::TestCase.backend_4s_delete
+ self.new('').submission_parse('INRAETHES', 'Testing skos',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
+ 1, process_rdf: true, extract_metadata: false,
+ generate_missing_labels: false)
end
def test_collections_all
- submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
- 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
ont = 'INRAETHES'
sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
collections = LinkedData::Models::SKOS::Collection.in(sub).include(:members, :prefLabel).all
@@ -30,11 +29,6 @@ def test_collections_all
end
def test_collection_members
- submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
- 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
ont = 'INRAETHES'
sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
collection_test = test_data.first
diff --git a/test/models/skos/test_schemes.rb b/test/models/skos/test_schemes.rb
index 9f52e303..1b6aa671 100644
--- a/test/models/skos/test_schemes.rb
+++ b/test/models/skos/test_schemes.rb
@@ -6,20 +6,23 @@ class TestSchemes < LinkedData::TestOntologyCommon
def self.before_suite
LinkedData::TestCase.backend_4s_delete
+ self.new('').submission_parse('INRAETHES', 'Testing skos',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
+ 1,
+ process_rdf: true, extract_metadata: false,
+ generate_missing_labels: false)
end
def test_schemes_all
- submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
- 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
+
ont = 'INRAETHES'
sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
schemes = LinkedData::Models::SKOS::Scheme.in(sub).include(:prefLabel).all
assert_equal 66, schemes.size
schemes_test = test_data
+ schemes_test = schemes_test.sort_by{|x| x[:id]}
+ schemes = schemes.sort_by{|x| x.id.to_s}
schemes.each_with_index do |x, i|
scheme_test = schemes_test[i]
diff --git a/test/models/skos/test_skos_xl.rb b/test/models/skos/test_skos_xl.rb
index aa781a4c..70ae2383 100644
--- a/test/models/skos/test_skos_xl.rb
+++ b/test/models/skos/test_skos_xl.rb
@@ -5,14 +5,13 @@ class TestSkosXlLabel < LinkedData::TestOntologyCommon
def self.before_suite
LinkedData::TestCase.backend_4s_delete
+ self.new('').submission_parse('INRAETHES', 'Testing skos',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
+ 1,
+ process_rdf: true, extract_metadata: false, generate_missing_labels: false)
end
def test_skos_xl_label_all
- submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
- 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
ont = 'INRAETHES'
sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
labels = LinkedData::Models::SKOS::Label.in(sub).include(:literalForm).all
@@ -26,11 +25,6 @@ def test_skos_xl_label_all
end
def test_class_skos_xl_label
- submission_parse('INRAETHES', 'Testing skos',
- 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
- 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
ont = 'INRAETHES'
ont = LinkedData::Models::Ontology.find(ont).first
sub = ont.latest_submission
diff --git a/test/models/test_agent.rb b/test/models/test_agent.rb
index ae7d199d..9f8dbe7e 100644
--- a/test/models/test_agent.rb
+++ b/test/models/test_agent.rb
@@ -3,8 +3,9 @@
class TestAgent < LinkedData::TestCase
def self.before_suite
+ backend_4s_delete
self.new("before_suite").teardown
- @@user1 = LinkedData::Models::User.new(:username => "user11111", :email => "some1111@email.org" )
+ @@user1 = LinkedData::Models::User.new(:username => "user11111", :email => "some1111@email.org")
@@user1.passwordHash = "some random pass hash"
@@user1.save
end
@@ -15,32 +16,28 @@ def self.after_suite
@@user1.delete
end
-
-
def test_agent_no_valid
- @agents =[
- LinkedData::Models::Agent.new(name:"name 0", email:"test_0@test.com", agentType: 'organization',creator: @@user1 ),
- LinkedData::Models::Agent.new(name:"name 1", email:"test_1@test.com", agentType: 'person', creator: @@user1 ),
- LinkedData::Models::Agent.new(name:"name 2", email:"test_2@test.com", agentType: 'person', creator: @@user1 )
+ @agents = [
+ LinkedData::Models::Agent.new(name: "name 0", email: "test_0@test.com", agentType: 'organization', creator: @@user1),
+ LinkedData::Models::Agent.new(name: "name 1", email: "test_1@test.com", agentType: 'person', creator: @@user1),
+ LinkedData::Models::Agent.new(name: "name 2", email: "test_2@test.com", agentType: 'person', creator: @@user1)
]
@identifiers = [
LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ROR', creator: @@user1),
LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ORCID', creator: @@user1),
]
- @identifiers.each {|i| i.save}
+ @identifiers.each { |i| i.save }
- affiliations = @agents[0..2].map{ |a| a.save }
+ affiliations = @agents[0..2].map { |a| a.save }
agent = @agents.last
agent.affiliations = affiliations
-
refute agent.valid?
refute_nil agent.errors[:affiliations][:is_organization]
- affiliations.each{|x| x.delete}
-
+ affiliations.each { |x| x.delete }
agents = @agents[0..2].map do |a|
a.identifiers = @identifiers
@@ -54,8 +51,7 @@ def test_agent_no_valid
refute second_agent.valid?
refute_nil second_agent.errors[:identifiers][:unique_identifiers]
-
- @identifiers.each{|i| i.delete}
+ @identifiers.each { |i| i.delete }
end
def test_identifier_find
@@ -69,17 +65,92 @@ def test_identifier_find
id.delete
end
+
def test_identifier_no_valid
refute LinkedData::Models::AgentIdentifier.new(notation: 'https://ror.org/000h6jb29', schemaAgency: 'ROR', creator: @@user1).valid?
- id = LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29"', schemaAgency: 'ROR', creator: @@user1)
+ id = LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ROR', creator: @@user1)
assert id.valid?
id.save
- refute LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29"', schemaAgency: 'ROR', creator: @@user1).valid?
+ refute LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ROR', creator: @@user1).valid?
- assert LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29"', schemaAgency: 'ORCID', creator: @@user1).valid?
+ assert LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ORCID', creator: @@user1).valid?
id.delete
end
+ def test_all_agents_usages_load
+ count, acronyms, ontologies = create_ontologies_and_submissions(ont_count: 3, submission_count: 1,
+ process_submission: false)
+ agents, sub1, sub2, sub3 = agent_usages_test_setup(ontologies)
+ ## using batch load
+ t1 = Benchmark.measure('batch load') do
+ LinkedData::Models::Agent.load_agents_usages(agents)
+ agent_usages_test(agents, sub1, sub2, sub3)
+ end
+
+ ## using element-by-element loading
+ t2 = Benchmark.measure('eager load') do
+ agents, sub1, sub2, sub3 = agent_usages_test_setup(ontologies)
+ agent_usages_test(agents, sub1, sub2, sub3)
+ end
+
+ assert t1.total < t2.total, "batch loading should be more faster than eager loading"
+ end
+ private
+
+ def agent_usages_test_setup(ontologies)
+ o1 = ontologies[0]
+ o2 = ontologies[1]
+ o3 = ontologies[2]
+ sub1 = o1.latest_submission(status: :any)
+ sub2 = o2.latest_submission(status: :any)
+ sub3 = o3.latest_submission(status: :any)
+ refute_nil sub1
+ refute_nil sub2
+ refute_nil sub3
+
+ agents = [LinkedData::Models::Agent.new(name: "name 0", email: "test_0@test.com", agentType: 'organization', creator: @@user1).save,
+ LinkedData::Models::Agent.new(name: "name 1", email: "test_1@test.com", agentType: 'organization', creator: @@user1).save,
+ LinkedData::Models::Agent.new(name: "name 2", email: "test_2@test.com", agentType: 'person', creator: @@user1).save]
+
+ sub1.hasCreator = [agents.last]
+ sub1.publisher = agents[0..1]
+ sub1.fundedBy = [agents[0]]
+ sub1.bring_remaining
+ assert sub1.valid?
+ sub1.save
+
+ sub2.hasCreator = [agents.last]
+ sub2.endorsedBy = [agents[0]]
+ sub2.fundedBy = agents[0..1]
+ sub2.bring_remaining
+ assert sub2.valid?
+ sub2.save
+
+ [agents, sub1, sub2, sub3]
+ end
+ def agent_usages_test(agents, sub1, sub2, sub3)
+ usages = agents[0].usages
+
+ assert_equal 2, usages.size
+
+ refute_nil usages[sub1.id]
+ assert_equal usages[sub1.id].map(&:to_s).sort, ["http://purl.org/dc/terms/publisher", "http://xmlns.com/foaf/0.1/fundedBy"].sort
+ assert_equal usages[sub2.id].map(&:to_s).sort, ["http://omv.ontoware.org/2005/05/ontology#endorsedBy", "http://xmlns.com/foaf/0.1/fundedBy"].sort
+
+ sub3.copyrightHolder = agents[0]
+ sub3.bring_remaining
+ sub3.save
+
+ usages = agents[0].usages(force_update: true)
+ assert_equal 3, usages.size
+
+ refute_nil usages[sub1.id]
+ assert_equal usages[sub1.id].map(&:to_s).sort, ["http://purl.org/dc/terms/publisher", "http://xmlns.com/foaf/0.1/fundedBy"].sort
+ assert_equal usages[sub2.id].map(&:to_s).sort, ["http://omv.ontoware.org/2005/05/ontology#endorsedBy", "http://xmlns.com/foaf/0.1/fundedBy"].sort
+ assert_equal usages[sub3.id].map(&:to_s), ["http://schema.org/copyrightHolder"]
+
+ agents.each{|x| x.delete}
+ end
end
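The benchmark relies on `Agent.load_agents_usages` prefetching usages for a whole set of agents in one backend round trip, after which `Agent#usages` reads from memory. A hedged sketch of the access pattern:

# batch-load usages once, then read per agent without extra queries
agents = LinkedData::Models::Agent.where.include(:name).all
LinkedData::Models::Agent.load_agents_usages(agents)
agents.each do |agent|
  # usages maps a submission id to the predicates that reference the agent
  agent.usages.each { |sub_id, preds| puts "#{agent.name} in #{sub_id}: #{preds.map(&:to_s).join(', ')}" }
end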
diff --git a/test/models/test_analytics.rb b/test/models/test_analytics.rb
new file mode 100644
index 00000000..c325c66e
--- /dev/null
+++ b/test/models/test_analytics.rb
@@ -0,0 +1,95 @@
+require_relative "../test_case"
+
+class LinkedData::Models::User
+ @@user_analytics = {}
+
+ def self.update_class_variable(new_value)
+ @@user_analytics = new_value
+ end
+ def self.load_data(field_name)
+ @@user_analytics
+ end
+end
+
+class LinkedData::Models::Ontology
+ def self.load_analytics_data
+ ontologies_analytics = {}
+ acronyms = %w[E-PHY AGROVOC TEST]
+ acronyms.each do |acronym|
+ ontologies_analytics[acronym] = {
+ "2021" => (1..12).map { |i| [i.to_s, i * 2021] }.to_h,
+ "2022" => (1..12).map { |i| [i.to_s, i * 2022] }.to_h,
+ "2023" => (1..12).map { |i| [i.to_s, i * 2023] }.to_h,
+ }
+ end
+ ontologies_analytics
+ end
+end
+
+class TestAnalytics < LinkedData::TestCase
+
+ def test_ontologies_analytics
+ ontologies_analytics = LinkedData::Models::Ontology.load_analytics_data
+ analytics = LinkedData::Models::Ontology.analytics
+ assert_equal ontologies_analytics, analytics
+
+
+ month_analytics = LinkedData::Models::Ontology.analytics(2023, 1)
+ refute_empty month_analytics
+ month_analytics.each do |_, month_analytic|
+ exp = { "2023" => { "1" => 2023 } }
+ assert_equal exp, month_analytic
+ end
+
+ analytics = LinkedData::Models::Ontology.analytics(nil, nil, 'TEST')
+ exp = { "TEST" => ontologies_analytics["TEST"] }
+ assert_equal exp, analytics
+
+
+ month_analytics = LinkedData::Models::Ontology.analytics(2021, 2, 'TEST')
+ refute_empty month_analytics
+ month_analytics.each do |_, month_analytic|
+ exp = { "2021" => { "2" => 2 * 2021 } }
+ assert_equal exp, month_analytic
+ end
+ end
+
+ def test_user_analytics
+
+ user_analytics = { 'all_users' => {
+ "2021" => (1..12).map { |i| [i.to_s, i * 2021] }.to_h,
+ "2022" => (1..12).map { |i| [i.to_s, i * 2022] }.to_h,
+ "2023" => (1..12).map { |i| [i.to_s, i * 2023] }.to_h,
+ } }
+ LinkedData::Models::User.update_class_variable(user_analytics)
+
+
+ analytics = LinkedData::Models::User.analytics
+ assert_equal user_analytics, analytics
+
+ month_analytics = LinkedData::Models::User.analytics(2023, 1)
+ refute_empty month_analytics
+ month_analytics.each do |_, month_analytic|
+ exp = { "2023" => { "1" => 2023 } }
+ assert_equal exp, month_analytic
+ end
+ end
+
+ def test_page_visits_analytics
+ user_analytics = { 'all_pages' => { "/annotator" => 229,
+ "/mappings" => 253,
+ "/login" => 258,
+ "/ontologies/CSOPRA" => 273,
+ "/admin" => 280,
+ "/search" => 416,
+ "/" => 4566 }
+ }
+
+ LinkedData::Models::User.update_class_variable(user_analytics)
+
+ analytics = LinkedData::Models::User.page_visits_analytics
+ assert_equal user_analytics, analytics
+
+ end
+
+end
\ No newline at end of file
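Both stubs pin down the analytics hash shape the models return: an acronym (or 'all_users' / 'all_pages') key, then nested year and month keys mapping to counts. For instance:

# shape consumed by Ontology.analytics(year, month, acronym)
{ 'TEST' => { '2023' => { '1' => 2023, '2' => 4046 } } }
# shape consumed by User.page_visits_analytics
{ 'all_pages' => { '/search' => 416, '/' => 4566 } }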
diff --git a/test/models/test_class_main_lang.rb b/test/models/test_class_portal_lang.rb
similarity index 53%
rename from test/models/test_class_main_lang.rb
rename to test/models/test_class_portal_lang.rb
index 352dbe7f..c3518d6d 100644
--- a/test/models/test_class_main_lang.rb
+++ b/test/models/test_class_portal_lang.rb
@@ -1,71 +1,81 @@
require_relative './test_ontology_common'
-class TestClassMainLang < LinkedData::TestOntologyCommon
+class TestClassPortalLang < LinkedData::TestOntologyCommon
def self.before_suite
@@old_main_languages = Goo.main_languages
+ RequestStore.store[:requested_lang] = nil
+ parse
end
def self.after_suite
Goo.main_languages = @@old_main_languages
end
+ def self.parse
+ new('').submission_parse('AGROOE', 'AGROOE Test extract metadata ontology',
+ './test/data/ontology_files/agrooeMappings-05-05-2016.owl', 1,
+ process_rdf: true, index_search: false,
+ run_metrics: false, reasoning: true)
+ end
+
def test_map_attribute_found
cls = parse_and_get_class lang: ['fr']
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal 'entité matérielle detaillée', cls.label.first
- assert_equal 'skos prefLabel fr', cls.prefLabel
- assert_equal ['entité fra', 'entite rien'], cls.synonym
+ assert_equal ['entité matérielle detaillée'], cls.label
+ assert_includes ['skos prefLabel fr', 'skos prefLabel rien'], cls.prefLabel
+ assert_equal ['entité fra', 'entite rien'].sort, cls.synonym.sort
end
def test_map_attribute_not_found
cls = parse_and_get_class lang: ['es']
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal ['material detailed entity', 'entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
- assert_equal ['entita esp' , 'entite rien' ], cls.synonym
+ assert_equal ['entita esp', 'entite rien'].sort, cls.synonym.sort
end
def test_map_attribute_secondary_lang
cls = parse_and_get_class lang: %w[es fr]
cls.bring :unmapped
LinkedData::Models::Class.map_attributes(cls)
- assert_equal ['entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
- assert_equal ['entita esp', 'entite rien'], cls.synonym
+ assert_equal ['entita esp', 'entite rien'].sort, cls.synonym.sort
end
def test_label_main_lang_fr_found
- cls = parse_and_get_class lang: ['fr']
- assert_equal 'entité matérielle detaillée', cls.label.first
- assert_equal 'skos prefLabel fr', cls.prefLabel
- assert_equal ['entité fra', 'entite rien'], cls.synonym
+ cls = parse_and_get_class lang: [:FR]
+ assert_equal ['entité matérielle detaillée'], cls.label
+ assert_includes ['skos prefLabel fr', 'skos prefLabel rien'], cls.prefLabel
+ assert_equal ['entité fra', 'entite rien'].sort, cls.synonym.sort
end
def test_label_main_lang_not_found
cls = parse_and_get_class lang: ['es']
- assert_equal ['material detailed entity', 'entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
- assert_equal ['entita esp' , 'entite rien' ], cls.synonym
+ assert_equal ['entita esp' , 'entite rien' ].sort, cls.synonym.sort
end
def test_label_secondary_lang
- # 'es' will not be found so will take 'fr' if fond or anything else
- cls = parse_and_get_class lang: %w[es fr]
+ # This feature is superseded by the request-language feature
+ # 'es' will not be found
+ cls = parse_and_get_class lang: %i[ES FR]
- assert_equal ['entité matérielle detaillée'], cls.label
+ assert_empty cls.label
assert_equal 'skos prefLabel rien', cls.prefLabel
- assert_equal ['entita esp', 'entite rien'], cls.synonym
+ assert_equal ['entita esp', 'entite rien'].sort, cls.synonym.sort
end
def test_label_main_lang_en_found
cls = parse_and_get_class lang: ['en']
assert_equal 'material detailed entity', cls.label.first
- assert_equal 'skos prefLabel en', cls.prefLabel
- assert_equal ['entity eng', 'entite rien'], cls.synonym
+ assert_includes ['skos prefLabel en', 'skos prefLabel rien'], cls.prefLabel # TODO: fix in Goo so 'en' takes priority
+ assert_equal ['entity eng', 'entite rien'].sort, cls.synonym.sort
end
@@ -73,11 +83,6 @@ def test_label_main_lang_en_found
def parse_and_get_class(lang:, klass: 'http://lirmm.fr/2015/resource/AGROOE_c_03')
lang_set lang
- submission_parse('AGROOE', 'AGROOE Test extract metadata ontology',
- './test/data/ontology_files/agrooeMappings-05-05-2016.owl', 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
-
cls = get_class(klass,'AGROOE')
assert !cls.nil?
@@ -89,12 +94,9 @@ def lang_set(lang)
Goo.main_languages = lang
end
- def get_ontology_last_submission(ont)
- LinkedData::Models::Ontology.find(ont).first.latest_submission()
- end
def get_class(cls, ont)
- sub = LinkedData::Models::Ontology.find(ont).first.latest_submission()
+ sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
LinkedData::Models::Class.find(cls).in(sub).first
end
end
\ No newline at end of file
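The renamed test parses AGROOE once in `before_suite` and drives label resolution purely through `Goo.main_languages`, the portal-level setting. A condensed sketch of that flow:

# hedged sketch of the portal-language lookup exercised above
Goo.main_languages = [:FR] # portal-wide preference
sub = LinkedData::Models::Ontology.find('AGROOE').first.latest_submission
cls = LinkedData::Models::Class.find('http://lirmm.fr/2015/resource/AGROOE_c_03').in(sub).first
cls.bring :prefLabel
cls.prefLabel # the French literal when one exists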
diff --git a/test/models/test_class_request_lang.rb b/test/models/test_class_request_lang.rb
new file mode 100644
index 00000000..d4713a89
--- /dev/null
+++ b/test/models/test_class_request_lang.rb
@@ -0,0 +1,119 @@
+require_relative './test_ontology_common'
+require 'request_store'
+
+class TestClassRequestedLang < LinkedData::TestOntologyCommon
+
+ def self.before_suite
+ @@old_main_languages = Goo.main_languages
+ RequestStore.store[:requested_lang] = nil
+
+ parse
+ end
+
+ def self.after_suite
+ Goo.main_languages = @@old_main_languages
+ RequestStore.store[:requested_lang] = nil
+ end
+
+ def self.parse
+ new('').submission_parse('INRAETHES', 'Testing skos',
+ 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos', 1,
+ process_rdf: true, index_search: false,
+ run_metrics: false, reasoning: false
+ )
+ end
+
+ def teardown
+ reset_lang
+ end
+
+ def test_requested_language_found
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :FR)
+ assert_equal 'industrialisation', cls.prefLabel
+ assert_equal ['développement industriel'], cls.synonym
+
+ properties = cls.properties
+ assert_equal ['développement industriel'], properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s)
+ assert_equal ['industrialisation'], properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s)
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :EN)
+ assert_equal 'industrialization', cls.prefLabel
+ assert_equal ['industrial development'], cls.synonym
+
+ properties = cls.properties
+ assert_equal ['industrial development'], properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s)
+ assert_equal ['industrialization'], properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s)
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_13078',
+ requested_lang: :FR)
+ assert_equal 'carbone renouvelable', cls.prefLabel
+
+ end
+
+ def test_requested_language_not_found
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :ES)
+ assert_nil cls.prefLabel
+ assert_empty cls.synonym
+
+ properties = cls.properties
+ assert_empty properties.select { |x| x.to_s['altLabel'] }.values
+ assert_empty properties.select { |x| x.to_s['prefLabel'] }.values
+ end
+
+ def test_request_all_languages
+
+ cls = get_class_by_lang('http://opendata.inrae.fr/thesaurusINRAE/c_22817',
+ requested_lang: :ALL)
+
+ pref_label_all_languages = { en: 'industrialization', fr: 'industrialisation' }
+ assert_includes pref_label_all_languages.values, cls.prefLabel
+ assert_equal pref_label_all_languages, cls.prefLabel(include_languages: true)
+
+ synonym_all_languages = { en: ['industrial development'], fr: ['développement industriel'] }
+
+ assert_equal synonym_all_languages.values.flatten.sort, cls.synonym.sort
+ assert_equal synonym_all_languages, cls.synonym(include_languages: true)
+
+ properties = cls.properties
+
+ assert_equal synonym_all_languages.values.flatten.sort, properties.select { |x| x.to_s['altLabel'] }.values.first.map(&:to_s).sort
+ assert_equal pref_label_all_languages.values.sort, properties.select { |x| x.to_s['prefLabel'] }.values.first.map(&:to_s).sort
+
+ properties = cls.properties(include_languages: true)
+
+ assert_equal synonym_all_languages.stringify_keys,
+ properties.select { |x| x.to_s['altLabel'] }.values.first.transform_values{|v| v.map(&:object)}
+ assert_equal pref_label_all_languages.stringify_keys,
+ properties.select { |x| x.to_s['prefLabel'] }.values.first.transform_values{|v| v.first.object}
+ end
+
+ private
+
+ def lang_set(requested_lang: nil, portal_languages: nil)
+ Goo.main_languages = portal_languages if portal_languages
+ RequestStore.store[:requested_lang] = requested_lang
+ end
+
+ def reset_lang
+ lang_set requested_lang: nil, portal_languages: @@old_main_languages
+ end
+
+ def get_class(cls, ont)
+ sub = LinkedData::Models::Ontology.find(ont).first.latest_submission
+ LinkedData::Models::Class.find(cls).in(sub).first
+ end
+
+ def get_class_by_lang(cls, requested_lang:, portal_languages: nil)
+ lang_set requested_lang: requested_lang, portal_languages: portal_languages
+ cls = get_class(cls, 'INRAETHES')
+ refute_nil cls
+ cls.bring_remaining
+ cls.bring :unmapped
+ cls
+ end
+end
\ No newline at end of file
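The request-level language, by contrast, travels in `RequestStore` and is consulted at attribute-mapping time: `:ALL` keeps every language, and `include_languages: true` exposes the per-language hash. A condensed sketch:

# hedged sketch of the request-language lookup exercised above
RequestStore.store[:requested_lang] = :ALL
sub = LinkedData::Models::Ontology.find('INRAETHES').first.latest_submission
cls = LinkedData::Models::Class.find('http://opendata.inrae.fr/thesaurusINRAE/c_22817').in(sub).first
cls.bring_remaining
cls.prefLabel(include_languages: true) # => { en: 'industrialization', fr: 'industrialisation' }
RequestStore.store[:requested_lang] = nil # reset so later requests are unaffected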
diff --git a/test/models/test_instances.rb b/test/models/test_instances.rb
index a814f8c9..2d77621a 100644
--- a/test/models/test_instances.rb
+++ b/test/models/test_instances.rb
@@ -8,18 +8,17 @@ class TestInstances < LinkedData::TestOntologyCommon
PROP_OBSERVABLE_TRAIT = RDF::URI.new'http://www.owl-ontologies.com/OntologyXCT.owl#isObservableTraitof'.freeze
PROP_HAS_OCCURRENCE = RDF::URI.new'http://www.owl-ontologies.com/OntologyXCT.owl#hasOccurrenceIn'.freeze
- def self.before_suite
- LinkedData::TestCase.backend_4s_delete
- end
- def test_instance_counts_class
- submission_parse('TESTINST', 'Testing instances',
+ def self.before_suite
+ self.new('').submission_parse('TESTINST', 'Testing instances',
'test/data/ontology_files/XCTontologyvtemp2_vvtemp2.zip',
12,
masterFileName: 'XCTontologyvtemp2/XCTontologyvtemp2.owl',
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
- submission_id = LinkedData::Models::OntologySubmission.all.first.id
+ process_rdf: true, extract_metadata: false, generate_missing_labels: false)
+ end
+
+ def test_instance_counts_class
+ submission_id = RDF::URI.new("http://data.bioontology.org/ontologies/TESTINST/submissions/12")
class_id = RDF::URI.new('http://www.owl-ontologies.com/OntologyXCT.owl#ClinicalManifestation')
instances = LinkedData::InstanceLoader.get_instances_by_class(submission_id, class_id)
@@ -30,25 +29,13 @@ def test_instance_counts_class
end
def test_instance_counts_ontology
- submission_parse('TESTINST', 'Testing instances',
- 'test/data/ontology_files/XCTontologyvtemp2_vvtemp2.zip',
- 12,
- masterFileName: 'XCTontologyvtemp2/XCTontologyvtemp2.owl',
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
- submission_id = LinkedData::Models::OntologySubmission.all.first.id
+ submission_id = RDF::URI.new("http://data.bioontology.org/ontologies/TESTINST/submissions/12")
instances = LinkedData::InstanceLoader.get_instances_by_ontology(submission_id, page_no: 1, size: 800)
assert_equal 714, instances.length
end
def test_instance_types
- submission_parse('TESTINST', 'Testing instances',
- 'test/data/ontology_files/XCTontologyvtemp2_vvtemp2.zip',
- 12,
- masterFileName: 'XCTontologyvtemp2/XCTontologyvtemp2.owl',
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
- submission_id = LinkedData::Models::OntologySubmission.all.first.id
+ submission_id = RDF::URI.new("http://data.bioontology.org/ontologies/TESTINST/submissions/12")
class_id = RDF::URI.new('http://www.owl-ontologies.com/OntologyXCT.owl#ClinicalManifestation')
instances = LinkedData::InstanceLoader.get_instances_by_class(submission_id, class_id)
@@ -69,13 +56,7 @@ def test_instance_types
def test_instance_properties
known_properties = [PROP_TYPE, PROP_CLINICAL_MANIFESTATION, PROP_OBSERVABLE_TRAIT, PROP_HAS_OCCURRENCE]
- submission_parse('TESTINST', 'Testing instances',
- 'test/data/ontology_files/XCTontologyvtemp2_vvtemp2.zip',
- 12,
- masterFileName: 'XCTontologyvtemp2/XCTontologyvtemp2.owl',
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
- submission_id = LinkedData::Models::OntologySubmission.all.first.id
+ submission_id = RDF::URI.new("http://data.bioontology.org/ontologies/TESTINST/submissions/12")
class_id = RDF::URI.new('http://www.owl-ontologies.com/OntologyXCT.owl#ClinicalManifestation')
instances = LinkedData::InstanceLoader.get_instances_by_class(submission_id, class_id)
diff --git a/test/models/test_mappings.rb b/test/models/test_mappings.rb
index 6b4e2e29..ef76cfd6 100644
--- a/test/models/test_mappings.rb
+++ b/test/models/test_mappings.rb
@@ -11,44 +11,33 @@ class TestMapping < LinkedData::TestOntologyCommon
def self.before_suite
- LinkedData::TestCase.backend_4s_delete
- ontologies_parse()
+ backend_4s_delete
+ ontologies_parse
end
- def self.ontologies_parse()
+ def self.ontologies_parse
helper = LinkedData::TestOntologyCommon.new(self)
helper.submission_parse(ONT_ACR1,
"MappingOntTest1",
"./test/data/ontology_files/BRO_v3.3.owl", 11,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false)
helper.submission_parse(ONT_ACR2,
"MappingOntTest2",
"./test/data/ontology_files/CNO_05.owl", 22,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false)
helper.submission_parse(ONT_ACR3,
"MappingOntTest3",
"./test/data/ontology_files/aero.owl", 33,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false)
helper.submission_parse(ONT_ACR4,
"MappingOntTest4",
"./test/data/ontology_files/fake_for_mappings.owl", 44,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
+ process_rdf: true, extract_metadata: false)
end
- def delete_all_rest_mappings
- LinkedData::Models::RestBackupMapping.all.each do |m|
- LinkedData::Mappings.delete_rest_mapping(m.id)
- end
- end
def test_mapping_count_models
- LinkedData::Models::MappingCount.where.all do |x|
- x.delete
- end
+ LinkedData::Models::MappingCount.where.all(&:delete)
+
m = LinkedData::Models::MappingCount.new
assert !m.valid?
m.ontologies = ["BRO"]
@@ -75,46 +64,17 @@ def test_mapping_count_models
.all
assert result.length == 1
assert result.first.count == 321
- LinkedData::Models::MappingCount.where.all do |x|
- x.delete
- end
+ LinkedData::Models::MappingCount.where.all(&:delete)
end
- def validate_mapping(map)
- prop = map.source.downcase.to_sym
- prop = :prefLabel if map.source == "LOOM"
- prop = nil if map.source == "SAME_URI"
- classes = []
- map.classes.each do |t|
- sub = LinkedData::Models::Ontology.find(t.submission.ontology.id)
- .first.latest_submission
- cls = LinkedData::Models::Class.find(t.id).in(sub)
- unless prop.nil?
- cls.include(prop)
- end
- cls = cls.first
- classes << cls unless cls.nil?
- end
- if map.source == "SAME_URI"
- return classes[0].id.to_s == classes[1].id.to_s
- end
- if map.source == "LOOM"
- ldOntSub = LinkedData::Models::OntologySubmission
- label0 = ldOntSub.loom_transform_literal(classes[0].prefLabel)
- label1 = ldOntSub.loom_transform_literal(classes[1].prefLabel)
- return label0 == label1
- end
- if map.source == "CUI"
- return classes[0].cui == classes[1].cui
- end
- return false
- end
def test_mappings_ontology
- delete_all_rest_mappings
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- assert LinkedData::Models::MappingCount.where.all.length > 2
+ LinkedData::Models::RestBackupMapping.all.each do |m|
+ LinkedData::Mappings.delete_rest_mapping(m.id)
+ end
+
+ assert create_count_mapping > 2
#bro
ont1 = LinkedData::Models::Ontology.where({ :acronym => ONT_ACR1 }).to_a[0]
@@ -149,6 +109,8 @@ def test_mappings_ontology
end
assert validate_mapping(map), "mapping is not valid"
end
+ assert create_count_mapping > 2
+
by_ont_counts = LinkedData::Mappings.mapping_ontologies_count(latest_sub,nil)
total = 0
by_ont_counts.each do |k,v|
@@ -176,9 +138,7 @@ def test_mappings_ontology
end
def test_mappings_two_ontologies
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- map_ct = LinkedData::Models::MappingCount.where.all.length
- assert map_ct > 2, "Mapping count should exceed the value of 2"
+ assert create_count_mapping > 2, "Mapping count should exceed the value of 2"
#bro
ont1 = LinkedData::Models::Ontology.where({ :acronym => ONT_ACR1 }).to_a[0]
#fake ont
@@ -229,7 +189,9 @@ def test_mappings_two_ontologies
end
def test_mappings_rest
- delete_all_rest_mappings
+ LinkedData::Models::RestBackupMapping.all.each do |m|
+ LinkedData::Mappings.delete_rest_mapping(m.id)
+ end
mapping_term_a, mapping_term_b, submissions_a, submissions_b, relations, user = rest_mapping_data
mappings_created = []
@@ -246,9 +208,7 @@ def test_mappings_rest
ont_id = submissions_a.first.split("/")[0..-3].join("/")
latest_sub = LinkedData::Models::Ontology.find(RDF::URI.new(ont_id)).first.latest_submission
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- ct = LinkedData::Models::MappingCount.where.all.length
- assert ct > 2
+ assert create_count_mapping > 2
mappings = LinkedData::Mappings.mappings_ontology(latest_sub, 1, 1000)
rest_mapping_count = 0
@@ -277,18 +237,15 @@ def test_mappings_rest
helper.submission_parse(ONT_ACR1,
"MappingOntTest1",
"./test/data/ontology_files/BRO_v3.3.owl", 12,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false)
+
+ assert create_count_mapping > 2
+
latest_sub1 = LinkedData::Models::Ontology.find(RDF::URI.new(ont_id)).first.latest_submission
- LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
- ct1 = LinkedData::Models::MappingCount.where.all.length
- assert ct1 > 2
mappings = LinkedData::Mappings.mappings_ontology(latest_sub1, 1, 1000)
rest_mapping_count = 0
mappings.each do |m|
- if m.source == "REST"
- rest_mapping_count += 1
- end
+ rest_mapping_count += 1 if m.source == "REST"
end
assert_equal 3, rest_mapping_count
end
@@ -366,4 +323,35 @@ def create_rest_mapping(relation:, user:, name:, classes:)
process.save
LinkedData::Mappings.create_rest_mapping(classes, process)
end
+
+ def validate_mapping(map)
+ prop = map.source.downcase.to_sym
+ prop = :prefLabel if map.source == "LOOM"
+ prop = nil if map.source == "SAME_URI"
+
+ classes = []
+ map.classes.each do |t|
+ sub = LinkedData::Models::Ontology.find(t.submission.ontology.id)
+ .first.latest_submission
+ cls = LinkedData::Models::Class.find(t.id).in(sub)
+ unless prop.nil?
+ cls.include(prop)
+ end
+ cls = cls.first
+ classes << cls unless cls.nil?
+ end
+ if map.source == "SAME_URI"
+ return classes[0].id.to_s == classes[1].id.to_s
+ end
+ if map.source == "LOOM"
+ ldOntSub = LinkedData::Models::OntologySubmission
+ label0 = ldOntSub.loom_transform_literal(classes[0].prefLabel)
+ label1 = ldOntSub.loom_transform_literal(classes[1].prefLabel)
+ return label0 == label1
+ end
+ if map.source == "CUI"
+ return classes[0].cui == classes[1].cui
+ end
+ return false
+ end
end
diff --git a/test/models/test_ontology.rb b/test/models/test_ontology.rb
index 5e1a1849..b240bd0f 100644
--- a/test/models/test_ontology.rb
+++ b/test/models/test_ontology.rb
@@ -5,15 +5,7 @@
class TestOntology < LinkedData::TestOntologyCommon
def self.before_suite
- @@port = Random.rand(55000..65535) # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
- @@thread = Thread.new do
- Rack::Server.start(
- app: lambda do |e|
- [200, {'Content-Type' => 'text/plain'}, ['test file']]
- end,
- Port: @@port
- )
- end
+    url, @@thread, @@port = self.new('').start_server
end
def self.after_suite
@@ -299,7 +291,7 @@ def test_valid_ontology
end
def test_ontology_delete
- count, acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: true)
+ count, acronyms, ontologies = create_ontologies_and_submissions(ont_count: 2, submission_count: 1, process_submission: false)
u, of, contact = ontology_objects()
o1 = ontologies[0]
o2 = ontologies[1]
@@ -393,7 +385,7 @@ def test_latest_parsed_submission
count, acronyms, ont = create_ontologies_and_submissions(ont_count: 1, submission_count: 3)
ont = ont.first
ont.bring(submissions: [:submissionId])
- sub = ont.submissions[1]
+ sub = ont.submissions.sort_by(&:id)[1]
sub.bring(*LinkedData::Models::OntologySubmission.attributes)
sub.set_ready
sub.save
@@ -426,25 +418,25 @@ def test_duplicate_contacts
# A test to benchmark the time taken by bring_remaining (query not optimized, can take a long time if a lot of value in the list attributes)
def test_ontology_bring_remaining
# Creating the users
- user1 = LinkedData::Models::User.new(:username => "user1", :email => "some@email.org" )
+ user1 = LinkedData::Models::User.new(:username => "user1", :email => "some1@email.org" )
user1.passwordHash = "some random pass hash"
user1.save
- user2 = LinkedData::Models::User.new(:username => "user2", :email => "some@email.org" )
+ user2 = LinkedData::Models::User.new(:username => "user2", :email => "some2@email.org" )
user2.passwordHash = "some random pass hash"
user2.save
- user3 = LinkedData::Models::User.new(:username => "user3", :email => "some@email.org" )
+ user3 = LinkedData::Models::User.new(:username => "user3", :email => "some3@email.org" )
user3.passwordHash = "some random pass hash"
user3.save
- user4 = LinkedData::Models::User.new(:username => "user4", :email => "some@email.org" )
+ user4 = LinkedData::Models::User.new(:username => "user4", :email => "some4@email.org" )
user4.passwordHash = "some random pass hash"
user4.save
- user5 = LinkedData::Models::User.new(:username => "user5", :email => "some@email.org" )
+ user5 = LinkedData::Models::User.new(:username => "user5", :email => "some5@email.org" )
user5.passwordHash = "some random pass hash"
user5.save
- user6 = LinkedData::Models::User.new(:username => "user6", :email => "some@email.org" )
+ user6 = LinkedData::Models::User.new(:username => "user6", :email => "some6@email.org" )
user6.passwordHash = "some random pass hash"
user6.save
- user7 = LinkedData::Models::User.new(:username => "user7", :email => "some@email.org" )
+ user7 = LinkedData::Models::User.new(:username => "user7", :email => "some7@email.org" )
user7.passwordHash = "some random pass hash"
user7.save
diff --git a/test/models/test_ontology_common.rb b/test/models/test_ontology_common.rb
index 7033a657..7efe6cbb 100644
--- a/test/models/test_ontology_common.rb
+++ b/test/models/test_ontology_common.rb
@@ -1,7 +1,16 @@
require_relative "../test_case"
+require 'rack'
+require 'socket' # TCPServer, used by port_in_use?
+require 'benchmark' # Benchmark.measure, used in submission_parse
module LinkedData
class TestOntologyCommon < LinkedData::TestCase
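+    # Helper shared by the mapping tests: MappingCount models are only
+    # (re)computed when fewer than three already exist, so repeated calls
+    # from different tests do not redo the expensive counting step.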
+ def create_count_mapping
+ count = LinkedData::Models::MappingCount.where.all.length
+ unless count > 2
+ LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new))
+ count = LinkedData::Models::MappingCount.where.all.length
+ end
+ count
+ end
def submission_dependent_objects(format, acronym, user_name, name_ont)
#ontology format
owl = LinkedData::Models::OntologyFormat.where(:acronym => format).first
@@ -43,8 +52,14 @@ def submission_dependent_objects(format, acronym, user_name, name_ont)
# delete = true # delete any existing submissions
##############################################
def submission_parse(acronym, name, ontologyFile, id, parse_options={})
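+    # Virtuoso (vo) backend: temporarily shrink Goo's slice size while the
+    # submission is processed (restored in the ensure block below);
+    # presumably this keeps paged queries small enough for that triplestore.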
+ if Goo.backend_vo?
+ old_slices = Goo.slice_loading_size
+ Goo.slice_loading_size = 20
+ end
return if ENV["SKIP_PARSING"]
- parse_options[:process_rdf] = true
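+    # Fill defaults only for options the caller left unset; an explicit
+    # `false` must survive, which is why `.nil? &&` is used instead of `||=`.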
+ parse_options[:process_rdf].nil? && parse_options[:process_rdf] = true
+ parse_options[:index_search].nil? && parse_options[:index_search] = false
+ parse_options[:extract_metadata].nil? && parse_options[:extract_metadata] = false
parse_options[:delete].nil? && parse_options[:delete] = true
if parse_options[:delete]
ont = LinkedData::Models::Ontology.find(acronym).first
@@ -97,10 +112,17 @@ def submission_parse(acronym, name, ontologyFile, id, parse_options={})
assert_equal true, ont_submission.exist?(reload=true)
begin
tmp_log = Logger.new(TestLogFile.new)
- ont_submission.process_submission(tmp_log, parse_options)
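+        # Benchmark.measure (Ruby stdlib) times the parse so slow backends
+        # show up in the test output below.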
+ t = Benchmark.measure do
+ ont_submission.process_submission(tmp_log, parse_options)
+ end
+ puts "process submission time: #{t} "
rescue Exception => e
puts "Error, logged in #{tmp_log.instance_variable_get("@logdev").dev.path}"
raise e
+ ensure
+ if Goo.backend_vo?
+ Goo.slice_loading_size = old_slices
+ end
end
end
@@ -149,7 +171,7 @@ def init_test_ontology_msotest(acr)
assert (ont_submission.valid?)
ont_submission.save
assert_equal true, ont_submission.exist?(reload=true)
- parse_options = {process_rdf: true, index_search: true, run_metrics: true, reasoning: true}
+ parse_options = {process_rdf: true, extract_metadata: false}
begin
tmp_log = Logger.new(TestLogFile.new)
ont_submission.process_submission(tmp_log, parse_options)
@@ -194,6 +216,44 @@ def init_test_ontology_msotest(acr)
assert (count > 0)
end
end
+
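+  # Boots a throwaway Rack app on a random ephemeral port and returns
+  # [url, thread, port]. Note that port_in_use? only probes the port before
+  # Rack binds it, so a small check-then-bind race window remains.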
+ def start_server
+ max_retries = 5
+ retries = 0
+ server_port = Random.rand(55000..65535)
+
+ while port_in_use?(server_port)
+ retries += 1
+ break if retries >= max_retries
+ server_port = Random.rand(55000..65535)
+ end
+
+ raise "Could not find an available port after #{max_retries} retries" if retries >= max_retries
+
+ server_url = 'http://localhost:' + server_port.to_s
+ server_thread = Thread.new do
+ Rack::Server.start(
+ app: lambda do |e|
+ [200, {'Content-Type' => 'text/plain'}, ['test file']]
+ end,
+ Port: server_port
+ )
+ end
+ Thread.pass
+
+ [server_url, server_thread, server_port]
+ end
+
+ private
+ def port_in_use?(port)
+ begin
+ server = TCPServer.new(port)
+ server.close
+ false
+ rescue Errno::EADDRINUSE
+ true
+ end
+ end
end
end
diff --git a/test/models/test_ontology_submission.rb b/test/models/test_ontology_submission.rb
index 6c55f29a..1e0750c0 100644
--- a/test/models/test_ontology_submission.rb
+++ b/test/models/test_ontology_submission.rb
@@ -4,6 +4,10 @@
class TestOntologySubmission < LinkedData::TestOntologyCommon
+ def setup
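+    # each test starts from a clean 4store backend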
+ LinkedData::TestCase.backend_4s_delete
+ end
+
def test_valid_ontology
acronym = "BRO-TST"
@@ -38,7 +42,7 @@ def test_valid_ontology
os.status = 'beta'
assert os.valid?
end
-
+
def test_sanity_check_zip
acronym = "ADARTEST"
@@ -74,7 +78,6 @@ def test_sanity_check_zip
ont_submision.masterFileName = ont_submision.errors[:uploadFilePath][0][:options][0]
assert ont_submision.valid?
assert_equal 0, ont_submision.errors.length
- LinkedData::TestCase.backend_4s_delete
end
def test_automaster_from_zip
@@ -118,26 +121,26 @@ def test_skos_ontology
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "SKOS-TEST"],
submissionId: 987)
- .include(:version)
- .first
+ .include(:version)
+ .first
assert sub.roots.map { |x| x.id.to_s}.sort == ["http://www.ebi.ac.uk/efo/EFO_0000311",
- "http://www.ebi.ac.uk/efo/EFO_0001444",
- "http://www.ifomis.org/bfo/1.1/snap#Disposition",
- "http://www.ebi.ac.uk/chebi/searchId.do?chebiId=CHEBI:37577",
- "http://www.ebi.ac.uk/efo/EFO_0000635",
- "http://www.ebi.ac.uk/efo/EFO_0000324"].sort
+ "http://www.ebi.ac.uk/efo/EFO_0001444",
+ "http://www.ifomis.org/bfo/1.1/snap#Disposition",
+ "http://www.ebi.ac.uk/chebi/searchId.do?chebiId=CHEBI:37577",
+ "http://www.ebi.ac.uk/efo/EFO_0000635",
+ "http://www.ebi.ac.uk/efo/EFO_0000324"].sort
roots = sub.roots
LinkedData::Models::Class.in(sub).models(roots).include(:children).all
roots.each do |root|
-q_broader = <<-eos
+ q_broader = <<-eos
SELECT ?children WHERE {
- ?children #{RDF::SKOS[:broader].to_ntriples} #{root.id.to_ntriples} }
+ ?children #{RDF::Vocab::SKOS[:broader].to_ntriples} #{root.id.to_ntriples} }
eos
- children_query = []
- Goo.sparql_query_client.query(q_broader).each_solution do |sol|
- children_query << sol[:children].to_s
- end
- assert root.children.map { |x| x.id.to_s }.sort == children_query.sort
+ children_query = []
+ Goo.sparql_query_client.query(q_broader).each_solution do |sol|
+ children_query << sol[:children].to_s
+ end
+ assert root.children.map { |x| x.id.to_s }.sort == children_query.sort
end
end
@@ -149,11 +152,11 @@ def test_multiple_syn_multiple_predicate
#test for version info
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "HP-TEST"],
submissionId: 55)
- .include(:version)
- .first
+ .include(:version)
+ .first
paging = LinkedData::Models::Class.in(sub).page(1,100)
- .include(:unmapped)
+ .include(:unmapped)
found = false
begin
@@ -184,7 +187,7 @@ def test_obo_part_of
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT DISTINCT * WHERE {
rdfs:subClassOf ?x . }
-eos
+ eos
count = 0
Goo.sparql_query_client.query(qthing).each_solution do |sol|
count += 1
@@ -195,7 +198,7 @@ def test_obo_part_of
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT DISTINCT * WHERE {
?x . }
-eos
+ eos
count = 0
Goo.sparql_query_client.query(qthing).each_solution do |sol|
count += 1
@@ -208,7 +211,7 @@ def test_obo_part_of
SELECT DISTINCT * WHERE {
?x . }
-eos
+ eos
count = 0
Goo.sparql_query_client.query(qcount).each_solution do |sol|
count += 1
@@ -222,24 +225,24 @@ def test_obo_part_of
#strict comparison to be sure the merge with the tree_view branch goes fine
LinkedData::Models::Class.where.in(sub).include(:prefLabel,:synonym,:notation).each do |cls|
- assert_instance_of String,cls.prefLabel
- if cls.notation.nil?
- assert false,"notation empty"
- end
- assert_instance_of String,cls.notation
- assert cls.notation[-6..-1] == cls.id.to_s[-6..-1]
- #NCBO-1007 - hasNarrowSynonym
- if cls.id.to_s["CL_0000003"]
- assert cls.synonym[0] == "cell in vivo"
- end
- #NCBO-1007 - hasBroadSynonym
- if cls.id.to_s["CL_0000137"]
- assert cls.synonym[0] == "bone cell"
- end
- #NCBO-1007 - hasRelatedSynonym
- if cls.id.to_s["TAO_0000223"]
- assert cls.synonym.length == 6
- end
+ assert_instance_of String,cls.prefLabel
+ if cls.notation.nil?
+ assert false,"notation empty"
+ end
+ assert_instance_of String,cls.notation
+ assert cls.notation[-6..-1] == cls.id.to_s[-6..-1]
+ #NCBO-1007 - hasNarrowSynonym
+ if cls.id.to_s["CL_0000003"]
+ assert cls.synonym[0] == "cell in vivo"
+ end
+ #NCBO-1007 - hasBroadSynonym
+ if cls.id.to_s["CL_0000137"]
+ assert cls.synonym[0] == "bone cell"
+ end
+ #NCBO-1007 - hasRelatedSynonym
+ if cls.id.to_s["TAO_0000223"]
+ assert cls.synonym.length == 6
+ end
end
# This is testing that treeView is used to traverse the hierarchy
@@ -247,17 +250,17 @@ def test_obo_part_of
assert sub.hasOntologyLanguage.tree_property == Goo.vocabulary(:metadata)[:treeView]
bm = LinkedData::Models::Class
- .find(RDF::URI.new("http://purl.obolibrary.org/obo/GO_0070977"))
- .in(sub)
- .include(:prefLabel,:children,:parents)
- .first
+ .find(RDF::URI.new("http://purl.obolibrary.org/obo/GO_0070977"))
+ .in(sub)
+ .include(:prefLabel,:children,:parents)
+ .first
assert bm.children.first.id == RDF::URI.new("http://purl.obolibrary.org/obo/GO_0043931")
assert_equal 2, bm.parents.length
roots = sub.roots
assert roots.map { |x| x.id.to_s }.sort ==
- ["http://purl.obolibrary.org/obo/PATO_0000001",
- "http://purl.obolibrary.org/obo/CARO_0000000",
- "http://purl.obolibrary.org/obo/GO_0008150"].sort
+ ["http://purl.obolibrary.org/obo/PATO_0000001",
+ "http://purl.obolibrary.org/obo/CARO_0000000",
+ "http://purl.obolibrary.org/obo/GO_0008150"].sort
end
def test_submission_parse_subfolders_zip
@@ -265,8 +268,7 @@ def test_submission_parse_subfolders_zip
"./test/data/ontology_files/XCTontologyvtemp2_vvtemp2.zip",
34,
masterFileName: "XCTontologyvtemp2/XCTontologyvtemp2.owl",
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: true)
+ process_rdf: true, extract_metadata: false, generate_missing_labels: false)
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "CTXTEST"]).first
@@ -280,53 +282,43 @@ def test_submission_parse
# This one has some nasty looking IRIs with slashes in the anchor
unless ENV["BP_SKIP_HEAVY_TESTS"] == "1"
submission_parse("MCCLTEST", "MCCLS TEST",
- "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl", 11,
- process_rdf: true, index_search: true,
- run_metrics: false, reasoning: true)
+ "./test/data/ontology_files/CellLine_OWL_BioPortal_v1.0.owl", 11,
+ process_rdf: true, extract_metadata: false)
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "MCCLTEST"],
submissionId: 11)
- .include(:version)
- .first
+ .include(:version)
+ .first
assert sub.version == "3.0"
end
# This one has resources with accents.
submission_parse("ONTOMATEST",
- "OntoMA TEST",
- "./test/data/ontology_files/OntoMA.1.1_vVersion_1.1_Date__11-2011.OWL", 15,
- process_rdf: true, index_search: true,
- run_metrics: false, reasoning: true)
+ "OntoMA TEST",
+ "./test/data/ontology_files/OntoMA.1.1_vVersion_1.1_Date__11-2011.OWL", 15,
+ process_rdf: true, extract_metadata: false)
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "ONTOMATEST"],
submissionId: 15)
- .include(:version)
- .first
+ .include(:version)
+ .first
assert sub.version["Version 1.1"]
assert sub.version["Date: 11-2011"]
end
def test_process_submission_diff
- # Cleanup
- LinkedData::TestCase.backend_4s_delete
acronym = 'BRO'
# Create a 1st version for BRO
submission_parse(acronym, "BRO",
- "./test/data/ontology_files/BRO_v3.4.owl", 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false,
- diff: true, delete: false)
+ "./test/data/ontology_files/BRO_v3.4.owl", 1, process_rdf: false)
# Create a later version for BRO
submission_parse(acronym, "BRO",
- "./test/data/ontology_files/BRO_v3.5.owl", 2,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false,
- diff: true, delete: false)
- onts = LinkedData::Models::Ontology.find(acronym)
- bro = onts.first
- bro.bring(:submissions)
+ "./test/data/ontology_files/BRO_v3.5.owl", 2, process_rdf: false, diff: true, delete: false)
+
+ bro = LinkedData::Models::Ontology.find(acronym).include(submissions:[:submissionId,:diffFilePath]).first
submissions = bro.submissions
- submissions.each {|s| s.bring(:submissionId, :diffFilePath)}
# Sort submissions in descending order of submissionId, extract last two submissions
recent_submissions = submissions.sort {|a,b| b.submissionId <=> a.submissionId}[0..1]
sub1 = recent_submissions.last # descending order, so last is older submission
@@ -334,20 +326,18 @@ def test_process_submission_diff
assert(sub1.submissionId < sub2.submissionId, 'submissionId is in the wrong order')
assert(sub1.diffFilePath == nil, 'Should not create diff for older submission.')
assert(sub2.diffFilePath != nil, 'Failed to create diff for the latest submission.')
- # Cleanup
- LinkedData::TestCase.backend_4s_delete
end
def test_process_submission_archive
- parse_options = { process_rdf: false, index_search: false, index_commit: false,
- run_metrics: false, reasoning: false, archive: true }
- old_threshold = LinkedData::Models::OntologySubmission::FILE_SIZE_ZIPPING_THRESHOLD
- LinkedData::Models::OntologySubmission.const_set(:FILE_SIZE_ZIPPING_THRESHOLD, 0)
+ old_threshold = LinkedData::Services::OntologySubmissionArchiver::FILE_SIZE_ZIPPING_THRESHOLD
+ LinkedData::Services::OntologySubmissionArchiver.const_set(:FILE_SIZE_ZIPPING_THRESHOLD, 0)
ont_count, ont_acronyms, ontologies =
create_ontologies_and_submissions(ont_count: 1, submission_count: 2,
- process_submission: true, acronym: 'NCBO-545')
+ process_submission: true,
+ acronym: 'NCBO-545', process_options: {process_rdf: true, index_search: true,
+ extract_metadata: false})
# Sanity check.
assert_equal 1, ontologies.count
assert_equal 2, ontologies.first.submissions.count
@@ -357,46 +347,47 @@ def test_process_submission_archive
# Process latest submission. No files should be deleted.
latest_sub = sorted_submissions.first
- latest_sub.process_submission(Logger.new(latest_sub.parsing_log_path), parse_options)
- assert latest_sub.archived?
+ latest_sub.process_submission(Logger.new(latest_sub.parsing_log_path), {archive: true})
+
+ refute latest_sub.archived?
assert File.file?(File.join(latest_sub.data_folder, 'labels.ttl')),
- %-Missing ontology submission file: 'labels.ttl'-
+ %-Missing ontology submission file: 'labels.ttl'-
assert File.file?(File.join(latest_sub.data_folder, 'owlapi.xrdf')),
- %-Missing ontology submission file: 'owlapi.xrdf'-
+ %-Missing ontology submission file: 'owlapi.xrdf'-
assert File.file?(latest_sub.csv_path),
- %-Missing ontology submission file: '#{latest_sub.csv_path}'-
+ %-Missing ontology submission file: '#{latest_sub.csv_path}'-
assert File.file?(latest_sub.parsing_log_path),
- %-Missing ontology submission file: '#{latest_sub.parsing_log_path}'-
+ %-Missing ontology submission file: '#{latest_sub.parsing_log_path}'-
# Process one prior to latest submission. Some files should be deleted.
old_sub = sorted_submissions.last
old_file_path = old_sub.uploadFilePath
- old_sub.process_submission(Logger.new(old_sub.parsing_log_path), parse_options)
+ old_sub.process_submission(Logger.new(old_sub.parsing_log_path), {archive: true})
assert old_sub.archived?
- assert_equal false, File.file?(File.join(old_sub.data_folder, 'labels.ttl')),
- %-File deletion failed for 'labels.ttl'-
+ refute File.file?(File.join(old_sub.data_folder, 'labels.ttl')),
+ %-File deletion failed for 'labels.ttl'-
- assert_equal false, File.file?(File.join(old_sub.data_folder, 'mappings.ttl')),
- %-File deletion failed for 'mappings.ttl'-
+ refute File.file?(File.join(old_sub.data_folder, 'mappings.ttl')),
+ %-File deletion failed for 'mappings.ttl'-
- assert_equal false, File.file?(File.join(old_sub.data_folder, 'obsolete.ttl')),
- %-File deletion failed for 'obsolete.ttl'-
+ refute File.file?(File.join(old_sub.data_folder, 'obsolete.ttl')),
+ %-File deletion failed for 'obsolete.ttl'-
- assert_equal false, File.file?(File.join(old_sub.data_folder, 'owlapi.xrdf')),
- %-File deletion failed for 'owlapi.xrdf'-
+ refute File.file?(File.join(old_sub.data_folder, 'owlapi.xrdf')),
+ %-File deletion failed for 'owlapi.xrdf'-
- assert_equal false, File.file?(old_sub.csv_path),
- %-File deletion failed for '#{old_sub.csv_path}'-
+ refute File.file?(old_sub.csv_path),
+ %-File deletion failed for '#{old_sub.csv_path}'-
- assert_equal false, File.file?(old_sub.parsing_log_path),
+ refute File.file?(old_sub.parsing_log_path),
%-File deletion failed for '#{old_sub.parsing_log_path}'-
- assert_equal false, File.file?(old_file_path),
+ refute File.file?(old_file_path),
%-File deletion failed for '#{old_file_path}'-
assert old_sub.zipped?
@@ -408,8 +399,7 @@ def test_submission_diff_across_ontologies
# Create a 1st version for BRO
submission_parse("BRO34", "BRO3.4",
"./test/data/ontology_files/BRO_v3.4.owl", 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
+ process_rdf: false)
onts = LinkedData::Models::Ontology.find('BRO34')
bro34 = onts.first
bro34.bring(:submissions)
@@ -417,14 +407,13 @@ def test_submission_diff_across_ontologies
# Create a later version for BRO
submission_parse("BRO35", "BRO3.5",
"./test/data/ontology_files/BRO_v3.5.owl", 1,
- process_rdf: true, index_search: false,
- run_metrics: false, reasoning: false)
+ process_rdf: false)
onts = LinkedData::Models::Ontology.find('BRO35')
bro35 = onts.first
bro35.bring(:submissions)
sub35 = bro35.submissions.first
# Calculate the ontology diff: bro35 - bro34
- tmp_log = Logger.new(TestLogFile.new)
+ tmp_log = Logger.new($stdout)
sub35.diff(tmp_log, sub34)
assert(sub35.diffFilePath != nil, 'Failed to create submission diff file.')
end
@@ -432,9 +421,9 @@ def test_submission_diff_across_ontologies
def test_index_properties
submission_parse("BRO", "BRO Ontology",
"./test/data/ontology_files/BRO_v3.5.owl", 1,
- process_rdf: true, reasoning: false, index_properties: true)
- res = LinkedData::Models::Class.search("*:*", {:fq => "submissionAcronym:\"BRO\"", :start => 0, :rows => 80}, :property)
- assert_equal 81, res["response"]["numFound"]
+ process_rdf: true, extract_metadata: false, index_properties: true)
+ res = LinkedData::Models::OntologyProperty.search("*:*", {:fq => "submissionAcronym:\"BRO\"", :start => 0, :rows => 80})
+    assert_includes [81, 52], res["response"]["numFound"] # 81 if owlapi imports skos properties
found = 0
res["response"]["docs"].each do |doc|
@@ -458,11 +447,47 @@ def test_index_properties
break if found == 2
end
- assert_equal 2, found
+    assert_includes [1, 2], found # 1 if owlapi does not import skos properties
ont = LinkedData::Models::Ontology.find('BRO').first
ont.unindex_properties(true)
- res = LinkedData::Models::Class.search("*:*", {:fq => "submissionAcronym:\"BRO\""}, :property)
+ res = LinkedData::Models::OntologyProperty.search("*:*", {:fq => "submissionAcronym:\"BRO\""})
+ assert_equal 0, res["response"]["numFound"]
+ end
+
+  def test_index_multilingual
+    submission_parse("BRO", "BRO Ontology",
+                     "./test/data/ontology_files/BRO_v3.5.owl", 1,
+                     process_rdf: true, extract_metadata: false, generate_missing_labels: false,
+                     index_search: true, index_properties: false)
+
+    res = LinkedData::Models::Class.search("prefLabel:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
+    refute_equal 0, res["response"]["numFound"]
+
+    doc = res["response"]["docs"].select { |d| d["resource_id"].to_s.eql?('http://bioontology.org/ontologies/Activity.owl#Activity') }.first
+    refute_nil doc
+    # assert_equal 30, doc.keys.select { |k| k['prefLabel'] || k['synonym'] }.size # test that all the languages are indexed
+
+    res = LinkedData::Models::Class.search("prefLabel_none:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
+    refute_equal 0, res["response"]["numFound"]
+    refute_nil res["response"]["docs"].select { |d| d["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity') }.first
+
+    res = LinkedData::Models::Class.search("prefLabel_fr:Activité", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
+    refute_equal 0, res["response"]["numFound"]
+    refute_nil res["response"]["docs"].select { |d| d["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity') }.first
+
+    res = LinkedData::Models::Class.search("prefLabel_en:ActivityEnglish", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
+    refute_equal 0, res["response"]["numFound"]
+    refute_nil res["response"]["docs"].select { |d| d["resource_id"].eql?('http://bioontology.org/ontologies/Activity.owl#Activity') }.first
+
+    # the English label must not be found in the French-language field
+    res = LinkedData::Models::Class.search("prefLabel_fr:Activity", {:fq => "submissionAcronym:BRO", :start => 0, :rows => 80})
assert_equal 0, res["response"]["numFound"]
end
@@ -488,7 +513,7 @@ def test_zipped_submission_process
ont_submision.status = 'production'
assert ont_submision.valid?
ont_submision.save
- parse_options = {process_rdf: true, reasoning: true, index_search: false, run_metrics: false, diff: true}
+ parse_options = {process_rdf: false, diff: true}
begin
tmp_log = Logger.new(TestLogFile.new)
ont_submision.process_submission(tmp_log, parse_options)
@@ -498,8 +523,7 @@ def test_zipped_submission_process
end
archived_submission = ont_submision if i.zero?
end
- parse_options = { process_rdf: false, index_search: false, index_commit: false,
- run_metrics: false, reasoning: false, archive: true }
+ parse_options = { process_rdf: false, archive: true }
archived_submission.process_submission(Logger.new(TestLogFile.new), parse_options)
assert_equal false, File.file?(archived_submission.zip_folder),
@@ -568,17 +592,7 @@ def test_submission_parse_zip
def test_download_ontology_file
begin
- server_port = Random.rand(55000..65535) # http://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Dynamic.2C_private_or_ephemeral_ports
- server_url = 'http://localhost:' + server_port.to_s
- server_thread = Thread.new do
- Rack::Server.start(
- app: lambda do |e|
- [200, {'Content-Type' => 'text/plain'}, ['test file']]
- end,
- Port: server_port
- )
- end
- Thread.pass
+ server_url, server_thread, server_port = start_server
sleep 3 # Allow the server to startup
assert(server_thread.alive?, msg="Rack::Server thread should be alive, it's not!")
ont_count, ont_names, ont_models = create_ontologies_and_submissions(ont_count: 1, submission_count: 1)
@@ -733,7 +747,7 @@ def test_submission_root_classes
acr = "CSTPROPS"
init_test_ontology_msotest acr
os = LinkedData::Models::OntologySubmission.where(ontology: [ acronym: acr ], submissionId: 1)
- .include(LinkedData::Models::OntologySubmission.attributes).all
+ .include(LinkedData::Models::OntologySubmission.attributes).all
assert(os.length == 1)
os = os[0]
roots = os.roots
@@ -777,8 +791,6 @@ def test_submission_root_classes
roots = os.roots(nil, 1, 300)
assert_equal 6, roots.length
-
- LinkedData::TestCase.backend_4s_delete
end
#escaping sequences
@@ -788,9 +800,7 @@ def test_submission_parse_sbo
ontologyFile = "./test/data/ontology_files/SBO.obo"
id = 10
- LinkedData::TestCase.backend_4s_delete
-
- ont_submision = LinkedData::Models::OntologySubmission.new({ :submissionId => id,})
+ ont_submision = LinkedData::Models::OntologySubmission.new({ :submissionId => id })
uploadFilePath = LinkedData::Models::OntologySubmission.copy_file_repository(acronym, id,ontologyFile)
ont_submision.uploadFilePath = uploadFilePath
owl, sbo, user, contact = submission_dependent_objects("OBO", acronym, "test_linked_models", name)
@@ -807,7 +817,7 @@ def test_submission_parse_sbo
sub = LinkedData::Models::OntologySubmission.where(ontology: [ acronym: acronym ], submissionId: id).all
sub = sub[0]
- parse_options = {process_rdf: true, index_search: false, run_metrics: false, reasoning: true}
+ parse_options = {process_rdf: true, extract_metadata: false}
begin
tmp_log = Logger.new(TestLogFile.new)
sub.process_submission(tmp_log, parse_options)
@@ -817,8 +827,8 @@ def test_submission_parse_sbo
end
assert sub.ready?({status: [:uploaded, :rdf, :rdf_labels]})
page_classes = LinkedData::Models::Class.in(sub)
- .page(1,1000)
- .include(:prefLabel, :synonym).all
+ .page(1,1000)
+ .include(:prefLabel, :synonym).all
page_classes.each do |c|
if c.id.to_s == "http://purl.obolibrary.org/obo/SBO_0000004"
assert c.prefLabel == "modelling framework"
@@ -836,7 +846,6 @@ def test_submission_parse_sbo
end
end
- LinkedData::TestCase.backend_4s_delete
end
#ontology with import errors
@@ -846,7 +855,6 @@ def test_submission_parse_cno
ontologyFile = "./test/data/ontology_files/CNO_05.owl"
id = 10
- LinkedData::TestCase.backend_4s_delete
ont_submision = LinkedData::Models::OntologySubmission.new({ :submissionId => id,})
uploadFilePath = LinkedData::Models::OntologySubmission.copy_file_repository(acronym, id,ontologyFile)
@@ -865,7 +873,7 @@ def test_submission_parse_cno
sub = LinkedData::Models::OntologySubmission.where(ontology: [ acronym: acronym ], submissionId: id).all
sub = sub[0]
#this is the only ontology that indexes and tests for no error
- parse_options = {process_rdf: true, index_search: true, run_metrics: false, reasoning: true}
+ parse_options = {process_rdf: true, extract_metadata: false}
begin
tmp_log = Logger.new(TestLogFile.new)
sub.process_submission(tmp_log, parse_options)
@@ -879,12 +887,11 @@ def test_submission_parse_cno
sub.submissionStatus.select { |x| x.id.to_s["ERROR"] }.length == 0
LinkedData::Models::Class.where.in(sub)
- .include(:prefLabel, :notation, :prefixIRI).each do |cls|
+ .include(:prefLabel, :notation, :prefixIRI).each do |cls|
assert !cls.notation.nil? || !cls.prefixIRI.nil?
assert !cls.id.to_s.start_with?(":")
end
- LinkedData::TestCase.backend_4s_delete
end
#multiple preflables
@@ -966,9 +973,9 @@ def test_submission_parse_aero
assert count_headers > 2
page_classes = LinkedData::Models::Class.in(sub)
- .page(1,1000)
- .read_only
- .include(:prefLabel, :synonym, :definition).all
+ .page(1,1000)
+ .read_only
+ .include(:prefLabel, :synonym, :definition).all
page_classes.each do |c|
if c.id.to_s == "http://purl.obolibrary.org/obo/AERO_0000040"
assert c.prefLabel == "shaking finding"
@@ -988,7 +995,7 @@ def test_submission_parse_aero
#for indexing in search
paging = LinkedData::Models::Class.in(sub).page(1,100)
- .include(:unmapped)
+ .include(:unmapped)
page = nil
defs = 0
syns = 0
@@ -1010,8 +1017,7 @@ def test_submission_parse_aero
def test_submission_metrics
submission_parse("CDAOTEST", "CDAOTEST testing metrics",
"./test/data/ontology_files/cdao_vunknown.owl", 22,
- process_rdf: true, index_search: false,
- run_metrics: true, reasoning: true)
+ process_rdf: true, run_metrics: true, extract_metadata: false)
sub = LinkedData::Models::Ontology.find("CDAOTEST").first.latest_submission(status: [:rdf, :metrics])
sub.bring(:metrics)
@@ -1031,8 +1037,8 @@ def test_submission_metrics
submission_parse("BROTEST-METRICS", "BRO testing metrics",
"./test/data/ontology_files/BRO_v3.2.owl", 33,
- process_rdf: true, index_search: false,
- run_metrics: true, reasoning: true)
+ process_rdf: true, extract_metadata: false,
+ run_metrics: true)
sub = LinkedData::Models::Ontology.find("BROTEST-METRICS").first.latest_submission(status: [:rdf, :metrics])
sub.bring(:metrics)
@@ -1051,11 +1057,11 @@ def test_submission_metrics
metrics.bring_remaining
assert_instance_of LinkedData::Models::Metric, metrics
- assert_equal 486, metrics.classes
- assert_equal 63, metrics.properties
+ assert_includes [481, 486], metrics.classes # 486 if owlapi imports skos classes
+ assert_includes [63, 45], metrics.properties # 63 if owlapi imports skos properties
assert_equal 124, metrics.individuals
- assert_equal 14, metrics.classesWithOneChild
- assert_equal 474, metrics.classesWithNoDefinition
+ assert_includes [13, 14], metrics.classesWithOneChild # 14 if owlapi imports skos properties
+ assert_includes [473, 474], metrics.classesWithNoDefinition # 474 if owlapi imports skos properties
assert_equal 2, metrics.classesWithMoreThan25Children
assert_equal 65, metrics.maxChildCount
assert_equal 5, metrics.averageChildCount
@@ -1063,8 +1069,8 @@ def test_submission_metrics
submission_parse("BROTEST-ISFLAT", "BRO testing metrics flat",
"./test/data/ontology_files/BRO_v3.2.owl", 33,
- process_rdf: true, index_search: false,
- run_metrics: true, reasoning: true)
+ process_rdf: true, extract_metadata: false,
+ run_metrics: true)
sub = LinkedData::Models::Ontology.find("BROTEST-ISFLAT").first
.latest_submission(status: [:rdf, :metrics])
@@ -1073,12 +1079,12 @@ def test_submission_metrics
metrics.bring_remaining
#all the child metrics should be 0 since we declare it as flat
- assert_equal 486, metrics.classes
- assert_equal 63, metrics.properties
+    assert_includes [481, 486], metrics.classes # 486 if owlapi imports skos classes
+ assert_includes [63, 45], metrics.properties # 63 if owlapi imports skos properties
assert_equal 124, metrics.individuals
assert_equal 0, metrics.classesWithOneChild
# because the subproperty has not been added
- assert_equal 474, metrics.classesWithNoDefinition
+    assert_includes [473, 474], metrics.classesWithNoDefinition # 474 if owlapi imports skos properties
assert_equal 0, metrics.classesWithMoreThan25Children
assert_equal 0, metrics.maxChildCount
assert_equal 0, metrics.averageChildCount
@@ -1087,8 +1093,8 @@ def test_submission_metrics
#test UMLS metrics
acronym = 'UMLS-TST'
submission_parse(acronym, "Test UMLS Ontologory", "./test/data/ontology_files/umls_semantictypes.ttl", 1,
- process_rdf: true, index_search: false,
- run_metrics: true, reasoning: true)
+ process_rdf: true, extract_metadata: false,
+ run_metrics: true)
sub = LinkedData::Models::Ontology.find(acronym).first.latest_submission(status: [:rdf, :metrics])
sub.bring(:metrics)
metrics = sub.metrics
@@ -1101,10 +1107,12 @@ def test_submission_metrics
def test_submission_extract_metadata
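+    # parses the same file twice with a bumped submissionId, seemingly to
+    # check that metadata extraction also works on a re-submitted version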
2.times.each do |i|
submission_parse("AGROOE", "AGROOE Test extract metadata ontology",
- "./test/data/ontology_files/agrooeMappings-05-05-2016.owl", 1,
- process_rdf: true, index_search: false,
- run_metrics: true, reasoning: false)
- sub = LinkedData::Models::Ontology.find("AGROOE").first.latest_submission
+ "./test/data/ontology_files/agrooeMappings-05-05-2016.owl", i+1,
+ process_rdf: true, extract_metadata: true, generate_missing_labels: false)
+ ont = LinkedData::Models::Ontology.find("AGROOE").first
+ sub = ont.latest_submission
+ refute_nil sub
+
sub.bring_remaining
assert_equal false, sub.deprecated
assert_equal '2015-09-28', sub.creationDate.to_date.to_s
@@ -1115,9 +1123,10 @@ def test_submission_extract_metadata
assert_equal ["http://lexvo.org/id/iso639-3/fra", "http://lexvo.org/id/iso639-3/eng"].sort, sub.naturalLanguage.sort
#assert_equal ["Léontine Dessaiterm", "Anne Toulet", "Benjamine Dessay", "Augustine Doap", "Vincent Emonet"].sort, sub.hasContributor.sort
assert_equal [RDF::URI.new("http://lirmm.fr/2015/ontology/door-relation.owl"), RDF::URI.new("http://lirmm.fr/2015/ontology/dc-relation.owl"),
- RDF::URI.new("http://lirmm.fr/2015/ontology/dcterms-relation.owl"), RDF::URI.new("http://lirmm.fr/2015/ontology/voaf-relation.owl")].sort, sub.ontologyRelatedTo.sort
-
-
+ RDF::URI.new("http://lirmm.fr/2015/ontology/dcterms-relation.owl"),
+ RDF::URI.new("http://lirmm.fr/2015/ontology/voaf-relation.owl"),
+ RDF::URI.new("http://lirmm.fr/2015/ontology/void-import.owl")
+ ].sort, sub.ontologyRelatedTo.sort
sub.description = "test changed value"
sub.save
end
@@ -1129,8 +1138,7 @@ def test_submission_delete_remove_files
submission_parse("ONTOMATEST",
"OntoMA TEST",
"./test/data/ontology_files/OntoMA.1.1_vVersion_1.1_Date__11-2011.OWL", 15,
- process_rdf: true, index_search: true,
- run_metrics: false, reasoning: true)
+ process_rdf: false)
sub = LinkedData::Models::OntologySubmission.where(ontology: [acronym: "ONTOMATEST"],
submissionId: 15)
@@ -1141,4 +1149,4 @@ def test_submission_delete_remove_files
sub.delete
assert !Dir.exist?(data_folder)
end
-end
+end
\ No newline at end of file
diff --git a/test/models/test_ontology_submission_validators.rb b/test/models/test_ontology_submission_validators.rb
index 7d88c59b..05ebe6ff 100644
--- a/test/models/test_ontology_submission_validators.rb
+++ b/test/models/test_ontology_submission_validators.rb
@@ -5,7 +5,7 @@
class TestOntologySubmissionValidators < LinkedData::TestOntologyCommon
def test_enforce_symmetric_ontologies
- skip('skip new callbacks tests until reimplemented')
+ skip 'complex validators disabled'
ontologies_properties_callbacks(:ontologyRelatedTo)
end
@@ -18,7 +18,7 @@ def test_lexvo_language_validator
sub.bring_remaining
assert sub.valid?
- sub.naturalLanguage = ["fr" , "http://iso639-3/eng"]
+ sub.naturalLanguage = ["fr", "http://iso639-3/eng"]
refute sub.valid?
assert sub.errors[:naturalLanguage][:lexvo_language]
@@ -31,7 +31,8 @@ def test_lexvo_language_validator
# Regroup all validity test related to a submission retired status (deprecated, valid date)
def test_submission_retired_validity
- skip('skip new callbacks tests until reimplemented')
+ skip 'complex validators disabled'
+
sorted_submissions = sorted_submissions_init
latest = sorted_submissions.first
@@ -75,6 +76,7 @@ def test_submission_retired_validity
end
def test_modification_date_previous_align
+ skip 'complex validators disabled'
sorted_submissions = sorted_submissions_init
latest = sorted_submissions[0]
@@ -107,17 +109,21 @@ def test_modification_date_previous_align
end
def test_has_prior_version_callback
+ skip 'complex validators disabled'
+
sorted_submissions = sorted_submissions_init
sorted_submissions.each_cons(2) do |current, previous|
current.bring :hasPriorVersion
+ refute_nil current.hasPriorVersion
assert previous.id, current.hasPriorVersion
end
end
def test_update_submissions_has_part
- skip('skip new callbacks tests until reimplemented')
+ skip 'complex validators disabled'
+
ont_count, ont_acronyms, ontologies =
create_ontologies_and_submissions(ont_count: 3, submission_count: 1,
process_submission: false, acronym: 'NCBO-545')
@@ -169,6 +175,7 @@ def test_update_submissions_has_part
end
def test_inverse_use_imports_callback
+ skip 'complex validators disabled'
ontologies_properties_callbacks(:useImports, :usedBy)
end
@@ -188,7 +195,6 @@ def sorted_submissions_init(submission_count = 3)
ont.submissions.sort { |a, b| b.submissionId <=> a.submissionId }
end
-
def ontologies_properties_callbacks(attr, inverse_attr = nil)
skip('skip new callbacks tests until reimplemented')
inverse_attr = attr unless inverse_attr
@@ -196,7 +202,6 @@ def ontologies_properties_callbacks(attr, inverse_attr = nil)
create_ontologies_and_submissions(ont_count: 3, submission_count: 1,
process_submission: false, acronym: 'NCBO-545')
-
assert_equal 3, ontologies.size
ontologies[0].bring :submissions
@@ -207,7 +212,7 @@ def ontologies_properties_callbacks(attr, inverse_attr = nil)
assert_empty first_sub.send(attr)
first_sub.bring_remaining
- first_sub.send( "#{attr}=",[ontologies[1].id, ontologies[2].id])
+ first_sub.send("#{attr}=", [ontologies[1].id, ontologies[2].id])
assert first_sub.valid?
@@ -221,7 +226,7 @@ def ontologies_properties_callbacks(attr, inverse_attr = nil)
assert_equal [ontologies[0].id], sub.send(inverse_attr)
end
- #sub is the submission of the ontology 2
+ # sub is the submission of the ontology 2
sub.bring_remaining
sub.send("#{inverse_attr}=", [])
sub.save
diff --git a/test/models/test_project.rb b/test/models/test_project.rb
index 00fb7a79..1081732e 100644
--- a/test/models/test_project.rb
+++ b/test/models/test_project.rb
@@ -90,7 +90,7 @@ def test_project_creator_multiple
users = Array.new(3) { LinkedData::Models::User.new }
users.each_with_index do |user, i|
user.username = "Test User #{i}"
- user.email = 'test_user@example.org'
+ user.email = "test_user#{i}@example.org"
user.password = 'password'
user.save
assert user.valid?, user.errors
diff --git a/test/models/test_provisional_class.rb b/test/models/test_provisional_class.rb
index 75e52cba..c0ab44cf 100644
--- a/test/models/test_provisional_class.rb
+++ b/test/models/test_provisional_class.rb
@@ -3,12 +3,11 @@
class TestProvisionalClass < LinkedData::TestOntologyCommon
def self.before_suite
- self._delete
-
@@user = LinkedData::Models::User.new({username: "Test User", email: "tester@example.org", password: "password"})
@@user.save
- ont_count, ont_names, ont_models = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1, submission_count: 1)
+ ont_count, ont_names, ont_models = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1,
+ submission_count: 1)
@@ontology = ont_models.first
@@ontology.bring(:name)
@@ontology.bring(:acronym)
@@ -21,16 +20,16 @@ def self.before_suite
def self.after_suite
pc = LinkedData::Models::ProvisionalClass.find(@@provisional_class.id).first
pc.delete unless pc.nil?
- LinkedData::Models::Ontology.indexClear
- LinkedData::Models::Ontology.indexCommit
- end
- def self._delete
+ LinkedData::Models::ProvisionalClass.indexClear
+ LinkedData::Models::ProvisionalClass.indexCommit
LinkedData::SampleData::Ontology.delete_ontologies_and_submissions
user = LinkedData::Models::User.find("Test User").first
user.delete unless user.nil?
+
end
+
def test_provisional_class_lifecycle
label = "Test Provisional Class Lifecycle"
pc = LinkedData::Models::ProvisionalClass.new({label: label, :creator => @@user})
@@ -71,7 +70,7 @@ def test_provisional_class_retrieval
pc_array = Array.new(3) { LinkedData::Models::ProvisionalClass.new }
pc_array.each_with_index do |pc, i|
pc.label = "Test PC #{i}"
- pc.creator = LinkedData::Models::User.new({username: creators[i], email: "tester@example.org", password: "password"}).save
+ pc.creator = LinkedData::Models::User.new({username: creators[i], email: "tester#{i}@example.org", password: "password"}).save
pc.save
assert pc.valid?, "#{pc.errors}"
end
@@ -91,7 +90,7 @@ def test_provisional_class_retrieval
def test_provisional_class_filter_by_creator
username = "User Testing Filtering"
- user = LinkedData::Models::User.new({username: username, email: "tester@example.org", password: "password"})
+ user = LinkedData::Models::User.new({username: username, email: "tester#{rand}@example.org", password: "password"})
user.save
assert user.valid?, "#{user.errors}"
@@ -286,13 +285,13 @@ def test_provisional_class_search_indexing
pc = @@provisional_class
pc.ontology = @@ontology
pc.unindex
- resp = LinkedData::Models::Ontology.search("\"#{pc.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc.label}\"", params)
assert_equal 0, resp["response"]["numFound"]
pc.index
- resp = LinkedData::Models::Ontology.search("\"#{pc.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
- assert_equal pc.label, resp["response"]["docs"][0]["prefLabel"]
+ assert_equal pc.label, resp["response"]["docs"][0]["prefLabel"].first
pc.unindex
acr = "CSTPROPS"
@@ -313,18 +312,18 @@ def test_provisional_class_search_indexing
pc3.save
pc3 = LinkedData::Models::ProvisionalClass.find(pc3.id).include(:label).first
- resp = LinkedData::Models::Ontology.search("\"#{pc1.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc1.label}\"", params)
assert_equal 1, resp["response"]["numFound"]
- assert_equal pc1.label, resp["response"]["docs"][0]["prefLabel"]
+ assert_equal pc1.label, resp["response"]["docs"][0]["prefLabel"].first
par_len = resp["response"]["docs"][0]["parents"].length
assert_equal 5, par_len
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == class_id.to_s }).length
- resp = LinkedData::Models::Ontology.search("\"#{pc2.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc2.label}\"", params)
assert_equal par_len + 1, resp["response"]["docs"][0]["parents"].length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc1.id.to_s }).length
- resp = LinkedData::Models::Ontology.search("\"#{pc3.label}\"", params)
+ resp = LinkedData::Models::ProvisionalClass.search("\"#{pc3.label}\"", params)
assert_equal par_len + 2, resp["response"]["docs"][0]["parents"].length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc1.id.to_s }).length
assert_equal 1, (resp["response"]["docs"][0]["parents"].select { |x| x == pc2.id.to_s }).length
diff --git a/test/models/test_provisional_relation.rb b/test/models/test_provisional_relation.rb
index 43add0cf..78839f75 100644
--- a/test/models/test_provisional_relation.rb
+++ b/test/models/test_provisional_relation.rb
@@ -2,52 +2,45 @@
class TestProvisionalRelation < LinkedData::TestCase
- def setup
- _delete
-
- @user = LinkedData::Models::User.new({username: "Test User", email: "tester@example.org", password: "password"})
- assert @user.valid?, "Invalid User object #{@user.errors}"
- @user.save
-
- ont_count, ont_names, ont_models = create_ontologies_and_submissions(ont_count: 1, submission_count: 1, process_submission: true)
- @ontology = ont_models.first
- @ontology.bring(:name)
- @ontology.bring(:acronym)
- @submission = @ontology.bring(:submissions).submissions.first
-
- @provisional_class = LinkedData::Models::ProvisionalClass.new({label: "Test Provisional Class", creator: @user, ontology: @ontology})
- assert @provisional_class.valid?, "Invalid ProvisionalClass object #{@provisional_class.errors}"
- @provisional_class.save
-
- @relation_type = RDF::IRI.new "http://www.w3.org/2004/02/skos/core#exactMatch"
-
- @target_class_uri1 = RDF::IRI.new "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image_Algorithm"
- @target_class_uri2 = RDF::IRI.new "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Integration_and_Interoperability_Tools"
-
- @provisional_rel1 = LinkedData::Models::ProvisionalRelation.new({creator: @user, source: @provisional_class, relationType: @relation_type, targetClassId: @target_class_uri1, targetClassOntology: @ontology})
- assert @provisional_rel1.valid?, "Invalid ProvisionalRelation object #{@provisional_rel1.errors}"
- @provisional_rel1.save
- @provisional_rel2 = LinkedData::Models::ProvisionalRelation.new({creator: @user, source: @provisional_class, relationType: @relation_type, targetClassId: @target_class_uri2, targetClassOntology: @ontology})
- assert @provisional_rel2.valid?, "Invalid ProvisionalRelation object #{@provisional_rel2.errors}"
- @provisional_rel2.save
- end
+ def self.before_suite
+ @@user = LinkedData::Models::User.new({username: "Test User", email: "tester@example.org", password: "password"})
+ @@user.save
+
+ ont_count, ont_names, ont_models = self.new('').create_ontologies_and_submissions(ont_count: 1, submission_count: 1,
+ process_submission: true,
+ process_options: {process_rdf: true,
+ extract_metadata: false})
+
+ @@ontology = ont_models.first
+ @@ontology.bring(:name)
+ @@ontology.bring(:acronym)
+ @@submission = @@ontology.bring(:submissions).submissions.first
+
+ @@provisional_class = LinkedData::Models::ProvisionalClass.new({label: "Test Provisional Class", creator: @@user, ontology: @@ontology})
+ @@provisional_class.save
+
+ @@relation_type = RDF::IRI.new "http://www.w3.org/2004/02/skos/core#exactMatch"
- def _delete
- delete_ontologies_and_submissions
- user = LinkedData::Models::User.find("Test User").first
- user.delete unless user.nil?
+ @@target_class_uri1 = RDF::IRI.new "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image_Algorithm"
+ @@target_class_uri2 = RDF::IRI.new "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Integration_and_Interoperability_Tools"
+
+ @@provisional_rel1 = LinkedData::Models::ProvisionalRelation.new({creator: @@user, source: @@provisional_class, relationType: @@relation_type, targetClassId: @@target_class_uri1, targetClassOntology: @@ontology})
+ @@provisional_rel1.save
+ @@provisional_rel2 = LinkedData::Models::ProvisionalRelation.new({creator: @@user, source: @@provisional_class, relationType: @@relation_type, targetClassId: @@target_class_uri2, targetClassOntology: @@ontology})
+ @@provisional_rel2.save
end
+
def test_create_provisional_relation
- rel1 = LinkedData::Models::ProvisionalRelation.find(@provisional_rel1.id).first
+ rel1 = LinkedData::Models::ProvisionalRelation.find(@@provisional_rel1.id).first
rel1.bring_remaining
assert rel1.valid?
- rel2 = LinkedData::Models::ProvisionalRelation.find(@provisional_rel2.id).first
+ rel2 = LinkedData::Models::ProvisionalRelation.find(@@provisional_rel2.id).first
rel2.bring_remaining
assert rel2.valid?
- pc = LinkedData::Models::ProvisionalClass.find(@provisional_class.id).first
+ pc = LinkedData::Models::ProvisionalClass.find(@@provisional_class.id).first
pc.bring(:relations)
assert_equal 2, pc.relations.length
@@ -58,32 +51,23 @@ def test_create_provisional_relation
end
def test_find_unique_provisional_relation
- rel = LinkedData::Models::ProvisionalRelation.find_unique(@provisional_class.id, @relation_type, @target_class_uri1, @ontology.id)
- assert_equal @provisional_rel1.id, rel.id
+ rel = LinkedData::Models::ProvisionalRelation.find_unique(@@provisional_class.id, @@relation_type, @@target_class_uri1, @@ontology.id)
+ assert_equal @@provisional_rel1.id, rel.id
end
def test_equals
- rel = LinkedData::Models::ProvisionalRelation.find_unique(@provisional_class.id, @relation_type, @target_class_uri1, @ontology.id)
- assert @provisional_rel1 == rel
+ rel = LinkedData::Models::ProvisionalRelation.find_unique(@@provisional_class.id, @@relation_type, @@target_class_uri1, @@ontology.id)
+ assert @@provisional_rel1 == rel
end
def test_target_class
- target_class = @provisional_rel1.target_class
- assert_equal @target_class_uri1, target_class.id
+ target_class = @@provisional_rel1.target_class
+ assert_equal @@target_class_uri1, target_class.id
target_class.bring(:submission) if target_class.bring?(:submission)
target_class.submission.bring(ontology: [:acronym]) if target_class.submission.bring?(:ontology)
- assert_equal @ontology.acronym, target_class.submission.ontology.acronym
+ assert_equal @@ontology.acronym, target_class.submission.ontology.acronym
end
- def teardown
- super
- @provisional_class.delete
- @provisional_rel1.delete
- @provisional_rel2.delete
- rel1 = LinkedData::Models::ProvisionalRelation.find(@provisional_rel1.id).first
- assert_nil rel1
- rel2 = LinkedData::Models::ProvisionalRelation.find(@provisional_rel2.id).first
- assert_nil rel2
- end
+
end
\ No newline at end of file
diff --git a/test/models/test_resource.rb b/test/models/test_resource.rb
new file mode 100644
index 00000000..b409ddb1
--- /dev/null
+++ b/test/models/test_resource.rb
@@ -0,0 +1,292 @@
+require_relative "../test_case"
+require_relative './test_ontology_common'
+
+class TestResource < LinkedData::TestOntologyCommon
+
+ def self.before_suite
+ LinkedData::TestCase.backend_4s_delete
+
+ # Example
+    data = %(
+      <http://example.org/person1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/name> "John Doe" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/age> "30"^^<http://www.w3.org/2001/XMLSchema#integer> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/gender> "male" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/email> <mailto:john@example.com> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> <http://example.org/person3> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:blanknode1 .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:blanknode2 .
+      _:blanknode1 <http://xmlns.com/foaf/0.1/name> "Jane Smith" .
+      _:blanknode1 <http://xmlns.com/foaf/0.1/age> "25"^^<http://www.w3.org/2001/XMLSchema#integer> .
+      _:blanknode1 <http://xmlns.com/foaf/0.1/gender> "female" .
+      _:blanknode1 <http://xmlns.com/foaf/0.1/email> <mailto:jane@example.com> .
+      _:blanknode2 <http://xmlns.com/foaf/0.1/name> "Jane Smith 2" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+
+      <http://example.org/person3> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/name> "Alice Cooper" .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/age> "35"^^<http://www.w3.org/2001/XMLSchema#integer> .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/gender> "female" .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/email> <mailto:alice@example.com> .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/hasSkill> _:skill1, _:skill2 .
+      _:skill1 <http://xmlns.com/foaf/0.1/name> "Programming" .
+      _:skill1 <http://xmlns.com/foaf/0.1/relatedSkill> _:skill2 .
+      _:skill2 <http://xmlns.com/foaf/0.1/name> "Data Analysis" .
+      _:skill2 <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://example.org/Skill> .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+      <http://example.org/person3> <http://xmlns.com/foaf/0.1/hasInterest> "Photography" .
+
+      <http://example2.org/person2> <http://xmlns.com/foaf/0.1/mother> <http://example.org/person1> .
+      <http://example2.org/person5> <http://xmlns.com/foaf/0.1/friend> <http://example.org/person1> .
+      <http://example2.org/person5> <http://xmlns.com/foaf/0.1/brother> <http://example.org/person1> .
+    )
+
+ graph = "http://example.org/test_graph"
+ Goo.sparql_data_client.execute_append_request(graph, data, '')
+
+ # instance the resource model
+ @@resource1 = LinkedData::Models::Resource.new("http://example.org/test_graph", "http://example.org/person1")
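+    # Resource wraps a (named graph, subject IRI) pair; the tests below
+    # exercise to_hash/to_object plus the JSON-LD, XML and N-Triples writers.
+    # The skill/interest predicates in the fixture above are pseudo-FOAF
+    # terms used only as test data.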
+ end
+
+ def self.after_suite
+ Goo.sparql_data_client.delete_graph("http://example.org/test_graph")
+ Goo.sparql_data_client.delete_graph("http://data.bioontology.org/ontologies/TEST-TRIPLES/submissions/2")
+ @resource1&.destroy
+ end
+
+ def test_generate_model
+ @object = @@resource1.to_object
+ @model = @object.class
+
+ assert_equal LinkedData::Models::Base, @model.ancestors[1]
+
+ @model.model_settings[:attributes].map do |property, val|
+ property_url = "#{val[:property]}#{property}"
+ assert_includes @@resource1.to_hash.keys, property_url
+
+ hash_value = @@resource1.to_hash[property_url]
+ object_value = @object.send(property.to_sym)
+ if property.to_sym == :knows
+ assert_equal hash_value.map{|x| x.is_a?(Hash) ? x.values : x}.flatten.map(&:to_s).sort,
+ object_value.map{|x| x.is_a?(String) ? x : x.to_h.values}.flatten.map(&:to_s).sort
+ else
+ assert_equal Array(hash_value).map(&:to_s), Array(object_value).map(&:to_s)
+ end
+ end
+
+ assert_equal "http://example.org/person1", @object.id.to_s
+
+ assert_equal Goo.namespaces[:foaf][:Person].to_s, @model.type_uri.to_s
+ end
+
+ def test_resource_fetch_related_triples
+ result = @@resource1.to_hash
+ assert_instance_of Hash, result
+
+ refute_empty result
+
+ expected_result = {
+ "id" => "http://example.org/person1",
+ "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" => "http://xmlns.com/foaf/0.1/Person",
+ "http://xmlns.com/foaf/0.1/gender" => "male",
+ "http://xmlns.com/foaf/0.1/hasInterest" => %w[Cooking Hiking],
+ "http://xmlns.com/foaf/0.1/age" => "30",
+ "http://xmlns.com/foaf/0.1/email" => "mailto:john@example.com",
+ "http://xmlns.com/foaf/0.1/knows" =>
+ ["http://example.org/person3",
+ {
+ "http://xmlns.com/foaf/0.1/gender" => "female",
+ "http://xmlns.com/foaf/0.1/age" => "25",
+ "http://xmlns.com/foaf/0.1/email" => "mailto:jane@example.com",
+ "http://xmlns.com/foaf/0.1/name" => "Jane Smith"
+ },
+ {
+ "http://xmlns.com/foaf/0.1/name" => "Jane Smith 2"
+ }
+ ],
+ "http://xmlns.com/foaf/0.1/name" => "John Doe",
+ "reverse" => {
+ "http://example2.org/person2" => "http://xmlns.com/foaf/0.1/mother",
+ "http://example2.org/person5" => ["http://xmlns.com/foaf/0.1/brother", "http://xmlns.com/foaf/0.1/friend"]
+ }
+ }
+ result = JSON.parse(MultiJson.dump(result))
+ a = sort_nested_hash(result)
+ b = sort_nested_hash(expected_result)
+ assert_equal b, a
+ end
+
+ def test_resource_serialization_json
+ result = @@resource1.to_json
+
+ refute_empty result
+ expected_result = %(
+ {
+ "@context": {"ns0": "http://example.org/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "foaf": "http://xmlns.com/foaf/0.1/", "ns1": "http://example2.org/"},
+ "@graph": [
+ {
+ "@id": "ns0:person1",
+ "@type": "foaf:Person",
+ "foaf:name": "John Doe",
+ "foaf:age": {"@type": "http://www.w3.org/2001/XMLSchema#integer", "@value": "30"},
+ "foaf:email": {"@id": "mailto:john@example.com"},
+ "foaf:gender": "male",
+ "foaf:hasInterest": ["Cooking", "Hiking"],
+ "foaf:knows": [{"@id": "ns0:person3"}, {"@id": "_:g445960"}, {"@id": "_:g445980"}]
+ },
+ {
+ "@id": "_:g445960",
+ "foaf:name": "Jane Smith",
+ "foaf:age": {"@type": "http://www.w3.org/2001/XMLSchema#integer", "@value": "25"},
+ "foaf:email": {"@id": "mailto:jane@example.com"},
+ "foaf:gender": "female"
+ },
+ {"@id": "_:g445980", "foaf:name": "Jane Smith 2"},
+ {"@id": "ns1:person5", "foaf:friend": {"@id": "ns0:person1"}, "foaf:brother": {"@id": "ns0:person1"}},
+ {"@id": "ns1:person2", "foaf:mother": {"@id": "ns0:person1"}}
+ ]
+ }
+ )
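+    # blank node labels (_:gNNN) are nondeterministic, so normalize them to a fixed token before comparing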
+    result = JSON.parse(result.gsub(' ', '').gsub("\n", '').gsub(/_:g\d+/, 'blank_node'))
+    expected_result = JSON.parse(expected_result.gsub(' ', '').gsub("\n", '').gsub(/_:g\d+/, 'blank_node'))
+
+ a = sort_nested_hash(result)
+ b = sort_nested_hash(expected_result)
+
+ assert_equal b, a
+ end
+
+ def test_resource_serialization_xml
+ result = @@resource1.to_xml
+
+ refute_empty result
+    expected_result = %(
+      <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:foaf="http://xmlns.com/foaf/0.1/">
+        <foaf:Person rdf:about="http://example.org/person1">
+          <foaf:gender>male</foaf:gender>
+          <foaf:hasInterest>Cooking</foaf:hasInterest>
+          <foaf:hasInterest>Hiking</foaf:hasInterest>
+          <foaf:age rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">30</foaf:age>
+          <foaf:email rdf:resource="mailto:john@example.com"/>
+          <foaf:knows rdf:resource="http://example.org/person3"/>
+          <foaf:knows>
+            <rdf:Description rdf:nodeID="g445940">
+              <foaf:gender>female</foaf:gender>
+              <foaf:age rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">25</foaf:age>
+              <foaf:email rdf:resource="mailto:jane@example.com"/>
+              <foaf:name>Jane Smith</foaf:name>
+            </rdf:Description>
+          </foaf:knows>
+          <foaf:knows>
+            <rdf:Description rdf:nodeID="g445960">
+              <foaf:name>Jane Smith 2</foaf:name>
+            </rdf:Description>
+          </foaf:knows>
+          <foaf:name>John Doe</foaf:name>
+        </foaf:Person>
+        <rdf:Description rdf:about="http://example2.org/person2">
+          <foaf:mother rdf:resource="http://example.org/person1"/>
+        </rdf:Description>
+        <rdf:Description rdf:about="http://example2.org/person5">
+          <foaf:brother rdf:resource="http://example.org/person1"/>
+          <foaf:friend rdf:resource="http://example.org/person1"/>
+        </rdf:Description>
+      </rdf:RDF>
+    )
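+    # rdf:nodeID values vary between runs, so strip them and compare sorted line sets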
+ a = result.gsub(' ', '').gsub(/rdf:nodeID="[^"]*"/, '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').gsub(/rdf:nodeID="[^"]*"/, '').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ def test_resource_serialization_ntriples
+ result = @@resource1.to_ntriples
+
+ refute_empty result
+
+    expected_result = %(
+      <http://example.org/person1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://xmlns.com/foaf/0.1/Person> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/gender> "male" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Cooking" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/hasInterest> "Hiking" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/age> "30"^^<http://www.w3.org/2001/XMLSchema#integer> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/email> <mailto:john@example.com> .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> <http://example.org/person3> .
+      _:g445940 <http://xmlns.com/foaf/0.1/gender> "female" .
+      _:g445940 <http://xmlns.com/foaf/0.1/age> "25"^^<http://www.w3.org/2001/XMLSchema#integer> .
+      _:g445940 <http://xmlns.com/foaf/0.1/email> <mailto:jane@example.com> .
+      _:g445940 <http://xmlns.com/foaf/0.1/name> "Jane Smith" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:g445940 .
+      _:g445960 <http://xmlns.com/foaf/0.1/name> "Jane Smith 2" .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/knows> _:g445960 .
+      <http://example.org/person1> <http://xmlns.com/foaf/0.1/name> "John Doe" .
+      <http://example2.org/person2> <http://xmlns.com/foaf/0.1/mother> <http://example.org/person1> .
+      <http://example2.org/person5> <http://xmlns.com/foaf/0.1/brother> <http://example.org/person1> .
+      <http://example2.org/person5> <http://xmlns.com/foaf/0.1/friend> <http://example.org/person1> .
+    )
+    a = result.gsub(' ', '').gsub(/_:g\d+/, 'blank_node').split("\n").reject(&:empty?)
+    b = expected_result.gsub(' ', '').gsub(/_:g\d+/, 'blank_node').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ def test_resource_serialization_turtle
+ result = @@resource1.to_turtle
+ refute_empty result
+ expected_result = %(
+      @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+      @prefix ns0: <http://example.org/> .
+      @prefix foaf: <http://xmlns.com/foaf/0.1/> .
+      @prefix ns1: <http://example2.org/> .
+
+ ns0:person1
+ a foaf:Person ;
+ foaf:age 30 ;
+        foaf:email <mailto:john@example.com> ;
+ foaf:gender "male" ;
+ foaf:hasInterest "Cooking", "Hiking" ;
+ foaf:knows ns0:person3, [
+ foaf:age 25 ;
+          foaf:email <mailto:jane@example.com> ;
+ foaf:gender "female" ;
+ foaf:name "Jane Smith"
+ ], [
+ foaf:name "Jane Smith 2"
+ ] ;
+ foaf:name "John Doe" .
+
+ ns1:person2
+ foaf:mother ns0:person1 .
+
+ ns1:person5
+ foaf:brother ns0:person1 ;
+ foaf:friend ns0:person1 .
+ )
+ a = result.gsub(' ', '').split("\n").reject(&:empty?)
+ b = expected_result.gsub(' ', '').split("\n").reject(&:empty?)
+
+ assert_equal b.sort, a.sort
+ end
+
+ private
+
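+  # Recursively sorts hash keys and array elements so deep comparisons ignore ordering.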
+ def sort_nested_hash(hash)
+ sorted_hash = {}
+
+ hash.each do |key, value|
+ if value.is_a?(Hash)
+ sorted_hash[key] = sort_nested_hash(value)
+ elsif value.is_a?(Array)
+ sorted_hash[key] = value.map { |item| item.is_a?(Hash) ? sort_nested_hash(item) : item }.sort_by { |item| item.to_s }
+ else
+ sorted_hash[key] = value
+ end
+ end
+
+ sorted_hash.sort.to_h
+ end
+
+end
\ No newline at end of file
diff --git a/test/models/test_search.rb b/test/models/test_search.rb
new file mode 100644
index 00000000..0051539a
--- /dev/null
+++ b/test/models/test_search.rb
@@ -0,0 +1,198 @@
+require_relative '../test_case'
+
+class TestSearch < LinkedData::TestCase
+
+ def self.after_suite
+ backend_4s_delete
+ LinkedData::Models::Ontology.indexClear
+ LinkedData::Models::Agent.indexClear
+ Goo.search_client(:ontology_data)&.clear_all_data
+ end
+
+ def setup
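+    # reset the backend and the search indexes so every test starts from a clean state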
+ self.class.after_suite
+ end
+
+ def test_search_ontology
+ ont_count, ont_acronyms, created_ontologies = create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: {
+ process_rdf: true,
+ generate_missing_labels: false,
+ extract_metadata: false, run_metrics: true },
+ acronym: 'BROTEST',
+ name: 'ontTEST Bla',
+ file_path: '../../../../test/data/ontology_files/BRO_v3.2.owl',
+ ont_count: 2,
+ submission_count: 2
+ })
+
+ ontologies = LinkedData::Models::Ontology.search('*:*', { fq: 'resource_model: "ontology"' })['response']['docs']
+
+ assert_equal 2, ontologies.size
+ ontologies.each do |ont|
+ select_ont = created_ontologies.select { |ont_created| ont_created.id.to_s.eql?(ont['id']) }.first
+ refute_nil select_ont
+ select_ont.bring_remaining
+ assert_equal ont['name_text'], select_ont.name
+ assert_equal ont['acronym_text'], select_ont.acronym
+ assert_equal ont['viewingRestriction_t'], select_ont.viewingRestriction
+ assert_equal ont['ontologyType_t'], select_ont.ontologyType.id
+ end
+
+ submissions = LinkedData::Models::Ontology.search('*:*', { fq: 'resource_model: "ontology_submission"' })['response']['docs']
+ assert_equal 4, submissions.size
+ submissions.each do |sub|
+ created_sub = LinkedData::Models::OntologySubmission.find(RDF::URI.new(sub['id'])).first&.bring_remaining
+ refute_nil created_sub
+ assert_equal sub['description_text'], created_sub.description
+ assert_equal sub['submissionId_i'], created_sub.submissionId
+ assert_equal sub['URI_text'], created_sub.URI
+ assert_equal sub['status_t'], created_sub.status
+ assert_equal sub['deprecated_b'], created_sub.deprecated
+ assert_equal sub['hasOntologyLanguage_t'], created_sub.hasOntologyLanguage.id.to_s
+ assert_equal sub['released_dt'], created_sub.released.utc.strftime('%Y-%m-%dT%H:%M:%SZ')
+ assert_equal sub['creationDate_dt'], created_sub.creationDate.utc.strftime('%Y-%m-%dT%H:%M:%SZ')
+ assert_equal(sub['contact_txt'], created_sub.contact.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal sub['dataDump_t'], created_sub.dataDump
+ assert_equal sub['csvDump_t'], created_sub.csvDump
+ assert_equal sub['uriLookupEndpoint_t'], created_sub.uriLookupEndpoint
+ assert_equal sub['openSearchDescription_t'], created_sub.openSearchDescription
+ assert_equal sub['endpoint_txt'], created_sub.endpoint
+ assert_equal sub['uploadFilePath_t'], created_sub.uploadFilePath
+ assert_equal sub['submissionStatus_txt'].sort, created_sub.submissionStatus.map { |x| x.id.to_s }.sort
+
+ created_sub.metrics.bring_remaining
+
+ assert_equal sub['metrics_classes_i'], created_sub.metrics.classes
+ assert_equal sub['metrics_individuals_i'], created_sub.metrics.individuals
+ assert_equal sub['metrics_properties_i'], created_sub.metrics.properties
+ assert_equal sub['metrics_maxDepth_i'], created_sub.metrics.maxDepth
+ assert_equal sub['metrics_maxChildCount_i'], created_sub.metrics.maxChildCount
+ assert_equal sub['metrics_averageChildCount_i'], created_sub.metrics.averageChildCount
+ assert_equal sub['metrics_classesWithOneChild_i'], created_sub.metrics.classesWithOneChild
+ assert_equal sub['metrics_classesWithMoreThan25Children_i'], created_sub.metrics.classesWithMoreThan25Children
+ assert_equal sub['metrics_classesWithNoDefinition_i'], created_sub.metrics.classesWithNoDefinition
+
+ embed_doc = created_sub.ontology.bring_remaining.embedded_doc
+ embed_doc.each do |k, v|
+ if v.is_a?(Array)
+ assert_equal v, Array(sub["ontology_#{k}"])
+ else
+ assert_equal v, sub["ontology_#{k}"]
+ end
+ end
+ end
+ end
+
+ def test_search_agents
+ @@user1 = LinkedData::Models::User.new(:username => 'user111221', :email => 'some111221@email.org')
+ @@user1.passwordHash = 'some random pass hash'
+ @@user1.save
+
+ @agents = [
+ LinkedData::Models::Agent.new(name: 'name 0', email: 'test_0@test.com', agentType: 'organization', creator: @@user1),
+ LinkedData::Models::Agent.new(name: 'name 1', email: 'test_1@test.com', agentType: 'organization', creator: @@user1),
+ LinkedData::Models::Agent.new(name: 'name 2', email: 'test_2@test.com', agentType: 'person', creator: @@user1)
+ ]
+ @identifiers = [
+ LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ROR', creator: @@user1),
+ LinkedData::Models::AgentIdentifier.new(notation: '000h6jb29', schemaAgency: 'ORCID', creator: @@user1),
+ ]
+
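+    # the last agent references the first two as affiliations and carries both identifiers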
+ @identifiers.each { |i| i.save }
+ affiliations = @agents[0..1].map { |a| a.save }
+ agent = @agents.last
+ agent.affiliations = affiliations
+
+ agent.identifiers = @identifiers
+ agent.save
+
+ agents = LinkedData::Models::Agent.search('*:*')['response']['docs']
+
+ assert_equal 3, agents.size
+ agents.each do |a|
+ select_agent = @agents.select { |agent_created| agent_created.id.to_s.eql?(a['id']) }.first
+ refute_nil select_agent
+ select_agent.bring_remaining
+
+ assert_equal a['name_text'], select_agent.name
+ assert_equal a['email_text'], select_agent.email
+ assert_equal a['agentType_t'], select_agent.agentType
+ assert_equal(a['affiliations_txt'], select_agent.affiliations&.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal(a['identifiers_texts'], select_agent.identifiers&.map { |x| x.bring_remaining.embedded_doc })
+ assert_equal a['creator_t'], select_agent.creator.bring_remaining.embedded_doc
+ end
+
+ @identifiers.each { |i| i.delete }
+ @agents.each { |a| a.delete }
+ @@user1.delete
+ end
+
+ def test_search_ontology_data
+ ont_count, ont_acronyms, created_ontologies = create_ontologies_and_submissions({
+ process_submission: true,
+ process_options: {
+ process_rdf: true,
+ extract_metadata: false,
+ generate_missing_labels: false,
+ index_all_data: true
+ },
+ acronym: 'BROTEST',
+ name: 'ontTEST Bla',
+ file_path: 'test/data/ontology_files/thesaurusINRAE_nouv_structure.skos',
+ ont_count: 1,
+ submission_count: 1,
+ ontology_format: 'SKOS'
+ })
+    ont = LinkedData::Models::Ontology.find('BROTEST-0').first
+    ont_sub = ont.latest_submission
+
+ refute_empty(ont_sub.submissionStatus.select { |x| x.id['INDEXED_ALL_DATA'] })
+
+ conn = Goo.search_client(:ontology_data)
+ response = conn.search('*')
+
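+    # the index should hold one document per distinct subject in the submission graph (one extra is tolerated)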
+ count = Goo.sparql_query_client.query("SELECT (COUNT( DISTINCT ?id) as ?c) FROM <#{ont_sub.id}> WHERE {?id ?p ?v}")
+ .first[:c]
+ .to_i
+
+ assert_includes [count, count+1], response['response']['numFound']
+
+ response = conn.search('*', fq: ' resource_id:"http://opendata.inrae.fr/thesaurusINRAE/c_10065"')
+
+ assert_equal 1, response['response']['numFound']
+ doc = response['response']['docs'].first
+
+ expected_doc = {
+ 'id' => 'http://opendata.inrae.fr/thesaurusINRAE/c_10065_BROTEST-0',
+ 'submission_id_t' => 'http://data.bioontology.org/ontologies/BROTEST-0/submissions/1',
+ 'ontology_t' => 'BROTEST-0',
+ 'resource_id' => 'http://opendata.inrae.fr/thesaurusINRAE/c_10065',
+ 'type_txt' => %w[http://www.w3.org/2004/02/skos/core#Concept http://www.w3.org/2002/07/owl#NamedIndividual],
+ 'http___www.w3.org_2004_02_skos_core_inScheme_txt' => %w[http://opendata.inrae.fr/thesaurusINRAE/thesaurusINRAE http://opendata.inrae.fr/thesaurusINRAE/mt_53],
+ 'http___www.w3.org_2004_02_skos_core_broader_t' => 'http://opendata.inrae.fr/thesaurusINRAE/c_9937',
+ 'http___www.w3.org_2004_02_skos_core_altLabel_txt' => ['GMO food',
+ 'aliment transgénique',
+ 'aliment OGM',
+ 'transgenic food'],
+ 'http___www.w3.org_2004_02_skos_core_prefLabel_txt' => ['genetically modified food',
+ 'aliment génétiquement modifié'],
+ 'resource_model' => 'ontology_submission'
+ }
+
+ doc.delete('_version_')
+
+ assert_equal expected_doc['id'], doc['id']
+ assert_equal expected_doc['submission_id_t'], doc['submission_id_t']
+ assert_equal expected_doc['ontology_t'], doc['ontology_t']
+ assert_equal expected_doc['resource_id'], doc['resource_id']
+ assert_equal expected_doc['type_txt'].sort, doc['type_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_inScheme_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_inScheme_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_broader_t'], doc['http___www.w3.org_2004_02_skos_core_broader_t']
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_altLabel_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_altLabel_txt'].sort
+ assert_equal expected_doc['http___www.w3.org_2004_02_skos_core_prefLabel_txt'].sort, doc['http___www.w3.org_2004_02_skos_core_prefLabel_txt'].sort
+ assert_equal expected_doc['resource_model'], doc['resource_model']
+
+ end
+end
diff --git a/test/models/test_skos_submission.rb b/test/models/test_skos_submission.rb
index c754dc79..5a8afda1 100644
--- a/test/models/test_skos_submission.rb
+++ b/test/models/test_skos_submission.rb
@@ -43,7 +43,7 @@ def test_roots_no_main_scheme
roots.each do |root|
q_broader = <<-eos
SELECT ?children WHERE {
- ?children #{RDF::SKOS[:broader].to_ntriples} #{root.id.to_ntriples} }
+ ?children #{RDF::Vocab::SKOS[:broader].to_ntriples} #{root.id.to_ntriples} }
eos
children_query = []
Goo.sparql_query_client.query(q_broader).each_solution do |sol|
@@ -92,7 +92,7 @@ def test_roots_of_multiple_scheme
roots.each do |r|
selected_schemes = r.inScheme.select { |s| concept_schemes.include?(s) }
refute_empty selected_schemes
- assert_equal r.isInActiveScheme, selected_schemes
+ assert_equal r.isInActiveScheme.sort, selected_schemes.sort
assert_equal r.isInActiveCollection, []
end
roots = roots.map { |r| r.id.to_s } unless roots.nil?
diff --git a/test/models/user/test_user_oauth.rb b/test/models/user/test_user_oauth.rb
new file mode 100644
index 00000000..41eb6eb5
--- /dev/null
+++ b/test/models/user/test_user_oauth.rb
@@ -0,0 +1,80 @@
+require_relative '../../test_case'
+
+class TestUserOAuthAuthentication < LinkedData::TestCase
+
+ def self.before_suite
+ @@fake_responses = {
+ github: {
+ id: 123456789,
+ login: 'github_user',
+ email: 'github_user@example.com',
+ name: 'GitHub User',
+ avatar_url: 'https://avatars.githubusercontent.com/u/123456789'
+ },
+ google: {
+ sub: 'google_user_id',
+ email: 'google_user@example.com',
+ name: 'Google User',
+ given_name: 'Google',
+ family_name: 'User',
+ picture: 'https://lh3.googleusercontent.com/a-/user-profile-image-url'
+ },
+ orcid: {
+ orcid: '0000-0002-1825-0097',
+ email: 'orcid_user@example.com',
+ name: {
+ "family-name": 'ORCID',
+ "given-names": 'User'
+ }
+ }
+ }
+ end
+
+
+ def test_authentication_new_users
+ users = []
+
+ @@fake_responses.each do |provider, data|
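+      # stub the configured provider endpoint so oauth_authenticate never makes a real HTTP call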
+ WebMock.stub_request(:get, LinkedData::Models::User.oauth_providers[provider][:link])
+ .to_return(status: 200, body: data.to_json, headers: { 'Content-Type' => 'application/json' })
+ user = LinkedData::Models::User.oauth_authenticate('fake token', provider)
+ refute_nil user
+ assert user.is_a?(LinkedData::Models::User)
+ assert_equal user.email, data[:email]
+ users << user
+ end
+
+ users.each(&:delete)
+ end
+
+ def test_authentication_existent_users
+ users = []
+ @@fake_responses.each do |provider, data|
+ user_hash = LinkedData::Models::User.send("user_from_#{provider}_data", data.stringify_keys)
+
+ user = LinkedData::Models::User.new(user_hash)
+ user.githubId = nil
+ user.orcidId = nil
+ user.password = 'password'
+
+ assert user.valid?
+
+ user.save
+
+ WebMock.stub_request(:get, LinkedData::Models::User.oauth_providers[provider][:link])
+ .to_return(status: 200, body: data.to_json, headers: { 'Content-Type' => 'application/json' })
+ auth_user = LinkedData::Models::User.oauth_authenticate('fake token', provider)
+
+ assert_equal auth_user.id, user.id
+
+ if provider.eql?(:github)
+ assert_equal data[:githubId], auth_user.githubId
+ elsif provider.eql?(:orcid)
+ assert_equal data[:orcidId], auth_user.orcidId
+ end
+ users << user
+ end
+ users.each(&:delete)
+ end
+
+end
diff --git a/test/rack/test_request_authorization.rb b/test/rack/test_request_authorization.rb
index ae81ac7c..b904f7ee 100644
--- a/test/rack/test_request_authorization.rb
+++ b/test/rack/test_request_authorization.rb
@@ -43,7 +43,7 @@ def _create_user
user = LinkedData::Models::User.new({
username: username,
password: "test_password",
- email: "test_email@example.org"
+ email: "test_email#{rand}@example.org"
})
user.save unless user.exist?
users << user
diff --git a/config/solr/term_search/schema.xml b/test/solr/configsets/term_search/conf/schema.xml
similarity index 54%
rename from config/solr/term_search/schema.xml
rename to test/solr/configsets/term_search/conf/schema.xml
index 6b18a2a1..3bb5f9e7 100644
--- a/config/solr/term_search/schema.xml
+++ b/test/solr/configsets/term_search/conf/schema.xml
diff --git a/test/test_case.rb b/test/test_case.rb
index df7d5b76..af9cf9a4 100644
--- a/test/test_case.rb
+++ b/test/test_case.rb
@@ -18,6 +18,7 @@
require_relative 'test_log_file'
require_relative '../lib/ontologies_linked_data'
+require_relative '../config/config'
if ENV['OVERRIDE_CONFIG'] == 'true'
SOLR_HOST = ENV.include?('SOLR_HOST') ? ENV['SOLR_HOST'] : 'localhost'
@@ -38,8 +39,10 @@
end
end
-require_relative '../config/config'
+
require 'minitest/unit'
+require 'webmock/minitest'
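+# allow real network connections by default; individual tests stub specific endpoints with WebMock.stub_request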
+WebMock.allow_net_connect!
MiniTest::Unit.autorun
# Check to make sure you want to run if not pointed at localhost
@@ -238,7 +241,11 @@ def self.backend_4s_delete
raise StandardError, 'Too many triples in KB, does not seem right to run tests' unless
count_pattern('?s ?p ?o') < 400000
- Goo.sparql_update_client.update('DELETE {?s ?p ?o } WHERE { ?s ?p ?o }')
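+    # drop every named graph individually instead of issuing a single global DELETE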
+ graphs = Goo.sparql_query_client.query("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o . } }")
+ graphs.each_solution do |sol|
+ Goo.sparql_data_client.delete_graph(sol[:g])
+ end
+
LinkedData::Models::SubmissionStatus.init_enum
LinkedData::Models::OntologyType.init_enum
LinkedData::Models::OntologyFormat.init_enum
diff --git a/test/util/test_notifications.rb b/test/util/test_notifications.rb
index f3d93a99..88bc12e0 100644
--- a/test/util/test_notifications.rb
+++ b/test/util/test_notifications.rb
@@ -19,10 +19,14 @@ def self.before_suite
@@ont = LinkedData::SampleData::Ontology.create_ontologies_and_submissions(ont_count: 1, submission_count: 1)[2].first
@@ont.bring_remaining
@@user = @@ont.administeredBy.first
- @@subscription = self.new("before_suite")._subscription(@@ont)
@@user.bring_remaining
- @@user.subscription = [@@subscription]
- @@user.save
+
+ @@subscription = self.new("before_suite")._subscription(@@ont)
+
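+    # subscribe a second user so subscriber and admin notifications can be asserted independently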
+ @@user2 = LinkedData::Models::User.new(username: "tim2", email: "tim2@example.org", password: "password").save
+ @@user2.bring_remaining
+ @@user2.subscription = [@@subscription]
+ @@user2.save
end
def self.after_suite
@@ -79,7 +83,9 @@ def test_new_note_notification
note.relatedOntology = [@@ont]
note.save
assert last_email_sent.subject.include?("[#{@@ui_name} Notes]")
- assert_equal [@@user.email], last_email_sent.to
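+    # two mails go out: first to the extra subscriber, then to the ontology admin plus the admin list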
+ last_emails = all_emails[-2..]
+ assert_equal [@@user.email, LinkedData.settings.admin_emails].flatten.sort, last_emails.last.to.sort
+ assert_equal [@@user2.email].sort, last_emails.first.to.sort
ensure
note.delete if note
end
@@ -92,7 +98,8 @@ def test_processing_complete_notification
subscription = _subscription(ont)
@@user.subscription = @@user.subscription.dup << subscription
@@user.save
- ont.latest_submission(status: :any).process_submission(Logger.new(TestLogFile.new))
+ ont.latest_submission(status: :any).process_submission(Logger.new(TestLogFile.new), process_rdf: true,
+ extract_metadata: false, generate_missing_labels: false)
subscription.bring :user
admin_mails = LinkedData::Utils::Notifier.admin_mails(ont)
mail_sent_count = subscription.user.size + 1
@@ -148,4 +155,33 @@ def test_remote_ontology_pull_notification
end
end
end
+
+ def test_mail_options
+ current_auth_type = LinkedData.settings.smtp_auth_type
+
+ LinkedData.settings.smtp_auth_type = :none
+ options = LinkedData::Utils::Notifier.mail_options
+ expected_options = {
+ address: LinkedData.settings.smtp_host,
+ port: LinkedData.settings.smtp_port,
+ domain: LinkedData.settings.smtp_domain
+ }
+ assert_equal options, expected_options
+
+    # test SMTP authentication-based login
+
+ LinkedData.settings.smtp_auth_type = :plain
+ options = LinkedData::Utils::Notifier.mail_options
+ expected_options = {
+ address: LinkedData.settings.smtp_host,
+ port: LinkedData.settings.smtp_port,
+ domain: LinkedData.settings.smtp_domain,
+ user_name: LinkedData.settings.smtp_user,
+ password: LinkedData.settings.smtp_password,
+ authentication: LinkedData.settings.smtp_auth_type
+ }
+ assert_equal options, expected_options
+
+ LinkedData.settings.smtp_auth_type = current_auth_type
+ end
end