diff --git a/Changes.md b/Changes.md index 17010680..6cfa6221 100644 --- a/Changes.md +++ b/Changes.md @@ -1,5 +1,31 @@ -# unreleased -- bumped fastobo validator to new version ([pull](https://github.com/INCATools/ontology-development-kit/pull/379) + +# v1.2.26 (2 February 2021) +- New versions: + - fastobo validator to new version ([pull](https://github.com/INCATools/ontology-development-kit/pull/379)) + - ROBOT 1.8.1 (lots of new changes, see: [Changelog](https://github.com/ontodev/robot/blob/master/CHANGELOG.md)) + - DOSDPTOOLS 0.16 (lots of speed-up for bulk pattern generation) +- New features: + - new python dependencies ([cogs](https://github.com/ontodev/cogs), a tool to directly manage tsv files in your repo on Google sheets) + - stable serialisation order for JSON files using jq's walk function. -> this decreases the size of the diff for git! + - Some improvements to logging when seeding a new repo, to make it easier to find errors + - A new method to validate the id-ranges file can be invoked using `sh run.sh make validate_idranges` (after update to latest ODK repo) + - modules are now annotated with a dc:source annotation to their original ontology (version) +- New configuration options: + - module_type ([example slme](tests/test-module-star.yaml), [example minimal](tests/test-module-minimal.yaml), [example mireot](tests/test-module-star.yaml)). Direct support for MIREOT and a new module type, minimal. + - To encourage stable versions and releases, ODK, by default, merges imports into the release files. Previously, we continued to release the imports as well - which we do not recommend anymore. If you still wish to release your imports as usual, you can set a flag `release_imports` in the `import_group` section of your makefile (see [example](tests/test-release.yaml)). 
+ - the same as the above applies for reports (see [example](tests/test-robot-report.yaml)) + - The custom sparql checks, and the custom sparql exports, are now directly configurable + - `custom_sparql_checks` : Choose which additional sparql checks you want to run. The related sparql query must be named CHECKNAME-violation.sparql, and be placed in the src/sparql directory (see [example](tests/test-robot-report.yaml)) + - The default checks are `equivalent-classes` and `owldef-self-reference`. + - `custom_sparql_exports` : Choose which custom reports to generate. The related sparql query must be named CHECKNAME.sparql, and be placed in the src/sparql directory (see [example](tests/test-robot-report.yaml)) + - `git_main_branch` : The `main` branch for your repo, default `main`, or (now discouraged, previously) `master`. + - `ci`: continuous integration defaults; currently available: `travis`, `github_actions` + - `create_obo_metadata`: This is mainly for new OBO ontologies. If true, OBO Markdown and PURL configs are created. + - `export_project_yaml`: Default `False`. If set to `True`, project.yaml is created in the top level of the repo. +- Removed a few files from the standard config. This is all part of an effort to slim down the ODK to the least number of necessary files checked into version control: `src/ontology/Dockerfile`,`src/ontology/patterns.sh`, `src/ontology/release.sh`, `src/ontology/test.sh`, and some temporary files. The `patterns` directory and all its contents only appear now when `use_dosdps`=TRUE. +- Technical: + - Refactored ODK Dockerfile (merged some layers) + - added jq 1.6 which is not available via apt-get (yet). 
# v1.2.25 (18 November 2020) - Updated ROBOT to new version 1.7.2, which includes some hotfixes for ROBOT report and update to whelk 1.0.4 diff --git a/Dockerfile b/Dockerfile index a3798300..55ce7e49 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,33 +4,51 @@ FROM ubuntu:18.04 LABEL maintainer="obo-tools@googlegroups.com" ### 2. Get Java, Python and all required system libraries (version control etc) -ENV JAVA_HOME="/usr/lib/jvm/java-1.8-openjdk" -#ENV JAVA_HOME /usr/lib/jvm/java-8-oracle - -RUN apt-get update && apt-get install -y software-properties-common && add-apt-repository ppa:swi-prolog/stable && apt-get upgrade -y \ - && apt-get install -y software-properties-common \ - build-essential git \ - openjdk-8-jre openjdk-8-jdk swi-prolog - - -### 3. Python and all required system libraries (version control etc) +ENV JAVA_HOME="/usr" +WORKDIR /tools +ENV PATH "/tools/:$PATH" +COPY requirements.txt /tools/ +COPY scripts/obodash /tools/ +COPY odk/make-release-assets.py /tools/ -RUN apt-get update \ - && apt-get install -y python3-pip python3-dev subversion make automake gcc g++ unzip rsync curl wget jq openssl git xlsx2csv \ - && cd /usr/local/bin \ - && ln -s /usr/bin/python3 python \ - && pip3 install --upgrade pip setuptools \ - && if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi \ - && if [[ ! 
-e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi \ - && rm -r /root/.cache +#ENV JAVA_HOME /usr/lib/jvm/java-8-oracle +# LAYERSIZE ~1000MB +RUN apt-get update &&\ + apt-get install -y software-properties-common &&\ + add-apt-repository ppa:swi-prolog/stable &&\ + apt-get upgrade -y &&\ + apt-get install -y build-essential \ + git \ + openjdk-8-jre \ + openjdk-8-jdk \ + swi-prolog \ + maven \ + python3-pip \ + python3-dev \ + subversion \ + make \ + automake \ + gcc \ + g++ \ + unzip \ + rsync \ + curl \ + wget \ + jq \ + openssl \ + xlsx2csv &&\ + cd /usr/local/bin \ + && ln -s /usr/bin/python3 python \ + && pip3 install --upgrade pip setuptools \ + && pip3 install -r /tools/requirements.txt \ + && if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi \ + && if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi \ + && rm -r /root/.cache ### 4. Install custom tools +# scripts/droid + -###### Python libraries ###### -WORKDIR /tools -ENV PATH "/tools/:$PATH" -COPY requirements.txt /tools/ -RUN pip3 install -r requirements.txt ###### owltools & OORT ###### # For now we get these from jenkins builds, but these should be obtained @@ -44,9 +62,10 @@ RUN wget https://github.com/owlcollab/owltools/releases/download/$OWLTOOLS/owlto chmod +x /tools/owltools-oort-all.jar ###### Konclude, the DL reasoner ###### +# LAYERSIZE ~28MB RUN wget https://github.com/konclude/Konclude/releases/download/v0.6.2-845/Konclude-v0.6.2-845-LinuxAlpine-x64-GCC8.3.0-Static-Qt-5.13.zip -O /tools/konclude.zip && \ unzip /tools/konclude.zip && \ - mv /tools/Konclude-v0.6.2-845-LinuxAlpine-x64-GCC8.3.0-Static-Qt-5.13 /tools/konclude_reasoner && \ + mv /tools/Konclude-v0.6.2-845-LinuxAlpine-x64-GCC8.3.0-Static-Qt-5.13 /tools/konclude_reasoner && \ rm /tools/konclude.zip && \ chmod +x /tools/konclude_reasoner/Binaries && \ echo "#!/bin/bash" > /tools/Konclude && \ @@ -54,10 +73,10 @@ RUN wget 
https://github.com/konclude/Konclude/releases/download/v0.6.2-845/Koncl chmod +x /tools/Konclude ###### ROBOT ###### -ENV ROBOT v1.7.2 +ENV ROBOT v1.8.1 ARG ROBOT_JAR=https://github.com/ontodev/robot/releases/download/$ROBOT/robot.jar ENV ROBOT_JAR ${ROBOT_JAR} -RUN pwd +# LAYERSIZE ~66MB RUN wget $ROBOT_JAR -O /tools/robot.jar && \ wget https://raw.githubusercontent.com/ontodev/robot/$ROBOT/bin/robot -O /tools/robot && \ chmod +x /tools/robot && \ @@ -73,15 +92,18 @@ RUN wget https://dl.bintray.com/fastobo/fastobo-validator/$FASTOBO_VALIDATOR/fas && chmod +x /tools/fastobo-validator ##### Ammonite ##### +# LAYERSIZE ~31MB RUN (echo "#!/usr/bin/env sh" \ && curl -L https://github.com/lihaoyi/Ammonite/releases/download/2.0.3/2.13-2.0.3) >/tools/amm \ && chmod +x /tools/amm # Force precompile of ammonite files +# LAYERSIZE ~67MB RUN amm /dev/null ###### DOSDPTOOLS ###### -ENV DOSDPVERSION=0.14 +ENV DOSDPVERSION=0.16 ENV PATH "/tools/dosdp-tools/bin:$PATH" +# LAYERSIZE ~200MB RUN wget -nv https://github.com/INCATools/dosdp-tools/releases/download/v$DOSDPVERSION/dosdp-tools-$DOSDPVERSION.tgz \ && tar -zxvf dosdp-tools-$DOSDPVERSION.tgz \ && mv dosdp-tools-$DOSDPVERSION /tools/dosdp-tools \ @@ -97,20 +119,34 @@ RUN swipl -g "pack_install(sparqlprog, [interactive(false)])" -g halt ENV PATH "/root/.local/share/swi-prolog/pack/sparqlprog/bin:$PATH" RUN ln -sf /root/.local/share/swi-prolog/pack/sparqlprog /tools/ -COPY scripts/obo-dash.sh /tools/obodash -RUN cd /tools/ && chmod +x /tools/obodash && git clone https://github.com/OBOFoundry/OBO-Dashboard.git && \ - cd OBO-Dashboard && git checkout docker-dash && echo "DOCKER DASH BRANCH CHECKED OUT" &&\ +RUN cd /tools/ && chmod +x /tools/obodash && git clone --depth 1 --branch docker-dash https://github.com/OBOFoundry/OBO-Dashboard.git && \ + cd OBO-Dashboard && git checkout docker-dash && echo "Dashboard: using branch" &&\ python3 -m pip install -r requirements.txt && echo " " >> Makefile &&\ echo "build/robot.jar:" >> 
Makefile &&\ echo "	echo 'skipped ROBOT jar download' && touch \$@" >> Makefile && echo "" >> Makefile +########## DROID ######### +# LAYERSIZE ~18MB +#RUN apt-get install -y leiningen +#ENV DROID_JAR "droid-0.1.0-SNAPSHOT-standalone.jar" +# LAYERSIZE: ~80MB +#RUN cd /tools/ && mkdir droid_github && cd /tools/droid_github && git clone https://github.com/ontodev/droid &&\ +# cd /tools/droid_github/droid && lein uberjar &&\ +# mv /tools/droid_github/droid/target/uberjar/$DROID_JAR /tools/droid.jar && rm -rf /tools/droid_github &&\ +# ls -l /tools/ +#RUN chmod +x /tools/droid + ### 5. Install ODK ARG ODK_VERSION=0.0.0 ENV ODK_VERSION=${ODK_VERSION} +### TODO REVIEW THIS. As we speak, jq is officially still stalled at 1.5, but for the walk function, we +### Need 1.6 - once 1.6 is official, the following RUN can be removed. +# LAYERSIZE: ~4MB +RUN wget https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -O /tools/jq && chmod +x /tools/jq +COPY odk/odk.py /tools/ COPY template/ /tools/templates/ -COPY odk/ /tools/ RUN chmod +x /tools/*.py ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 diff --git a/Makefile b/Makefile index 7a5c56a4..1373e5ed 100644 --- a/Makefile +++ b/Makefile @@ -27,9 +27,10 @@ test_go_mini: TESTS = $(notdir $(wildcard tests/*.yaml)) TEST_FILES = $(foreach n,$(TESTS), tests/$(n)) -test: custom_tests $(TEST_FILES) +#TEST_FILES = tests/test-release.yaml +test: $(TEST_FILES) custom_tests echo "All tests passed successfully!" 
- + tests/*.yaml: .FORCE $(CMD) -c -C $@ @@ -37,7 +38,7 @@ schema/project-schema.json: ./odk/odk.py dump-schema > $@ # Building docker image -VERSION = "v1.2.25" +VERSION = "v1.2.26" IM=obolibrary/odkfull DEV=obolibrary/odkdev ROBOT_JAR="https://build.obolibrary.io/job/ontodev/job/robot/job/cmd-metrics/6/artifact/bin/robot.jar" diff --git a/configs/omia-odk.yaml b/configs/omia-odk.yaml new file mode 100644 index 00000000..63e51607 --- /dev/null +++ b/configs/omia-odk.yaml @@ -0,0 +1,16 @@ +id: omia +title: "Online Inheritance of Animals Ontology" +github_org: monarch +repo: omia-ontology +report_fail_on: none +use_dosdps: TRUE +dosdp_tools_options: "--obo-prefixes=true" +export_formats: + - owl + - obo + - json +import_group: + products: + - id: ro +robot_java_args: '-Xmx8G' +allow_equivalents: asserted-only diff --git a/odk/odk.py b/odk/odk.py index 25e65267..da0649a9 100755 --- a/odk/odk.py +++ b/odk/odk.py @@ -80,7 +80,7 @@ class SubsetProduct(Product): @dataclass_json @dataclass -class ComponentProduct(): +class ComponentProduct(JsonSchemaMixin): """ Represents an individual component Examples: a file external to the edit file that contains axioms that belong to this ontology @@ -128,7 +128,7 @@ class PatternProduct(Product): @dataclass_json @dataclass -class RoboTemplateProduct(Product): +class RobotTemplateProduct(Product): """ Represents a ROBOT template """ @@ -226,7 +226,19 @@ class ImportGroup(ProductGroup): products : Optional[List[ImportProduct]] = None """all import products""" - + + module_type : str = "slme" + """Module type. Supported: slme, mireot, minimal, custom""" + + module_type_slme : str = "BOT" + """SLME module type. 
Supported: BOT, TOP, STAR""" + + slme_individuals : str = "include" + """See http://robot.obolibrary.org/extract#syntactic-locality-module-extractor-slme""" + + release_imports : bool = False + """If set to True, imports are copied to the release directory.""" + directory : Directory = "imports/" """directory where imports are extracted into to""" @@ -253,6 +265,15 @@ class ReportConfig(JsonSchemaMixin): report_on : List[str] = field(default_factory=lambda: ['edit', '.owl']) """Chose which files to run the report on.""" + + release_reports : bool = False + """ If true, release reports are added as assets to the release (top level directory, reports directory)""" + + custom_sparql_checks : Optional[List[str]] = field(default_factory=lambda: ['equivalent-classes', 'owldef-self-reference']) + """Choose which additional sparql checks you want to run. The related sparql query must be named CHECKNAME-violation.sparql, and be placed in the src/sparql directory""" + + custom_sparql_exports : Optional[List[str]] = field(default_factory=lambda: ['basic-report', 'class-count-by-prefix', 'edges', 'xrefs', 'obsoletes', 'synonyms']) + """Choose which custom reports to generate. The related sparql query must be named CHECKNAME.sparql, and be placed in the src/sparql directory.""" @dataclass_json @dataclass -class RoboTemplateGroup(): +class RobotTemplateGroup(JsonSchemaMixin): """ - A configuration section that consists of a list of `RoboTemplateProduct` descriptions + A configuration section that consists of a list of `RobotTemplateProduct` descriptions """ directory : Directory = "../templates/" - products : Optional[List[RoboTemplateProduct]] = None + products : Optional[List[RobotTemplateProduct]] = None @dataclass_json @dataclass @@ -359,8 +380,11 @@ class OntologyProject(JsonSchemaMixin): github_org : str = "" """Name of github org or username where repo will live. 
Examples: obophenotype, cmungall""" + + git_main_branch : str = "main" + """The main branch for your repo, such as main, or (now discouraged) master.""" - edit_format : str = 'owl' + edit_format : str = "owl" """Format in which the edit file is managed, either obo or owl""" robot_version: Optional[str] = None @@ -375,25 +399,28 @@ class OntologyProject(JsonSchemaMixin): use_external_date: bool = False """Flag to set if you want odk to use the host `date` rather than the docker internal `date`""" - reasoner : str = 'ELK' + export_project_yaml: bool = False + """Flag to set if you want a full project.yaml to be exported, including all the default options.""" + + reasoner : str = "ELK" """Name of reasoner to use in ontology pipeline, see robot reason docs for allowed values""" - exclude_tautologies : str = 'structural' + exclude_tautologies : str = "structural" """Remove tautologies such as A SubClassOf: owl:Thing or owl:Nothing SubclassOf: A. For more information see http://robot.obolibrary.org/reason#excluding-tautologies""" - primary_release : str = 'full' + primary_release : str = "full" """Which release file should be published as the primary release artefact, i.e. foo.owl""" - license : str = 'https://creativecommons.org/licenses/unspecified' + license : str = "https://creativecommons.org/licenses/unspecified" """Which license is ontology supplied under - must be an IRI.""" - description : str = 'None' + description : str = "None" """Provide a short description of the ontology""" use_dosdps : bool = False """if true use dead simple owl design patterns""" - public_release : str = 'none' + public_release : str = "none" """if true add functions to run automated releases (experimental). 
Current options are: github_curl, github_python.""" public_release_assets : Optional[List[str]] = None @@ -402,12 +429,18 @@ class OntologyProject(JsonSchemaMixin): release_date : bool = False """if true, releases will be tagged with a release date (oboInOwl:date)""" - allow_equivalents : str = 'all' + allow_equivalents : str = "all" """can be all, none or assert-only (see ROBOT documentation: http://robot.obolibrary.org/reason)""" + ci : Optional[List[str]] = field(default_factory=lambda: ['travis', 'github_actions']) + """continuous integration defaults; currently available: travis, github_actions""" + import_pattern_ontology : bool = False """if true import pattern.owl""" + create_obo_metadata : bool = True + """if true OBO Markdown and PURL configs are created.""" + gzip_main : bool = False """if true add a gzipped version of the main artefact""" @@ -432,7 +465,7 @@ class OntologyProject(JsonSchemaMixin): catalog_file : str = "catalog-v001.xml" """Name of the catalog file to be used by the build.""" - uribase : str = 'http://purl.obolibrary.org/obo' + uribase : str = "http://purl.obolibrary.org/obo" """Base URI for PURLs. 
DO NOT MODIFY AT THIS TIME, code is still hardwired for OBO """ contact : Optional[Person] = None @@ -444,10 +477,9 @@ class OntologyProject(JsonSchemaMixin): contributors : Optional[List[Person]] = None """List of ontology contributors (currently setting this has no effect)""" - # product groups - robot_report : Optional[ReportConfig] = ReportConfig() - """Block that includes information on all ontology imports to be generated""" - + robot_report : Dict[str, Any] = field(default_factory=lambda: ReportConfig().to_dict()) + """Block that includes settings for ROBOT report, ROBOT verify and additional reports that are generated""" + # product groups import_group : Optional[ImportGroup] = None """Block that includes information on all ontology imports to be generated""" @@ -464,7 +496,7 @@ class OntologyProject(JsonSchemaMixin): pattern_pipelines_group : Optional[PatternPipelineGroup] = None """Block that includes information on all ontology imports to be generated""" - robotemplate_group : Optional[RoboTemplateGroup] = None + robotemplate_group : Optional[RobotTemplateGroup] = None """Block that includes information on all ROBOT templates used""" def fill_missing(self): @@ -739,8 +771,9 @@ def seed(config, clean, outdir, templatedir, dependencies, title, user, source, tgts.append(derived_file) tgt_project_file = "{}/project.yaml".format(outdir) - save_project_yaml(project, tgt_project_file) - tgts.append(tgt_project_file) + if project.export_project_yaml: + save_project_yaml(project, tgt_project_file) + tgts.append(tgt_project_file) if source is not None: copyfile(source, "{}/src/ontology/{}-edit.{}".format(outdir, project.id, project.edit_format)) if config is not None: @@ -775,7 +808,7 @@ def seed(config, clean, outdir, templatedir, dependencies, title, user, source, def runcmd(cmd): logging.info("RUNNING: {}".format(cmd)) - p = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + p = subprocess.Popen([cmd], stdout=subprocess.PIPE, 
stderr=subprocess.PIPE, shell=True, universal_newlines=True) (out, err) = p.communicate() logging.info('OUT: {}'.format(out)) if err: diff --git a/requirements.txt b/requirements.txt index 24de5d7a..1499a383 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ jsonpath_rw >=0.0 jsonschema >=0.0 jupyter>=0.0 matplotlib_venn>=0.0 -matplotlib>=0.0 +matplotlib==2.2.3 numpy >=0.0 ontobio>=0.0 pandas >=0.0 @@ -19,3 +19,7 @@ requests >=0.0 ruamel.yaml >=0.0 seaborn>=0.0 upsetplot>=0.0 +plotly>=0.0 +networkx==2.5 +ontodev-cogs>=0.0 +sssom==0.14.11.dev0 \ No newline at end of file diff --git a/schema/project-schema.json b/schema/project-schema.json index 44243570..f574417a 100644 --- a/schema/project-schema.json +++ b/schema/project-schema.json @@ -1,45 +1,203 @@ { - "$schema": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-06/schema#", "definitions": { - "ImportGroup": { - "description": "\n A configuration section that consists of a list of `ImportProduct` descriptions\n\n Controls extraction of import modules via robot extract into the \"imports/\" directory\n ", + "CommandSettings": { + "description": "\n Settings to be provided to a tool like ROBOT\n ", "properties": { - "directory": { + "memory_gb": { + "type": "integer" + } + }, + "type": "object" + }, + "ComponentGroup": { + "allOf": [ + { + "$ref": "#/definitions/ComponentProduct" + }, + { + "properties": { + "directory": { + "default": "components", + "type": "string" + }, + "products": { + "items": { + "$ref": "#/definitions/ComponentProduct" + }, + "type": "array" + } + }, + "type": "object" + } + ], + "description": "\n A configuration section that consists of a list of `ComponentProduct` descriptions\n\n Controls extraction of import modules via robot extract into the \"components/\" directory\n " + }, + "ComponentProduct": { + "description": "\n Represents an individual component\n Examples: a file external to the edit file that contains axioms that 
belong to this ontology\n Components are usually maintained manually.\n ", + "properties": { + "filename": { "type": "string" }, - "disabled": { - "type": "boolean" + "source": { + "type": "string" + } + }, + "type": "object" + }, + "ImportGroup": { + "allOf": [ + { + "$ref": "#/definitions/ProductGroup" }, - "ids": { - "items": { - "type": "string" + { + "properties": { + "directory": { + "default": "imports/", + "type": "string" + }, + "module_type": { + "default": "slme", + "type": "string" + }, + "module_type_slme": { + "default": "BOT", + "type": "string" + }, + "products": { + "items": { + "$ref": "#/definitions/ImportProduct" + }, + "type": "array" + }, + "release_imports": { + "default": false, + "type": "boolean" + }, + "slme_individuals": { + "default": "include", + "type": "string" + } }, - "type": "array" + "type": "object" + } + ], + "description": "\n A configuration section that consists of a list of `ImportProduct` descriptions\n\n Controls extraction of import modules via robot extract into the \"imports/\" directory\n " + }, + "ImportProduct": { + "allOf": [ + { + "$ref": "#/definitions/Product" }, - "products": { - "items": { - "$ref": "#/definitions/ImportProduct" + { + "properties": { + "mirror_from": { + "type": "string" + } }, - "type": "array" + "type": "object" + } + ], + "description": "\n Represents an individual import\n Examples: 'uberon' (in go)\n Imports are typically built from an upstream source, but this can be configured\n " + }, + "PatternGroup": { + "allOf": [ + { + "$ref": "#/definitions/ProductGroup" + }, + { + "properties": { + "directory": { + "default": "../patterns/", + "type": "string" + }, + "products": { + "items": { + "$ref": "#/definitions/PatternProduct" + }, + "type": "array" + } + }, + "type": "object" + } + ], + "description": "\n A configuration section that consists of a list of `PatternProduct` descriptions\n\n " + }, + "PatternPipelineGroup": { + "allOf": [ + { + "$ref": "#/definitions/ProductGroup" + }, + { + 
"properties": { + "products": { + "items": { + "$ref": "#/definitions/PatternPipelineProduct" + }, + "type": "array" + } + }, + "type": "object" + } + ], + "description": "\n A configuration section that consists of a list of `PatternPipelineProduct` descriptions\n\n Controls the handling of patterns data in the \"src/patterns/data\" directory\n " + }, + "PatternPipelineProduct": { + "allOf": [ + { + "$ref": "#/definitions/Product" + }, + { + "properties": { + "dosdp_tools_options": { + "default": "--obo-prefixes=true", + "type": "string" + } + }, + "type": "object" + } + ], + "description": "\n Represents an individual pattern pipeline\n Examples: manual curation pipeline, auto curation pipeline\n Each pipeline gets their own specific directory\n " + }, + "PatternProduct": { + "allOf": [ + { + "$ref": "#/definitions/Product" + }, + { + "properties": {}, + "type": "object" + } + ], + "description": "Represents a DOSDP template product\n \n The products here can be manfested as CSVs (from 'parse'ing OWL)\n or they may be OWL (from the dosdp 'generate' command)\n " + }, + "Product": { + "description": "\n abstract base class for all products.\n\n Here, a product is something that is produced by an ontology workflow.\n A product can be manifested in different formats.\n \n For example, goslim_prok is a subset (aka slim) product from GO,\n this can be manifest as obo, owl, json\n ", + "properties": { + "description": { + "type": "string" + }, + "id": { + "type": "string" }, "rebuild_if_source_changes": { + "default": true, "type": "boolean" + }, + "robot_settings": { + "$ref": "#/definitions/CommandSettings" } }, "required": [ - "disabled", - "rebuild_if_source_changes", - "directory" + "id" ], "type": "object" }, - "PatternGroup": { - "description": "\n A configuration section that consists of a list of `PatternProduct` descriptions\n\n ", + "ProductGroup": { + "description": "\n abstract base class for all product groups.\n\n A product group is a simple holder for a 
list of\n groups, with the ability to set configurations that\n hold by default for all within that group.\n\n Note: currently the configuration can specify\n EITHER a list of ontology ids (e.g. uberon, cl)\n OR a list of product objects\n OR some mixture\n\n For example, in specifying upstream imports I can\n be lazy and just list the ids, but if I need to\n configure each one individually then I need to specify\n the full product object.\n\n This buys some simplicity for the majority of projects\n that don't do anything fancy, but at the price of overall\n complexity\n ", "properties": { - "directory": { - "type": "string" - }, "disabled": { + "default": false, "type": "boolean" }, "ids": { @@ -48,58 +206,107 @@ }, "type": "array" }, - "products": { - "items": { - "$ref": "#/definitions/PatternProduct" - }, - "type": "array" - }, "rebuild_if_source_changes": { + "default": true, "type": "boolean" } }, - "required": [ - "disabled", - "rebuild_if_source_changes", - "directory" - ], "type": "object" }, - "SubsetGroup": { - "description": "\n A configuration section that consists of a list of `SubsetProduct` descriptions\n\n Controls export of subsets/slims into the \"subsets/\" directory\n ", + "RobotTemplateGroup": { + "description": "\n A configuration section that consists of a list of `RobotTemplateProduct` descriptions\n ", "properties": { "directory": { + "default": "../templates/", "type": "string" }, - "disabled": { - "type": "boolean" - }, - "ids": { + "products": { "items": { - "type": "string" + "$ref": "#/definitions/RobotTemplateProduct" }, "type": "array" + } + }, + "type": "object" + }, + "RobotTemplateProduct": { + "allOf": [ + { + "$ref": "#/definitions/Product" }, - "products": { - "items": { - "$ref": "#/definitions/SubsetProduct" + { + "properties": {}, + "type": "object" + } + ], + "description": "\n Represents a ROBOT template\n " + }, + "SubsetGroup": { + "allOf": [ + { + "$ref": "#/definitions/ProductGroup" + }, + { + "properties": { + 
"directory": { + "default": "subsets/", + "type": "string" + }, + "products": { + "items": { + "$ref": "#/definitions/SubsetProduct" + }, + "type": "array" + } }, - "type": "array" + "type": "object" + } + ], + "description": "\n A configuration section that consists of a list of `SubsetProduct` descriptions\n\n Controls export of subsets/slims into the \"subsets/\" directory\n " + }, + "SubsetProduct": { + "allOf": [ + { + "$ref": "#/definitions/Product" }, - "rebuild_if_source_changes": { - "type": "boolean" + { + "properties": { + "creators": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" } - }, - "required": [ - "disabled", - "rebuild_if_source_changes", - "directory" ], - "type": "object" + "description": "\n Represents an individual subset.\n Examples: goslim_prok (in go), eco_subset (in ro)\n " } }, "description": "\n A configuration for an ontology project/repository\n\n This is divided into project-wide settings, plus\n groups of products. Products are grouped into 4\n categories (more may be added)\n ", "properties": { + "allow_equivalents": { + "default": "all", + "type": "string" + }, + "catalog_file": { + "default": "catalog-v001.xml", + "type": "string" + }, + "ci": { + "default": [ + "travis", + "github_actions" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "components": { + "$ref": "#/definitions/ComponentGroup" + }, "contact": { "type": "string" }, @@ -109,49 +316,172 @@ }, "type": "array" }, + "create_obo_metadata": { + "default": true, + "type": "boolean" + }, "creators": { "items": { "type": "string" }, "type": "array" }, + "description": { + "default": "None", + "type": "string" + }, + "dosdp_tools_options": { + "default": "--obo-prefixes=true", + "type": "string" + }, "edit_format": { + "default": "owl", + "type": "string" + }, + "exclude_tautologies": { + "default": "structural", + "type": "string" + }, + "export_formats": { + "default": [ + "owl", + "obo" + ], + "items": { + "type": 
"string" + }, + "type": "array" + }, + "export_project_yaml": { + "default": false, + "type": "boolean" + }, + "git_main_branch": { + "default": "main", + "type": "string" + }, + "git_user": { + "default": "", "type": "string" }, "github_org": { + "default": "", "type": "string" }, + "gzip_main": { + "default": false, + "type": "boolean" + }, "id": { + "default": "", "type": "string" }, "import_group": { "$ref": "#/definitions/ImportGroup" }, + "import_pattern_ontology": { + "default": false, + "type": "boolean" + }, + "license": { + "default": "https://creativecommons.org/licenses/unspecified", + "type": "string" + }, + "namespaces": { + "items": { + "type": "string" + }, + "type": "array" + }, "obo_format_options": { + "default": "", "type": "string" }, "pattern_group": { "$ref": "#/definitions/PatternGroup" }, + "pattern_pipelines_group": { + "$ref": "#/definitions/PatternPipelineGroup" + }, + "primary_release": { + "default": "full", + "type": "string" + }, + "public_release": { + "default": "none", + "type": "string" + }, + "public_release_assets": { + "items": { + "type": "string" + }, + "type": "array" + }, "reasoner": { + "default": "ELK", "type": "string" }, + "release_artefacts": { + "default": [ + "full", + "base" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "release_date": { + "default": false, + "type": "boolean" + }, "repo": { + "default": "", "type": "string" }, - "report_fail_on": { + "robot_java_args": { + "default": "", "type": "string" }, + "robot_report": { + "default": { + "custom_profile": false, + "custom_sparql_checks": [ + "equivalent-classes", + "owldef-self-reference" + ], + "custom_sparql_exports": [ + "basic-report", + "class-count-by-prefix", + "edges", + "xrefs", + "obsoletes", + "synonyms" + ], + "fail_on": null, + "release_reports": false, + "report_on": [ + "edit", + ".owl" + ], + "use_labels": true + }, + "type": "object" + }, + "robot_settings": { + "$ref": "#/definitions/CommandSettings" + }, 
"robot_version": { "type": "string" }, "robotemplate_group": { - "type": "object" + "$ref": "#/definitions/RobotTemplateGroup" }, "subset_group": { "$ref": "#/definitions/SubsetGroup" }, "title": { + "default": "", "type": "string" }, "travis_emails": { @@ -161,27 +491,35 @@ "type": "array" }, "uribase": { + "default": "http://purl.obolibrary.org/obo", "type": "string" }, "use_dosdps": { + "default": false, + "type": "boolean" + }, + "use_external_date": { + "default": false, "type": "boolean" } }, - "required": [ - "id", - "title", - "repo", - "github_org", - "edit_format", - "reasoner", - "use_dosdps", - "obo_format_options", - "uribase" - ], "type": "object" } { - "$schema": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-06/schema#", + "allOf": [ + { + "$ref": "#/definitions/Product" + }, + { + "properties": { + "mirror_from": { + "type": "string" + } + }, + "type": "object" + } + ], "definitions": { "CommandSettings": { "description": "\n Settings to be provided to a tool like ROBOT\n ", @@ -191,29 +529,80 @@ } }, "type": "object" + }, + "Product": { + "description": "\n abstract base class for all products.\n\n Here, a product is something that is produced by an ontology workflow.\n A product can be manifested in different formats.\n \n For example, goslim_prok is a subset (aka slim) product from GO,\n this can be manifest as obo, owl, json\n ", + "properties": { + "description": { + "type": "string" + }, + "id": { + "type": "string" + }, + "rebuild_if_source_changes": { + "default": true, + "type": "boolean" + }, + "robot_settings": { + "$ref": "#/definitions/CommandSettings" + } + }, + "required": [ + "id" + ], + "type": "object" } }, - "description": "\n Represents an individual import\n Examples: 'uberon' (in go)\n Imports are typically built from an upstream source, but this can be configured\n ", - "properties": { - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "mirror_from": { - 
"type": "string" + "description": "\n Represents an individual import\n Examples: 'uberon' (in go)\n Imports are typically built from an upstream source, but this can be configured\n " +} +{ + "$schema": "http://json-schema.org/draft-06/schema#", + "allOf": [ + { + "$ref": "#/definitions/Product" }, - "rebuild_if_source_changes": { - "type": "boolean" + { + "properties": { + "dosdp_tools_options": { + "default": "--obo-prefixes=true", + "type": "string" + } + }, + "type": "object" + } + ], + "definitions": { + "CommandSettings": { + "description": "\n Settings to be provided to a tool like ROBOT\n ", + "properties": { + "memory_gb": { + "type": "integer" + } + }, + "type": "object" }, - "robot_settings": { - "$ref": "#/definitions/CommandSettings" + "Product": { + "description": "\n abstract base class for all products.\n\n Here, a product is something that is produced by an ontology workflow.\n A product can be manifested in different formats.\n \n For example, goslim_prok is a subset (aka slim) product from GO,\n this can be manifest as obo, owl, json\n ", + "properties": { + "description": { + "type": "string" + }, + "id": { + "type": "string" + }, + "rebuild_if_source_changes": { + "default": true, + "type": "boolean" + }, + "robot_settings": { + "$ref": "#/definitions/CommandSettings" + } + }, + "required": [ + "id" + ], + "type": "object" } }, - "required": [ - "id", - "rebuild_if_source_changes" - ], - "type": "object" + "description": "\n Represents an individual pattern pipeline\n Examples: manual curation pipeline, auto curation pipeline\n Each pipeline gets their own specific directory\n " } diff --git a/scripts/droid b/scripts/droid new file mode 100644 index 00000000..29de33a4 --- /dev/null +++ b/scripts/droid @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + + +java -jar /tools/droid.jar \ No newline at end of file diff --git a/scripts/obo-dash.sh b/scripts/obodash similarity index 100% rename from scripts/obo-dash.sh rename to scripts/obodash diff --git 
a/template/.gitignore.jinja2 b/template/.gitignore.jinja2 index 7f11b6ed..adeae095 100644 --- a/template/.gitignore.jinja2 +++ b/template/.gitignore.jinja2 @@ -9,6 +9,7 @@ bin/ src/ontology/mirror src/ontology/mirror/* +src/ontology/reports/* src/ontology/{{ project.id }}.owl src/ontology/{{ project.id }}.obo src/ontology/{{ project.id }}.json diff --git a/template/_dynamic_files.jinja2 b/template/_dynamic_files.jinja2 index 1f68779c..e82a0bbc 100644 --- a/template/_dynamic_files.jinja2 +++ b/template/_dynamic_files.jinja2 @@ -148,7 +148,12 @@ Datatype: rdf:PlainLiteral ## ## If you need to customize your Makefile, make ## changes here rather than in the main Makefile - +{% if project.import_group is defined -%} +{%- if 'custom' == project.import_group.module_type %} +imports/%_import.owl: mirror/%.owl + echo "ERROR: You have configured your default module type to be custom; this behavior needs to be overwritten in {{ project.id }}.Makefile!" && touch $@ +{% endif %} +{%- endif %} {#- Imports files, one per import @@ -232,6 +237,32 @@ NBO:0000313 TODO: include a script that makes it easy for maintainers to do this. 
#} +{%- if project.create_obo_metadata %} +^^^ src/metadata/README.md +Metadata files for the OBO Library + + * [{{ project.id }}.yml]({{ project.id }}.yml) + * Determines how your purl.obolibrary.org/obo/{{ project.id }}/ redirects will be handled + * Go here: https://github.com/OBOFoundry/purl.obolibrary.org/tree/master/config + * Click [New File](https://github.com/OBOFoundry/purl.obolibrary.org/new/master/config) + * Paste in the contents of [{{ project.id }}.yml]({{ project.id }}.yml) + * Click "Commit new file" + * IMPORTANT: remember to make a pull request + * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library* + * [{{ project.id }}.md]({{ project.id }}.md) + * Determines how your metadata is shown on OBO Library, OLS and AberOWL + * Go here: https://github.com/OBOFoundry/OBOFoundry.github.io/tree/master/ontology + * Click [New File](https://github.com/OBOFoundry/OBOFoundry.github.io/new/master/ontology) + * Paste in the contents of [{{ project.id }}.md]({{ project.id }}.md) + * Click "Commit new file" + * IMPORTANT: remember to make a pull request + * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library* + +For more background see: + + * http://obofoundry.org/ + * http://obofoundry.org/faq/how-do-i-edit-metadata.html + ^^^ src/metadata/{{ project.id }}.md --- layout: ontology_detail @@ -285,8 +316,8 @@ idspace: {{ project.id|upper }} base_url: /obo/{{ project.id }} products: -- {{ project.id }}.owl: https://raw.githubusercontent.com/{{ project.github_org }}/{{ project.repo }}/master/{{ project.id }}.owl -- {{ project.id }}.obo: https://raw.githubusercontent.com/{{ project.github_org }}/{{ project.repo }}/master/{{ project.id }}.obo +- {{ project.id }}.owl: https://raw.githubusercontent.com/{{ project.github_org }}/{{ project.repo }}/{{ project.git_main_branch }}/{{ project.id }}.owl +- {{ project.id }}.obo: https://raw.githubusercontent.com/{{ project.github_org 
}}/{{ project.repo }}/{{ project.git_main_branch }}/{{ project.id }}.obo term_browser: ontobee example_terms: @@ -305,11 +336,65 @@ entries: ## generic fall-through, serve direct from github by default - prefix: / - replacement: https://raw.githubusercontent.com/{{ project.github_org }}/{{ project.repo }}/master/ + replacement: https://raw.githubusercontent.com/{{ project.github_org }}/{{ project.repo }}/{{ project.git_main_branch }}/ +{%- endif %} {#- Example pattern implementation TSV #} {%- if project.use_dosdps %} +^^^ src/patterns/definitions.owl +Prefix(:=) +Prefix(owl:=) +Prefix(rdf:=) +Prefix(xml:=) +Prefix(xsd:=) +Prefix(rdfs:=) + + +Ontology( + +) +^^^ src/patterns/pattern.owl +Prefix(:=) +Prefix(owl:=) +Prefix(rdf:=) +Prefix(xml:=) +Prefix(xsd:=) +Prefix(rdfs:=) + + +Ontology( + +) +^^^ src/patterns/README.md +# DOSDP patterns - editors docs +^^^ src/patterns/data/default/example.tsv +defined_class example + +^^^ src/patterns/dosdp-patterns/example.yaml +pattern_name: example +pattern_iri: http://purl.obolibrary.org/obo/{{ project.id }}/example.yaml +description: "This is a minimal example pattern." 
+ +classes: + example: owl:Thing + +relations: + part_of: BFO:0000050 + +vars: + example: "'example'" + +name: + text: "Part of %s" + vars: + - example + +equivalentTo: + text: "'part_of' some %s" + vars: + - example +^^^ src/patterns/dosdp-patterns/external.txt ^^^ src/patterns/data/default/README.md Documentation of the Default DOSDP Pipeline {%- if project.pattern_pipelines_group is defined %} @@ -376,4 +461,73 @@ ERROR misused_obsolete_label ERROR multiple_definitions ERROR multiple_equivalent_classes ERROR multiple_labels -{%- endif %} \ No newline at end of file +{%- endif %} +{%- if 'basic' in project.release_artefacts or project.primary_release == 'basic' %} +^^^ src/ontology/keeprelations.txt +BFO:0000050 +{% endif -%} +{% if project.ci is defined -%}{% if 'travis' in project.ci %} +^^^ .travis.yml +## REMEMBER TO SET UP YOUR GIT REPO FOR TRAVIS +## Go to: https://travis-ci.org/{{ project.github_org }} for details +sudo: required + +services: + - docker + +before_install: + - docker pull obolibrary/odkfull + +# command to run tests +script: cd src/ontology && sh run.sh make test + +#after_success: +# coveralls + +# whitelist +branches: + only: + - {{ project.git_main_branch }} + - test-travis + +### Add your own lists here +### See https://github.com/INCATools/ontology-development-kit/issues/35 +notifications: + email: + - obo-ci-reports-all@groups.io +{% endif -%}{% if 'github_actions' in project.ci %} +^^^ .github/workflows/qc.yml +# Basic ODK workflow + +name: CI + +# Controls when the action will run. 
+on: + # Triggers the workflow on push or pull request events but only for the {{ project.git_main_branch }} branch + push: + branches: [ {{ project.git_main_branch }} ] + pull_request: + branches: [ {{ project.git_main_branch }} ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "ontology_qc" + ontology_qc: + # The type of runner that the job will run on + runs-on: ubuntu-latest + container: obolibrary/odkfull:{% if env is defined -%}{{env['ODK_VERSION'] or "v1.2.25" }}{%- else %}v1.2.25{% endif %} + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + - name: Run ontology QC checks + env: + DEFAULT_BRANCH: {{ project.git_main_branch }} + run: cd src/ontology && make ROBOT_ENV='ROBOT_JAVA_ARGS=-Xmx6G' test IMP=false PAT=false +{% endif -%} +{% endif -%} \ No newline at end of file diff --git a/template/src/metadata/README.md.jinja2 b/template/src/metadata/README.md.jinja2 deleted file mode 100644 index db8df160..00000000 --- a/template/src/metadata/README.md.jinja2 +++ /dev/null @@ -1,23 +0,0 @@ -Metadata files for the OBO Library - - * [{{ project.id }}.yml]({{ project.id }}.yml) - * Determines how your purl.obolibrary.org/obo/{{ project.id }}/ redirects will be handled - * Go here: https://github.com/OBOFoundry/purl.obolibrary.org/tree/master/config - * Click [New File](https://github.com/OBOFoundry/purl.obolibrary.org/new/master/config) - * Paste in the contents of [{{ project.id }}.yml]({{ project.id }}.yml) - * Click "Commit new file" - * IMPORTANT: remember to make a pull request - * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library* - * [{{ project.id }}.md]({{ project.id 
}}.md) - * Determines how your metadata is shown on OBO Library, OLS and AberOWL - * Go here: https://github.com/OBOFoundry/OBOFoundry.github.io/tree/master/ontology - * Click [New File](https://github.com/OBOFoundry/OBOFoundry.github.io/new/master/ontology) - * Paste in the contents of [{{ project.id }}.md]({{ project.id }}.md) - * Click "Commit new file" - * IMPORTANT: remember to make a pull request - * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library* - -For more background see: - - * http://obofoundry.org/ - * http://obofoundry.org/faq/how-do-i-edit-metadata.html diff --git a/template/src/ontology/Dockerfile b/template/src/ontology/Dockerfile deleted file mode 100644 index 661e66df..00000000 --- a/template/src/ontology/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM cmungall/osk - -CMD make test diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2 index 1e0ac5eb..4e92bda9 100644 --- a/template/src/ontology/Makefile.jinja2 +++ b/template/src/ontology/Makefile.jinja2 @@ -13,7 +13,7 @@ # Generated using ontology-starter-kit # ODK Version: {% if env is defined %}{{env['ODK_VERSION'] or "Unknown" }}{% else %}"Unknown"{% endif %} # ---------------------------------------- -# +# IMPORTANT: DO NOT EDIT THIS FILE. To override default make goals, use {{ project.id }}.Makefile instead # ---------------------------------------- # Standard Constants @@ -31,19 +31,23 @@ RELEASEDIR= ../.. 
REPORTDIR= reports TMPDIR= tmp SPARQLDIR = ../sparql +{%- if project.robot_report.custom_profile %} ROBOT_PROFILE = profile.txt +{%- endif %} REPORT_FAIL_ON = {{ project.robot_report.fail_on|default('ERROR') }} REPORT_LABEL = {% if project.robot_report.use_labels %}-l true{% endif %} REPORT_PROFILE_OPTS = {% if project.robot_report.custom_profile %}--profile $(ROBOT_PROFILE){% endif %} OBO_FORMAT_OPTIONS = {{ project.obo_format_options }} -SPARQL_VALIDATION_CHECKS = equivalent-classes trailing-whitespace owldef-self-reference xref-syntax nolabels -SPARQL_EXPORTS = basic-report class-count-by-prefix edges xrefs obsoletes synonyms +SPARQL_VALIDATION_CHECKS = {% if project.robot_report.custom_sparql_checks is defined %}{% for x in project.robot_report.custom_sparql_checks %} {{ x }}{% endfor %}{% endif %} +SPARQL_EXPORTS = {% if project.robot_report.custom_sparql_exports is defined %}{% for x in project.robot_report.custom_sparql_exports %} {{ x }}{% endfor %}{% endif %} ODK_VERSION_MAKEFILE = {% if env is defined %}{{env['ODK_VERSION'] or "Unknown" }}{% else %}"Unknown"{% endif %} -TODAY ?= $(shell date +%Y-%m-%d) -OBODATE ?= $(shell date +'%d:%m:%Y %H:%M') -VERSION=$(TODAY) +TODAY ?= $(shell date +%Y-%m-%d) +OBODATE ?= $(shell date +'%d:%m:%Y %H:%M') +VERSION= $(TODAY) ANNOTATE_ONTOLOGY_VERSION = annotate -V $(ONTBASE)/releases/$(VERSION)/$@ --annotation owl:versionInfo $(VERSION) +OTHER_SRC = {% if project.use_dosdps -%}$(PATTERNDIR)/definitions.owl {% endif -%}{% if project.components is defined -%}{% for component in project.components.products -%}{{ project.components.directory }}/{{ component.filename }} {% endfor -%}{% endif %} +ONTOLOGYTERMS = tmp/ontologyterms.txt {%- if project.use_dosdps %} PATTERNDIR= ../patterns @@ -51,12 +55,8 @@ DOSDP_SCHEMA= http:// # change to PURL when ready. 
PATTERN_TESTER= simple_pattern_tester.py DOSDPT= dosdp-tools PATTERN_RELEASE_FILES= $(PATTERNDIR)/definitions.owl $(PATTERNDIR)/pattern.owl + {% endif %} -# seed.txt contains all referenced entities -IMPORTSEED=seed.txt -SRCMERGED=tmp/merged-$(SRC) -SIMPLESEED=simple_seed.txt -PRESEED=tmp/pre_seed.txt FORMATS = $(sort {% for format in project.export_formats %} {{ format }}{% endfor %} owl) FORMATS_INCL_TSV = $(sort $(FORMAT) tsv) @@ -75,7 +75,7 @@ odkversion: echo "ODK Makefile version: $(ODK_VERSION_MAKEFILE) (this is the version of the ODK with which this Makefile was generated, not the version of the ODK you are running)" &&\ echo "ROBOT version (ODK): " && $(ROBOT) --version -$TMPDIR: +$(TMPDIR) $(REPORTDIR) : mkdir -p $@ ## -- main targets -- @@ -137,6 +137,11 @@ ASSETS = \ $(REPORT_FILES) \ $(SUBSET_FILES) +RELEASE_ASSETS = \ + $(MAIN_FILES) {% if project.import_group is defined %}{% if project.import_group.release_imports %}$(IMPORT_FILES) {% endif %}{% endif %}{% if project.robot_report.release_reports %}$(REPORT_FILES){% endif -%}\ + $(REPORT_FILES) \ + $(SUBSET_FILES) + all_assets: $(ASSETS) show_assets: @@ -148,198 +153,24 @@ show_assets: # Release Management # ---------------------------------------- +{% if 'basic' in project.release_artefacts or project.primary_release == 'basic' -%} KEEPRELATIONS=keeprelations.txt -ONTOLOGYTERMS=ontologyterms.txt +{% endif -%} +CLEANFILES=$(MAIN_FILES) $(SRCMERGED) # This should be executed by the release manager whenever time comes to make a release. 
# It will ensure that all assets/files are fresh, and will copy to release folder prepare_release: $(ASSETS) $(PATTERN_RELEASE_FILES) - rsync -R $(ASSETS) $(RELEASEDIR) &&\ + rsync -R $(RELEASE_ASSETS) $(RELEASEDIR) &&\ {% if project.use_dosdps -%} mkdir -p $(RELEASEDIR)/patterns &&\ cp $(PATTERN_RELEASE_FILES) $(RELEASEDIR)/patterns &&\ {% endif -%} - rm -f $(MAIN_FILES) $(SRCMERGED) &&\ + rm -f $(CLEANFILES) &&\ echo "Release files are now in $(RELEASEDIR) - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab" prepare_initial_release: prepare_release - cd $(RELEASEDIR) && git add $(ASSETS) - -# ---------------------------------------- -# Export formats -# ---------------------------------------- - - -{% for release in project.release_artefacts -%} -{% if 'obo' in project.export_formats -%} -$(ONT)-{{ release }}.obo: $(ONT)-{{ release }}.owl - $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo -{% endif -%} -{% if 'ttl' in project.export_formats -%} -$(ONT)-{{ release }}.ttl: $(ONT)-{{ release }}.owl - $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ -{% endif -%} -{% if 'json' in project.export_formats -%} -$(ONT)-{{ release }}.json: $(ONT)-{{ release }}.owl - $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f json -o $@.tmp.json && mv $@.tmp.json $@ -{% endif -%} -{% endfor -%} - -{% if 'base' not in project.release_artefacts -%} -{% if 'obo' in project.export_formats -%} -$(ONT)-base.obo: $(ONT)-base.owl - $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo -{% endif -%} -{% if 'ttl' in project.export_formats -%} -$(ONT)-base.ttl: $(ONT)-base.owl - $(ROBOT) annotate --input 
$< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ -{% endif -%} -{% if 'json' in project.export_formats -%} -$(ONT)-base.json: $(ONT)-base.owl - $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f json -o $@.tmp.json && mv $@.tmp.json $@ -{% endif -%} -{% endif -%} - -{% if 'full' not in project.release_artefacts -%} -{% if 'obo' in project.export_formats -%} -$(ONT)-full.obo: $(ONT)-full.owl - $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo -{% endif -%} -{% if 'ttl' in project.export_formats -%} -$(ONT)-full.ttl: $(ONT)-full.owl - $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ -{% endif -%} -{% if 'json' in project.export_formats -%} -$(ONT)-full.json: $(ONT)-full.owl - $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f json -o $@.tmp.json && mv $@.tmp.json $@ -{% endif -%} -{% endif -%} - -# Main release artefacts -$(ONT).owl: $(ONT)-{{ project.primary_release }}.owl - $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert -o $@.tmp.owl && mv $@.tmp.owl $@ - -{% if 'obo' in project.export_formats -%} -$(ONT).obo: $(ONT).owl - $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo -{% endif -%} -{% if 'ttl' in project.export_formats -%} -$(ONT).ttl: $(ONT)-{{ project.primary_release }}.owl - $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ -{% endif -%} -{% if 'json' in project.export_formats -%} -$(ONT).json: $(ONT)-{{ 
project.primary_release }}.owl - $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f json -o $@.tmp.json && mv $@.tmp.json $@ -{% endif -%} - -# ---------------------------------------- -# Initiating Step: Reason over source -# ---------------------------------------- - -# by default we use {{ project.reasoner }} to perform a reason-relax-reduce chain -# after that we annotate the ontology with the release versionInfo - -OTHER_SRC = {% if project.use_dosdps -%} $(PATTERNDIR)/definitions.owl{% endif -%}{% if project.components is defined %} {% for component in project.components.products %} {{ project.components.directory }}/{{ component.filename }}{% endfor %}{% endif %} - - -$(ONTOLOGYTERMS): $(SRC) $(OTHER_SRC) - touch $(ONTOLOGYTERMS) && \ - $(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/{{ project.id }}_terms.sparql $@ - -{% for format in project.export_formats %} -{% if project.gzip_main -%} -$(ONT).{{ format }}.gz: $(ONT).{{ format }} - gzip -c $< > $@.tmp && mv $@.tmp $@ -{% endif %} -{% endfor %} -{% if 'owl' not in project.export_formats %} -{% if project.gzip_main -%} -$(ONT).owl.gz: $(ONT).owl - gzip -c $< > $@.tmp && mv $@.tmp $@ -{% endif %} -$(ONT).owl: $(ONT)-{{ project.primary_release }}.owl - cp $< $@ -{% endif %} - -# base: OTHER sources of interest, such as definitions owl -$(ONT)-base.owl: $(SRC) $(OTHER_SRC) - $(ROBOT) remove --input $< --select imports --trim false \ - {% if project.use_dosdps or project.components is defined %}merge $(patsubst %, -i %, $(OTHER_SRC)) \ - {% endif %}annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \ - --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - {% if project.release_date -%} --annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ - -# Full: The full artefacts with imports merged, reasoned -$(ONT)-full.owl: $(SRC) 
$(OTHER_SRC) - $(ROBOT) merge --input $< \ - reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ - relax \ - reduce -r {{ project.reasoner }} \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ - -{% if 'non-classified' in project.release_artefacts or project.primary_release == 'non-classified' -%} -# foo-non-classified: (edit->imports-merged) -$(ONT)-non-classified.owl: $(SRC) $(OTHER_SRC) - $(ROBOT) merge --input $< \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ -{% endif -%} - -{% if 'simple' in project.release_artefacts or project.primary_release == 'simple' -%} -# foo-simple: (edit->reason,relax,reduce,drop imports, drop every axiom which contains an entity outside the "namespaces of interest") -# drop every axiom: filter --term-file keep_terms.txt --trim true -# remove --select imports --trim false \ - -$(ONT)-simple.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) - $(ROBOT) merge --input $< $(patsubst %, -i %, $(OTHER_SRC)) \ - reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ - relax \ - remove --axioms equivalent \ - relax \ - filter --term-file $(SIMPLESEED) --select "annotations ontology anonymous self" --trim true --signature true \ - reduce -r {{ project.reasoner }} \ - query --update ../sparql/inject-subset-declaration.ru \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ -{% endif -%} - -{% if 'simple-non-classified' in 
project.release_artefacts or project.primary_release == 'simple-non-classified' -%} -# foo-simple-non-classified (edit->relax,reduce,drop imports, drop every axiom which contains an entity outside the "namespaces of interest") <- aka the HPO use case, no reason. -# Should this be the non-classified ontology with the drop foreign axiom filter? -# Consider adding remove --term "http://www.geneontology.org/formats/oboInOwl#hasOBONamespace" - -$(ONT)-simple-non-classified.owl: $(SRC) $(OTHER_SRC) $(ONTOLOGYTERMS) - $(ROBOT) remove --input $< --select imports --trim true \ - merge $(patsubst %, -i %, $(OTHER_SRC)) \ - remove --axioms equivalent \ - reduce -r {{ project.reasoner }} \ - filter --select ontology --term-file $(ONTOLOGYTERMS) --trim false \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ -{% endif -%} - -{% if 'basic' in project.release_artefacts or project.primary_release == 'basic' -%} -# foo-basic: A version of -simple containing only relationships using relations on a configurable whitelist (default = BFO:0000050 (?)). See above (David comment) for explanation. 
-# removes any axioms that contains one of the ops that not in the whitelist file - -$(ONT)-basic.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) $(KEEPRELATIONS) - $(ROBOT) merge --input $< $(patsubst %, -i %, $(OTHER_SRC)) \ - reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ - relax \ - remove --axioms equivalent \ - remove --axioms disjoint \ - remove --term-file $(KEEPRELATIONS) --select complement --select object-properties --trim true \ - relax \ - filter --term-file $(SIMPLESEED) --select "annotations ontology anonymous self" --trim true --signature true \ - reduce -r {{ project.reasoner }} \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ -{% endif -%} + cd $(RELEASEDIR) && git add $(RELEASE_ASSETS) # ---------------------------------------- # Import modules @@ -347,6 +178,15 @@ $(ONT)-basic.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) $(KEEPRELATIONS) # Most ontologies are modularly constructed using portions of other ontologies # These live in the imports/ folder +# ------------------------ +# Imports: Seeding system +# ------------------------ + +# seed.txt contains all referenced entities +IMPORTSEED=tmp/seed.txt +SRCMERGED=tmp/merged-$(SRC) +PRESEED=tmp/pre_seed.txt + $(SRCMERGED): $(SRC) $(ROBOT) remove --input $< --select imports --trim false \ merge $(patsubst %, -i %, $(OTHER_SRC)) -o $@ @@ -355,11 +195,15 @@ $(PRESEED): $(SRCMERGED) $(ROBOT) query -f csv -i $< --query ../sparql/terms.sparql $@.tmp &&\ cat $@.tmp | sort | uniq > $@ +{% if 'basic' in project.release_artefacts or 'simple' in project.release_artefacts or project.primary_release == 'basic' or project.primary_release == 'simple' -%} +SIMPLESEED=tmp/simple_seed.txt + $(SIMPLESEED): $(SRCMERGED) $(ONTOLOGYTERMS) $(ROBOT) query -f csv -i $< --query 
../sparql/simple-seed.sparql $@.tmp &&\ cat $@.tmp $(ONTOLOGYTERMS) | sort | uniq > $@ &&\ echo "http://www.geneontology.org/formats/oboInOwl#SubsetProperty" >> $@ &&\ echo "http://www.geneontology.org/formats/oboInOwl#SynonymTypeProperty" >> $@ +{% endif -%} {% if project.use_dosdps %} # Ugly hack which should be replaced by the dynamic import revision pipeline @@ -370,37 +214,48 @@ $(IMPORTSEED): $(PRESEED) if [ $(IMP) = true ]; then cat $(PRESEED) | sort | uniq > $@; fi {% endif %} +{% if project.import_group is defined -%} +# -- Generate Import Modules -- +# +# This pattern uses ROBOT to generate an import module # Generate terms.txt for each import. (Assume OBO-style Possibly hacky step?) # Should be able to drop this if robot can just take a big messy list of terms as input. - imports/%_terms_combined.txt: $(IMPORTSEED) imports/%_terms.txt if [ $(IMP) = true ]; then cat $^ | grep -v ^# | sort | uniq > $@; fi -# -- Generate Import Modules -- -# -# This pattern uses ROBOT to generate an import module +{% if 'slme' == project.import_group.module_type %} imports/%_import.owl: mirror/%.owl imports/%_terms_combined.txt - if [ $(IMP) = true ]; then $(ROBOT) extract -i $< -T imports/$*_terms_combined.txt --force true --method BOT \ - query --update ../sparql/inject-subset-declaration.ru \ + if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \ + extract -T imports/$*_terms_combined.txt --force true --copy-ontology-annotations true --individuals exclude --method STAR \ + query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \ annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi +{% elif 'minimal' == project.import_group.module_type %} +imports/%_import.owl: mirror/%.owl imports/%_terms_combined.txt + if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \ + extract -T imports/$*_terms_combined.txt 
--force true --copy-ontology-annotations true --method BOT \ + remove --base-iri $(URIBASE)"/$(shell echo $* | tr a-z A-Z)_" --axioms external --preserve-structure false --trim false \ + query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \ + remove --term rdfs:label -T imports/$*_terms_combined.txt --select complement --select "classes individuals annotation-properties" \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi +{% elif 'mireot' == project.import_group.module_type %} +imports/%_import.owl: mirror/%.owl imports/%_terms_combined.txt + if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \ + extract -L imports/$*_terms_combined.txt -U imports/$*_terms_combined.txt --force true --copy-ontology-annotations true --method MIREOT \ + query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi +{% elif 'custom' == project.import_group.module_type %} +imports/%_import.owl: mirror/%.owl + echo "ERROR: You have configured your default module type to be custom; this behavior needs to be overwritten in {{ project.id }}.Makefile!" && false +{% endif %} .PRECIOUS: imports/%_import.owl - -{% if 'obo' in project.export_formats -%} +{% if 'obo' == project.edit_format %} # convert imports to obo. # this can be useful for spot-checks and diffs. # we set strict mode to false by default. 
For discussion see https://github.com/owlcs/owlapi/issues/752 imports/%_import.obo: imports/%_import.owl if [ $(IMP) = true ]; then $(ROBOT) convert --check false -i $< -f obo -o $@.tmp.obo && mv $@.tmp.obo $@; fi {% endif -%} -{% if 'ttl' in project.export_formats -%} -imports/%_import.ttl: imports/%_import.owl - if [ $(IMP) = true ]; then $(ROBOT) convert --check false -i $< -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@; fi -{% endif -%} -{% if 'json' in project.export_formats -%} -imports/%_import.json: imports/%_import.owl - if [ $(IMP) = true ]; then $(ROBOT) convert --check false -i $< -f json -o $@.tmp.json && mv $@.tmp.json $@; fi -{% endif -%} - +{% endif %} {% if project.components is not none %} # ---------------------------------------- @@ -484,24 +339,28 @@ release: $(ONT).owl $(ONT).obo # NOTE: these will soon be phased out and replaced by robot-report # run all violation checks -SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$V-violation.sparql) -sparql_test: $(SRC) - $(ROBOT) verify --catalog catalog-v001.xml -i $< --queries $(SPARQL_VALIDATION_QUERIES) -O reports/ +SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$(V)-violation.sparql) +sparql_test: $(SRC) catalog-v001.xml | $(REPORTDIR) +ifneq ($(SPARQL_VALIDATION_QUERIES),) + $(ROBOT) verify --catalog catalog-v001.xml -i $< --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR) +endif # ---------------------------------------- # ROBOT report # ---------------------------------------- -reports/%-obo-report.tsv: % +$(REPORTDIR)/%-obo-report.tsv: % | $(REPORTDIR) $(ROBOT) report -i $< $(REPORT_LABEL) $(REPORT_PROFILE_OPTS) --fail-on $(REPORT_FAIL_ON) --print 5 -o $@ # ---------------------------------------- # Sparql queries: Exports # ---------------------------------------- -SPARQL_EXPORTS_ARGS = $(foreach V,$(SPARQL_EXPORTS),-s $(SPARQLDIR)/$V.sparql reports/$V.tsv) +SPARQL_EXPORTS_ARGS = $(foreach V,$(SPARQL_EXPORTS),-s 
$(SPARQLDIR)/$(V).sparql $(REPORTDIR)/$(V).tsv) # This combines all into one single command all_reports_onestep: $(SRC) +ifneq ($(SPARQL_EXPORTS_ARGS),) $(ROBOT) query -f tsv -i $< $(SPARQL_EXPORTS_ARGS) +endif # ---------------------------------------- @@ -531,12 +390,12 @@ pattern_schema_checks: update_patterns #This command is a workaround for the absence of -N and -i in wget of alpine (the one ODK depend on now). It downloads all patterns specified in external.txt update_patterns: .FORCE if [ $(PAT) = true ]; then rm -f $(PATTERNDIR)/dosdp-patterns/*.yaml.1 || true; fi - if [ $(PAT) = true ]; then wget -i $(PATTERNDIR)/dosdp-patterns/external.txt --backups=1 -P $(PATTERNDIR)/dosdp-patterns; fi + if [ $(PAT) = true ] && [ -s $(PATTERNDIR)/dosdp-patterns/external.txt ]; then wget -i $(PATTERNDIR)/dosdp-patterns/external.txt --backups=1 -P $(PATTERNDIR)/dosdp-patterns; fi if [ $(PAT) = true ]; then rm -f $(PATTERNDIR)/dosdp-patterns/*.yaml.1 || true; fi $(PATTERNDIR)/pattern.owl: pattern_schema_checks update_patterns - if [ $(PAT) = true ]; then $(DOSDPT) prototype --obo-prefixes --template=$(PATTERNDIR)/dosdp-patterns --outfile=$@; fi + if [ $(PAT) = true ]; then $(DOSDPT) prototype --obo-prefixes true --template=$(PATTERNDIR)/dosdp-patterns --outfile=$@; fi individual_patterns_default := $(patsubst %.tsv, $(PATTERNDIR)/data/default/%.ofn, $(notdir $(wildcard $(PATTERNDIR)/data/default/*.tsv))) pattern_term_lists_default := $(patsubst %.tsv, $(PATTERNDIR)/data/default/%.txt, $(notdir $(wildcard $(PATTERNDIR)/data/default/*.tsv))) @@ -587,6 +446,187 @@ $(PATTERNDIR)/data/{{ pipeline.id }}/%.txt: $(PATTERNDIR)/dosdp-patterns/%.yaml {% endfor %} {% endif -%} {% endif %} + +# ---------------------------------------- +# Release artefacts: export formats +# ---------------------------------------- + + +{% for release in project.release_artefacts -%} +{% if 'obo' in project.export_formats -%} +$(ONT)-{{ release }}.obo: $(ONT)-{{ release }}.owl + $(ROBOT) convert --input 
$< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo +{% endif -%} +{% if 'ttl' in project.export_formats -%} +$(ONT)-{{ release }}.ttl: $(ONT)-{{ release }}.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ +{% endif -%} +{% if 'json' in project.export_formats -%} +$(ONT)-{{ release }}.json: $(ONT)-{{ release }}.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f json -o $@.tmp.json &&\ + jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json +{% endif -%} +{% endfor -%} + +# We always want a base - even if it is not explicitly configured.. +{% if 'base' not in project.release_artefacts -%} +{% if 'obo' in project.export_formats -%} +$(ONT)-base.obo: $(ONT)-base.owl + $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo +{% endif -%} +{% if 'ttl' in project.export_formats -%} +$(ONT)-base.ttl: $(ONT)-base.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ +{% endif -%} +{% if 'json' in project.export_formats -%} +$(ONT)-base.json: $(ONT)-base.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f json -o $@.tmp.json &&\ + jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json +{% endif -%} +{% endif -%} + +# We always want a full release - even if it is not explicitly configured.. 
+{% if 'full' not in project.release_artefacts -%} +{% if 'obo' in project.export_formats -%} +$(ONT)-full.obo: $(ONT)-full.owl + $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo +{% endif -%} +{% if 'ttl' in project.export_formats -%} +$(ONT)-full.ttl: $(ONT)-full.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ +{% endif -%} +{% if 'json' in project.export_formats -%} +$(ONT)-full.json: $(ONT)-full.owl + $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f json -o $@.tmp.json &&\ + jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json +{% endif -%} +{% endif -%} + +# ---------------------------------------- +# Release artefacts: main release artefacts +# ---------------------------------------- + +$(ONT).owl: $(ONT)-{{ project.primary_release }}.owl + $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert -o $@.tmp.owl && mv $@.tmp.owl $@ + +{% if 'obo' in project.export_formats -%} +$(ONT).obo: $(ONT).owl + $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo +{% endif -%} +{% if 'ttl' in project.export_formats -%} +$(ONT).ttl: $(ONT)-{{ project.primary_release }}.owl + $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@ +{% endif -%} +{% if 'json' in project.export_formats -%} +$(ONT).json: $(ONT)-{{ project.primary_release }}.owl + $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + convert --check false -f json -o $@.tmp.json &&\ + jq -S 'walk(if type == "array" then sort else . 
end)' $@.tmp.json > $@ && rm $@.tmp.json +{% endif -%} + +# ----------------------------------------------------- +# Release artefacts: variants (base, full, simple, etc) +# ----------------------------------------------------- + +$(ONTOLOGYTERMS): $(SRC) $(OTHER_SRC) + touch $(ONTOLOGYTERMS) && \ + $(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/{{ project.id }}_terms.sparql $@ + +{% for format in project.export_formats %} +{% if project.gzip_main -%} +$(ONT).{{ format }}.gz: $(ONT).{{ format }} + gzip -c $< > $@.tmp && mv $@.tmp $@ +{% endif %} +{% endfor %} +{% if 'owl' not in project.export_formats %} +{% if project.gzip_main -%} +$(ONT).owl.gz: $(ONT).owl + gzip -c $< > $@.tmp && mv $@.tmp $@ +{% endif %} +$(ONT).owl: $(ONT)-{{ project.primary_release }}.owl + cp $< $@ +{% endif %} + +# base: OTHER sources of interest, such as definitions owl +$(ONT)-base.owl: $(SRC) $(OTHER_SRC) + $(ROBOT) remove --input $< --select imports --trim false \ + {% if project.use_dosdps or project.components is defined %}merge $(patsubst %, -i %, $(OTHER_SRC)) \ + {% endif %}annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \ + --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ + {% if project.release_date -%} --annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ + +# Full: The full artefacts with imports merged, reasoned +$(ONT)-full.owl: $(SRC) $(OTHER_SRC) + $(ROBOT) merge --input $< \ + reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ + relax \ + reduce -r {{ project.reasoner }} \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ + +{% if 'non-classified' in project.release_artefacts or project.primary_release 
== 'non-classified' -%} +# foo-non-classified: (edit->imports-merged) +$(ONT)-non-classified.owl: $(SRC) $(OTHER_SRC) + $(ROBOT) merge --input $< \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ +{% endif -%} + +{% if 'simple' in project.release_artefacts or project.primary_release == 'simple' -%} +# foo-simple: (edit->reason,relax,reduce,drop imports, drop every axiom which contains an entity outside the "namespaces of interest") +# drop every axiom: filter --term-file keep_terms.txt --trim true +# remove --select imports --trim false \ + +$(ONT)-simple.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) + $(ROBOT) merge --input $< $(patsubst %, -i %, $(OTHER_SRC)) \ + reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ + relax \ + remove --axioms equivalent \ + relax \ + filter --term-file $(SIMPLESEED) --select "annotations ontology anonymous self" --trim true --signature true \ + reduce -r {{ project.reasoner }} \ + query --update ../sparql/inject-subset-declaration.ru \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ +{% endif -%} + +{% if 'simple-non-classified' in project.release_artefacts or project.primary_release == 'simple-non-classified' -%} +# foo-simple-non-classified (edit->relax,reduce,drop imports, drop every axiom which contains an entity outside the "namespaces of interest") <- aka the HPO use case, no reason. +# Should this be the non-classified ontology with the drop foreign axiom filter? 
+# Consider adding remove --term "http://www.geneontology.org/formats/oboInOwl#hasOBONamespace" + +$(ONT)-simple-non-classified.owl: $(SRC) $(OTHER_SRC) $(ONTOLOGYTERMS) + $(ROBOT) remove --input $< --select imports --trim true \ + merge $(patsubst %, -i %, $(OTHER_SRC)) \ + remove --axioms equivalent \ + reduce -r {{ project.reasoner }} \ + filter --select ontology --term-file $(ONTOLOGYTERMS) --trim false \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ +{% endif -%} + +{% if 'basic' in project.release_artefacts or project.primary_release == 'basic' -%} +# foo-basic: A version of -simple containing only relationships using relations on a configurable whitelist (default = BFO:0000050 (?)). See above (David comment) for explanation. +# removes any axioms that contains one of the ops that not in the whitelist file + +$(ONT)-basic.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) $(KEEPRELATIONS) + $(ROBOT) merge --input $< $(patsubst %, -i %, $(OTHER_SRC)) \ + reason --reasoner {{ project.reasoner }} --equivalent-classes-allowed {{ project.allow_equivalents }} --exclude-tautologies {{ project.exclude_tautologies }} \ + relax \ + remove --axioms equivalent \ + remove --axioms disjoint \ + remove --term-file $(KEEPRELATIONS) --select complement --select object-properties --trim true \ + relax \ + filter --term-file $(SIMPLESEED) --select "annotations ontology anonymous self" --trim true --signature true \ + reduce -r {{ project.reasoner }} \ + annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) {% if project.release_date -%}--annotation oboInOwl:date "$(OBODATE)" {% endif -%}--output $@.tmp.owl && mv $@.tmp.owl $@ +{% endif -%} + + {%- if project.public_release != 'none' %} # ---------------------------------------- # GitHub release (HIGHLY experimental) @@ -598,14 +638,14 @@ TAGNAME=v$(TODAY) {% if project.public_release 
== 'github_curl' %} USER=unknown -GH_ASSETS = $(patsubst %, $TMPDIR/gh_release_asset_%.txt, $(RELEASEFILES)) +GH_ASSETS = $(patsubst %, $(TMPDIR)/gh_release_asset_%.txt, $(RELEASEFILES)) GITHUB_REPO={{ project.github_org }}/{{ project.repo }} -$TMPDIR/release_get.txt: $TMPDIR +$(TMPDIR)/release_get.txt: | $(TMPDIR) curl -s https://api.github.com/repos/${GITHUB_REPO}/releases/tags/${TAGNAME} > $@ -$TMPDIR/release_op.txt: $TMPDIR $TMPDIR/release_get.txt - $(eval RELEASEID=$(shell cat $TMPDIR/release_get.txt | jq '.id')) +$(TMPDIR)/release_op.txt: $(TMPDIR)/release_get.txt | $(TMPDIR) + $(eval RELEASEID=$(shell cat $(TMPDIR)/release_get.txt | jq '.id')) if ! [ "$(RELEASEID)" -eq "$(RELEASEID)" ] ; then \ curl -s -X POST \ https://api.github.com/repos/${GITHUB_REPO}/releases \ @@ -617,12 +657,12 @@ $TMPDIR/release_op.txt: $TMPDIR $TMPDIR/release_get.txt cp $< $@; \ fi; -$TMPDIR/gh_release_id.txt: $TMPDIR $TMPDIR/release_op.txt - echo $(shell cat $TMPDIR/release_op.txt | jq '.id') > $@; +$(TMPDIR)/gh_release_id.txt: $(TMPDIR)/release_op.txt | $(TMPDIR) + echo $(shell cat $(TMPDIR)/release_op.txt | jq '.id') > $@; -$TMPDIR/gh_release_asset_%.txt: $TMPDIR $TMPDIR/gh_release_id.txt % +$(TMPDIR)/gh_release_asset_%.txt: $(TMPDIR)/gh_release_id.txt % | $(TMPDIR) curl -X POST \ - "https://uploads.github.com/repos/${GITHUB_REPO}/releases/$(shell cat $TMPDIR/gh_release_id.txt)/assets?name=$*&label=$*" \ + "https://uploads.github.com/repos/${GITHUB_REPO}/releases/$(shell cat $(TMPDIR)/gh_release_id.txt)/assets?name=$*&label=$*" \ --data-binary @$* \ -u ${USER} \ -H 'Accept: */*' \ @@ -630,7 +670,7 @@ $TMPDIR/gh_release_asset_%.txt: $TMPDIR $TMPDIR/gh_release_id.txt % -H 'Connection: keep-alive' \ -H 'Content-Type: application/octet-stream' > $@ -public_release: $TMPDIR $TMPDIR/gh_release_id.txt $(GH_ASSETS) +public_release: $(TMPDIR)/gh_release_id.txt $(GH_ASSETS) | $(TMPDIR) {% endif %} {%- if project.public_release == 'github_python' %} 
GITHUB_RELEASE_PYTHON=make-release-assets.py @@ -640,6 +680,9 @@ public_release: $(GITHUB_RELEASE_PYTHON) --release $(TAGNAME) $(RELEASEFILES) {%- endif %} +validate_idranges: + amm ../scripts/validate_id_ranges.sc {{ project.id }}-idranges.owl + update_repo: sh ../scripts/update_repo.sh diff --git a/template/src/ontology/README-editors.md.jinja2 b/template/src/ontology/README-editors.md.jinja2 index 11cb1bf4..6190abd9 100644 --- a/template/src/ontology/README-editors.md.jinja2 +++ b/template/src/ontology/README-editors.md.jinja2 @@ -61,7 +61,7 @@ All import modules are in the [imports/](imports/) folder. There are two ways to include new classes in an import module 1. Reference an external ontology class in the edit ontology. In Protege: "add new entity", then paste in the PURL - 2. Add to the imports/{{ project.id }}_terms.txt file + 2. Add to the imports/ont_terms.txt file, for example imports/go_terms.txt After doing this, you can run @@ -69,9 +69,10 @@ After doing this, you can run to regenerate imports. -Note: the {{ project.id }}_terms.txt file may include 'starter' classes seeded from +Note: the ont_terms.txt file may include 'starter' classes seeded from the ontology starter kit. It is safe to remove these. +{% if project.use_dosdps -%} ## Design patterns You can automate (class) term generation from design patterns by placing DOSDP @@ -105,7 +106,7 @@ To compile design patterns to terms run: This generates a file (`src/patterns/definitions.owl`). You then need to add an import statement to the editor's file to import the definitions file. - +{% endif -%} ## Release Manager notes @@ -125,11 +126,11 @@ first type to make sure you are on master cd src/ontology - ./build.sh + sh run.sh make all If this looks good type: - ./prepare_release.sh + sh run.sh make prepare_release This generates derived files such as {{ project.id }}.owl and {{ project.id }}.obo and places them in the top level (../..). 
diff --git a/template/src/ontology/patterns.sh b/template/src/ontology/patterns.sh deleted file mode 100755 index e2fb401b..00000000 --- a/template/src/ontology/patterns.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -./run.sh make patterns diff --git a/template/src/ontology/prepare_release.sh b/template/src/ontology/prepare_release.sh deleted file mode 100755 index 6aefd945..00000000 --- a/template/src/ontology/prepare_release.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -./run.sh make prepare_release diff --git a/template/src/ontology/reports/README.md b/template/src/ontology/reports/README.md deleted file mode 100644 index 933492c7..00000000 --- a/template/src/ontology/reports/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Reports folder - -Files are added to this during the release process diff --git a/template/src/ontology/run.bat b/template/src/ontology/run.bat deleted file mode 100644 index 717ca499..00000000 --- a/template/src/ontology/run.bat +++ /dev/null @@ -1 +0,0 @@ -docker run -v %cd%\..\..\:/work -w /work/src/ontology --rm -ti obolibrary/odkfull %* diff --git a/template/src/ontology/run.bat.jinja2 b/template/src/ontology/run.bat.jinja2 new file mode 100644 index 00000000..0e78bf0b --- /dev/null +++ b/template/src/ontology/run.bat.jinja2 @@ -0,0 +1 @@ +docker run -v %cd%\..\..\:/work -w /work/src/ontology {% if project.robot_java_args is defined %}-e ROBOT_JAVA_ARGS='{{ project.robot_java_args }}' -e JAVA_OPTS='{{ project.robot_java_args }}'{% endif %} {% if project.use_external_date is sameas True %}-e TODAY=`date +%Y-%m-%d` {% endif %}--rm -ti obolibrary/odkfull %* diff --git a/template/src/ontology/test.sh b/template/src/ontology/test.sh deleted file mode 100755 index dfd54a7e..00000000 --- a/template/src/ontology/test.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -./run.sh make test diff --git a/template/src/patterns/README.md b/template/src/patterns/README.md deleted file mode 100644 index e69de29b..00000000 diff --git 
a/template/src/patterns/data/default/example.tsv b/template/src/patterns/data/default/example.tsv deleted file mode 100644 index 13bbc779..00000000 --- a/template/src/patterns/data/default/example.tsv +++ /dev/null @@ -1 +0,0 @@ -defined_class example diff --git a/template/src/patterns/definitions.owl.jinja2 b/template/src/patterns/definitions.owl.jinja2 deleted file mode 100644 index 5cb63233..00000000 --- a/template/src/patterns/definitions.owl.jinja2 +++ /dev/null @@ -1,11 +0,0 @@ -Prefix(:=) -Prefix(owl:=) -Prefix(rdf:=) -Prefix(xml:=) -Prefix(xsd:=) -Prefix(rdfs:=) - - -Ontology( - -) \ No newline at end of file diff --git a/template/src/patterns/dosdp-patterns/example.yaml b/template/src/patterns/dosdp-patterns/example.yaml deleted file mode 100644 index ed5c0b31..00000000 --- a/template/src/patterns/dosdp-patterns/example.yaml +++ /dev/null @@ -1,22 +0,0 @@ -pattern_name: example -pattern_iri: http://purl.obolibrary.org/obo/odk/example.yaml -description: "This is a minimal example pattern." 
- -classes: - example: owl:Thing - -relations: - part_of: BFO:0000050 - -vars: - example: "'example'" - -name: - text: "Part of %s" - vars: - - example - -equivalentTo: - text: "'part_of' some %s" - vars: - - example \ No newline at end of file diff --git a/template/src/patterns/dosdp-patterns/external.txt b/template/src/patterns/dosdp-patterns/external.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/template/src/patterns/pattern.owl.jinja2 b/template/src/patterns/pattern.owl.jinja2 deleted file mode 100644 index 47c39333..00000000 --- a/template/src/patterns/pattern.owl.jinja2 +++ /dev/null @@ -1,11 +0,0 @@ -Prefix(:=) -Prefix(owl:=) -Prefix(rdf:=) -Prefix(xml:=) -Prefix(xsd:=) -Prefix(rdfs:=) - - -Ontology( - -) diff --git a/template/src/scripts/validate_id_ranges.sc b/template/src/scripts/validate_id_ranges.sc new file mode 100644 index 00000000..1785e8df --- /dev/null +++ b/template/src/scripts/validate_id_ranges.sc @@ -0,0 +1,75 @@ +import $ivy.`net.sourceforge.owlapi:owlapi-distribution:4.5.16` +import $ivy.`com.outr::scribe-slf4j:2.7.12` +import org.semanticweb.owlapi.apibinding.OWLManager +import org.semanticweb.owlapi.model._ +import org.semanticweb.owlapi.vocab.OWLFacet +import java.io.File +import scala.collection +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +@main +def main(id_range_file: os.Path) = { + val o = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(id_range_file.toIO)) + val allMyFacets = mutable.ListBuffer.empty[MyFacet] + for (dt <- o.getDatatypesInSignature().asScala) { + val defs = o.getAxioms(dt) + for (ax <- defs.asScala) { + val range = ax.getDataRange() + val f = new MyFacet() + f.id = dt.toString() + range.accept(new OWLDataRangeVisitor() { + override + def visit(owlDatatype: OWLDatatype) = () + override + def visit(owlDataOneOf: OWLDataOneOf) = () + override + def visit(owlDataComplementOf: OWLDataComplementOf) = () + override + def visit(owlDataIntersectionOf: 
OWLDataIntersectionOf) = () + override + def visit(owlDataUnionOf: OWLDataUnionOf) = () + override + def visit(owlDatatypeRestriction: OWLDatatypeRestriction) = { + for (fr <- owlDatatypeRestriction.getFacetRestrictions().asScala) { + var i = fr.getFacetValue().parseInteger() + if(fr.getFacet().equals(OWLFacet.MIN_INCLUSIVE)) { + f.min = i + } else if(fr.getFacet().equals(OWLFacet.MAX_INCLUSIVE)) { + f.max = i + } else if(fr.getFacet().equals(OWLFacet.MIN_EXCLUSIVE)) { + i += 1 + f.min = i + } else if(fr.getFacet().equals(OWLFacet.MAX_EXCLUSIVE)) { + i -= 1 + f.max = i + } else { + log("Unknown range restriction: "+fr) + } + } + } + }) + log("Testing range: "+f) + testFacetViolation(f,allMyFacets) + allMyFacets.append(f) + } + } +} +def testFacetViolation(f: MyFacet , allMyFacets: collection.Seq[MyFacet]) = { + for (f_p <- allMyFacets) { + if (((f.min <= f_p.max) && (f_p.min <= f.max))) { + throw new IllegalStateException(f + " overlaps with " + f_p + "!") + } + } +} +def log(o: Object) = { + println(o.toString()) +} +class MyFacet { + var min: Int = _ + var max: Int = _ + var id: String = _ + override + def toString(): String = { + return "Facet{" + id + "}[min:" + min + " max:" + max + "]" + } +} \ No newline at end of file diff --git a/template/src/sparql/postprocess-module.ru b/template/src/sparql/postprocess-module.ru new file mode 100644 index 00000000..0767af1e --- /dev/null +++ b/template/src/sparql/postprocess-module.ru @@ -0,0 +1,16 @@ +PREFIX rdf: +PREFIX rdfs: +PREFIX dc: +PREFIX owl: + + +DELETE { + ?ontology ?ontology_annotation_property ?ontology_annotation_value . +} + +WHERE { + ?ontology rdf:type owl:Ontology . + ?ontology ?ontology_annotation_property ?ontology_annotation_value . 
+ FILTER(?ontology_annotation_property != dc:source && ?ontology_annotation_property != rdf:type) + +} \ No newline at end of file diff --git a/template/src/sparql/preprocess-module.ru b/template/src/sparql/preprocess-module.ru new file mode 100644 index 00000000..99120a0c --- /dev/null +++ b/template/src/sparql/preprocess-module.ru @@ -0,0 +1,22 @@ +PREFIX rdf: +PREFIX rdfs: +PREFIX dc: +PREFIX owl: + + +#DELETE { +# ?ontology ?ontology_annotation_property ?ontology_annotation_value . +#} + +INSERT { + ?ontology dc:source ?version_iri . +} + +WHERE { + ?ontology rdf:type owl:Ontology ; + owl:versionIRI ?version_iri . + #OPTIONAL { + # ?ontology ?ontology_annotation_property ?ontology_annotation_value . + #} + +} \ No newline at end of file diff --git a/template/src/sparql/trailing-whitespace-violation.sparql b/template/src/sparql/trailing-whitespace-violation.sparql deleted file mode 100644 index 1d69472d..00000000 --- a/template/src/sparql/trailing-whitespace-violation.sparql +++ /dev/null @@ -1,11 +0,0 @@ -# home: hp/sparql/trailing-whitespace-violation.sparql -prefix owl: -prefix rdfs: - -SELECT ?c ?p ?x WHERE -{ - ?c ?p ?x . - - FILTER( regex(STR(?x), "^ ") || regex(STR(?x), " $") ) - FILTER( ?p != owl:annotatedTarget ) -} diff --git a/template/src/sparql/xref-syntax-violation.sparql b/template/src/sparql/xref-syntax-violation.sparql deleted file mode 100644 index 98a61720..00000000 --- a/template/src/sparql/xref-syntax-violation.sparql +++ /dev/null @@ -1,13 +0,0 @@ -# home: hp -prefix hasDbXref: -prefix oio: -prefix owl: -prefix rdfs: - -SELECT ?c ?x WHERE -{ - ?c hasDbXref: ?x . 
- - FILTER( regex(STR(?x), " ") || regex(STR(?x), ";") || STR(?x) = "" ) - -} diff --git a/tests/test-ci.yaml b/tests/test-ci.yaml new file mode 100644 index 00000000..e46e146a --- /dev/null +++ b/tests/test-ci.yaml @@ -0,0 +1,11 @@ +id: tci +title: "Test CI" +github_org: INCATools +repo: ontology-development-kit +git_main_branch: master +ci: + - github_actions +robot_report: + custom_sparql_checks : [] + custom_sparql_exports : + - basic-report \ No newline at end of file diff --git a/tests/test-components.yaml b/tests/test-components.yaml index ff886449..1d7fb39f 100644 --- a/tests/test-components.yaml +++ b/tests/test-components.yaml @@ -1,15 +1,12 @@ -id: components +id: tco title: "Test Components Ontology" github_org: obophenotype repo: components report_fail_on: ERROR use_dosdps: TRUE dosdp_tools_options: "--obo-prefixes=true --restrict-axioms-to=logical" -namespaces: - - http://purl.obolibrary.org/obo/ODK_ release_artefacts: - base - - simple-non-classified - full - simple primary_release: full @@ -21,7 +18,6 @@ import_group: products: - id: pato - id: ro - - id: iao - id: bfo components: products: diff --git a/tests/test-import-minimal.yaml b/tests/test-import-minimal.yaml new file mode 100644 index 00000000..c02a803b --- /dev/null +++ b/tests/test-import-minimal.yaml @@ -0,0 +1,32 @@ +id: rrc +title: "Test ROBOT report" +github_org: INCATools +repo: ontology-development-kit +edit_format: obo +export_formats: + - owl + - obo + - json +release_artefacts: + - base + - simple + - full +primary_release: full +import_group: + products: + - id: ro + - id: iao + - id: pco +components: + products: + - filename: pato_ext.owl +robot_java_args: '-Xmx8G' +robot_report: + fail_on : ERROR + use_labels : False + custom_profile : True + report_on : + - edit + custom_sparql_checks : [] + custom_sparql_exports : + - basic-report \ No newline at end of file diff --git a/tests/test-minimal.yaml b/tests/test-minimal.yaml new file mode 100644 index 00000000..062d3eb9 --- 
/dev/null +++ b/tests/test-minimal.yaml @@ -0,0 +1 @@ +id: mini diff --git a/tests/test-module-minimal.yaml b/tests/test-module-minimal.yaml new file mode 100644 index 00000000..64a6b078 --- /dev/null +++ b/tests/test-module-minimal.yaml @@ -0,0 +1,15 @@ +id: modmini +title: "Test ROBOT extract minimal" +github_org: INCATools +repo: ontology-development-kit +edit_format: obo +export_formats: + - owl + - obo + - json +import_group: + products: + - id: ro + - id: pato + module_type: minimal + diff --git a/tests/test-module-mireot.yaml b/tests/test-module-mireot.yaml new file mode 100644 index 00000000..84881046 --- /dev/null +++ b/tests/test-module-mireot.yaml @@ -0,0 +1,15 @@ +id: modmireot +title: "Test ROBOT extract mireot" +github_org: INCATools +repo: ontology-development-kit +edit_format: obo +export_formats: + - owl + - obo + - json +import_group: + products: + - id: ro + - id: pato + module_type: mireot + diff --git a/tests/test-module-star.yaml b/tests/test-module-star.yaml new file mode 100644 index 00000000..3ccacf77 --- /dev/null +++ b/tests/test-module-star.yaml @@ -0,0 +1,19 @@ +id: modstar +title: "Test ROBOT extract star" +github_org: INCATools +repo: ontology-development-kit +edit_format: obo +export_formats: + - owl + - obo + - json +import_group: + products: + - id: ro + - id: pato + module_type: slme + module_type_slme: STAR + slme_individuals: exclude + release_imports: False + create_obo_metadata: False + diff --git a/tests/test-release.yaml b/tests/test-release.yaml index 65155f8e..5fa768aa 100644 --- a/tests/test-release.yaml +++ b/tests/test-release.yaml @@ -15,6 +15,7 @@ release_artefacts: - full primary_release: simple import_group: + release_imports: TRUE products: - id: ro - id: pato diff --git a/tests/test-robot-report.yaml b/tests/test-robot-report.yaml new file mode 100644 index 00000000..cdbc384c --- /dev/null +++ b/tests/test-robot-report.yaml @@ -0,0 +1,27 @@ +id: rrc +title: "Test ROBOT report" +github_org: INCATools +repo: 
ontology-development-kit +edit_format: owl +export_formats: + - owl + - obo + - json +release_artefacts: + - base + - simple + - full +import_group: + products: + - id: ro + - id: iao +robot_report: + release_reports: True + fail_on : ERROR + use_labels : False + custom_profile : True + report_on : + - edit + custom_sparql_checks : [] + custom_sparql_exports : + - basic-report \ No newline at end of file