From e26076673665b43612b040059a4da5c8592fe3f1 Mon Sep 17 00:00:00 2001 From: Nico Matentzoglu Date: Thu, 18 Apr 2024 15:57:30 +0300 Subject: [PATCH] ODK 1.5 migration --- .github/workflows/qc.yml | 6 +- src/ontology/Makefile | 306 ++++++++++++------ src/ontology/run.sh | 83 ++++- src/ontology/xpo-odk.yaml | 1 + src/scripts/run-command.sh | 4 + src/scripts/update_repo.sh | 5 +- src/sparql/dc-properties-violation.sparql | 11 + src/sparql/edges.sparql | 1 - src/sparql/inject-subset-declaration.ru | 2 +- src/sparql/inject-synonymtype-declaration.ru | 2 +- src/sparql/iri-range-violation.sparql | 2 +- src/sparql/label-with-iri-violation.sparql | 2 +- .../multiple-replaced_by-violation.sparql | 12 + .../owldef-self-reference-violation.sparql | 2 +- src/sparql/simple-seed.sparql | 1 - src/sparql/subsets-labeled.sparql | 1 - src/sparql/xpo_terms.sparql | 2 +- 17 files changed, 334 insertions(+), 109 deletions(-) mode change 100644 => 100755 src/ontology/run.sh create mode 100755 src/scripts/run-command.sh create mode 100644 src/sparql/dc-properties-violation.sparql create mode 100644 src/sparql/multiple-replaced_by-violation.sparql diff --git a/.github/workflows/qc.yml b/.github/workflows/qc.yml index c6dced87..1c9e286f 100644 --- a/.github/workflows/qc.yml +++ b/.github/workflows/qc.yml @@ -4,7 +4,7 @@ name: CI # Controls when the action will run. 
on: - # Triggers the workflow on push or pull request events but only for the main branch + # Triggers the workflow on push or pull request events but only for the master branch push: branches: [ master ] pull_request: @@ -19,12 +19,12 @@ jobs: ontology_qc: # The type of runner that the job will run on runs-on: ubuntu-latest - container: obolibrary/odkfull:v1.3.0 + container: obolibrary/odkfull:v1.5 # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run ontology QC checks env: diff --git a/src/ontology/Makefile b/src/ontology/Makefile index f7bf5cf3..4ab06029 100644 --- a/src/ontology/Makefile +++ b/src/ontology/Makefile @@ -1,7 +1,7 @@ # ---------------------------------------- # Makefile for xpo # Generated using ontology-development-kit -# ODK Version: v1.3.1 +# ODK Version: v1.5 # ---------------------------------------- # IMPORTANT: DO NOT EDIT THIS FILE. 
To override default make goals, use xpo.Makefile instead @@ -9,17 +9,22 @@ # ---------------------------------------- # More information: https://github.com/INCATools/ontology-development-kit/ +# Fingerprint of the configuration file when this Makefile was last generated +CONFIG_HASH= f7826ff5a16f09152b9311cccc6e161ac99e09b7bbe4227e4b8dcf0d2310d0e6 + # ---------------------------------------- # Standard Constants # ---------------------------------------- # these can be overwritten on the command line +OBOBASE= http://purl.obolibrary.org/obo URIBASE= http://purl.obolibrary.org/obo ONT= xpo -ONTBASE= $(URIBASE)/$(ONT) +ONTBASE= http://purl.obolibrary.org/obo/xpo EDIT_FORMAT= owl SRC = $(ONT)-edit.$(EDIT_FORMAT) +MAKE_FAST= $(MAKE) IMP=false PAT=false COMP=false MIR=false CATALOG= catalog-v001.xml ROBOT= robot --catalog $(CATALOG) @@ -32,32 +37,39 @@ MIRRORDIR= mirror IMPORTDIR= imports SUBSETDIR= subsets SCRIPTSDIR= ../scripts +UPDATEREPODIR= target SPARQLDIR = ../sparql COMPONENTSDIR = components REPORT_FAIL_ON = None REPORT_LABEL = -l true REPORT_PROFILE_OPTS = OBO_FORMAT_OPTIONS = -SPARQL_VALIDATION_CHECKS = owldef-self-reference iri-range label-with-iri -SPARQL_EXPORTS = basic-report class-count-by-prefix edges xrefs obsoletes synonyms -ODK_VERSION_MAKEFILE = v1.3.1 +SPARQL_VALIDATION_CHECKS = owldef-self-reference iri-range label-with-iri multiple-replaced_by dc-properties +SPARQL_EXPORTS = basic-report edges xrefs obsoletes synonyms +ODK_VERSION_MAKEFILE = v1.5 TODAY ?= $(shell date +%Y-%m-%d) OBODATE ?= $(shell date +'%d:%m:%Y %H:%M') VERSION= $(TODAY) ANNOTATE_ONTOLOGY_VERSION = annotate -V $(ONTBASE)/releases/$(VERSION)/$@ --annotation owl:versionInfo $(VERSION) +ANNOTATE_CONVERT_FILE = annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) convert -f ofn --output $@.tmp.owl && mv $@.tmp.owl $@ OTHER_SRC = $(PATTERNDIR)/definitions.owl ONTOLOGYTERMS = $(TMPDIR)/ontologyterms.txt +EDIT_PREPROCESSED = $(TMPDIR)/$(ONT)-preprocess.owl PATTERNDIR= 
../patterns -PATTERN_TESTER= dosdp validate -i -DOSDPT= dosdp-tools -PATTERN_RELEASE_FILES= $(PATTERNDIR)/definitions.owl $(PATTERNDIR)/pattern.owl - +PATTERN_TESTER= dosdp validate -i +DOSDPT= dosdp-tools +PATTERN_RELEASE_FILES= $(PATTERNDIR)/definitions.owl $(PATTERNDIR)/pattern.owl FORMATS = $(sort owl obo json owl) FORMATS_INCL_TSV = $(sort $(FORMATS) tsv) -RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-simple $(ONT)-base $(ONT)-full) +RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-simple ) + +ifeq ($(ODK_DEBUG),yes) +ODK_DEBUG_FILE = debug.log +SHELL = $(SCRIPTSDIR)/run-command.sh +endif # ---------------------------------------- # Top-level targets @@ -69,23 +81,72 @@ RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-simple $(ONT)-base $(O all: all_odk .PHONY: all_odk -all_odk: odkversion test all_assets +all_odk: odkversion config_check test custom_reports all_assets .PHONY: test -test: odkversion dosdp_validation sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt - $(ROBOT) reason --input $(SRC) --reasoner ELK --equivalent-classes-allowed asserted-only \ - --exclude-tautologies structural --output test.owl && rm test.owl &&\ - echo "Success" +test: odkversion dosdp_validation reason_test sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt + echo "Finished running all tests successfully." 
+ +.PHONY: test_fast +test_fast: + $(MAKE_FAST) test + +.PHONY: release_diff +release_diff: $(REPORTDIR)/release-diff.md + +.PHONY: reason_test +reason_test: $(EDIT_PREPROCESSED) + $(ROBOT) reason --input $< --reasoner ELK --equivalent-classes-allowed asserted-only \ + --exclude-tautologies structural --output test.owl && rm test.owl .PHONY: odkversion odkversion: - echo "ODK Makefile version: $(ODK_VERSION_MAKEFILE) (this is the version of the ODK with which this Makefile was generated, \ - not the version of the ODK you are running)" &&\ - echo "ROBOT version (ODK): " && $(ROBOT) --version + @echo "ODK Makefile $(ODK_VERSION_MAKEFILE)" + @odk-info --tools +.PHONY: config_check +config_check: + @if [ "$$(sha256sum $(ONT)-odk.yaml | cut -c1-64)" = "$(CONFIG_HASH)" ]; then \ + echo "Repository is up-to-date." ; else \ + echo "Your ODK configuration has changed since this Makefile was generated. You may need to run 'make update_repo'." ; fi + $(TMPDIR) $(REPORTDIR) $(MIRRORDIR) $(IMPORTDIR) $(COMPONENTSDIR) $(SUBSETDIR): mkdir -p $@ +# ---------------------------------------- +# ODK-managed ROBOT plugins +# ---------------------------------------- + +# Make sure ROBOT knows where to find plugins +export ROBOT_PLUGINS_DIRECTORY=$(TMPDIR)/plugins + +# Override this rule in xpo.Makefile to install custom plugins +.PHONY: custom_robot_plugins +custom_robot_plugins: + + +.PHONY: extra_robot_plugins +extra_robot_plugins: + + +# Install all ROBOT plugins to the runtime plugins directory +.PHONY: all_robot_plugins +all_robot_plugins: $(foreach plugin,$(notdir $(wildcard /tools/robot-plugins/*.jar)),$(ROBOT_PLUGINS_DIRECTORY)/$(plugin)) \ + $(foreach plugin,$(notdir $(wildcard ../../plugins/*.jar)),$(ROBOT_PLUGINS_DIRECTORY)/$(plugin)) \ + custom_robot_plugins extra_robot_plugins \ + +# Default rule to install plugins +$(ROBOT_PLUGINS_DIRECTORY)/%.jar: + @mkdir -p $(ROBOT_PLUGINS_DIRECTORY) + @if [ -f ../../plugins/$*.jar ]; then \ + ln ../../plugins/$*.jar $@ ; \ + elif [ -f
/tools/robot-plugins/$*.jar ]; then \ + cp /tools/robot-plugins/$*.jar $@ ; \ + fi + +# Specific rules for supplementary plugins defined in configuration + + # ---------------------------------------- # Release assets # ---------------------------------------- @@ -126,6 +187,19 @@ SUBSET_FILES = $(foreach n,$(SUBSET_ROOTS), $(foreach f,$(FORMATS_INCL_TSV), $(n .PHONY: all_subsets all_subsets: $(SUBSET_FILES) +# ---------------------------------------- +# Mapping assets +# ---------------------------------------- + + +MAPPINGS = + +MAPPING_FILES = $(patsubst %, $(MAPPINGDIR)/%.sssom.tsv, $(MAPPINGS)) + +.PHONY: all_mappings +all_mappings: $(MAPPING_FILES) + + # ---------------------------------------- # QC Reports & Utilities # ---------------------------------------- @@ -144,7 +218,7 @@ all_reports: custom_reports robot_reports # ROBOT OWL Profile checking # ---------------------------------------- -# The conversion to functional syntax is necessary to avoid undeclared entity violations. +# The merge step is necessary to avoid undeclared entity violations. 
$(REPORTDIR)/validate_profile_owl2dl_%.txt: % | $(REPORTDIR) $(TMPDIR) $(ROBOT) merge -i $< convert -f ofn -o $(TMPDIR)/validate.ofn $(ROBOT) validate-profile --profile DL -i $(TMPDIR)/validate.ofn -o $@ || { cat $@ && exit 1; } @@ -162,9 +236,10 @@ validate_profile_%: $(REPORTDIR)/validate_profile_owl2dl_%.txt SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$(V)-violation.sparql) -sparql_test: $(SRC) catalog-v001.xml | $(REPORTDIR) +sparql_test: $(EDIT_PREPROCESSED) | $(REPORTDIR) ifneq ($(SPARQL_VALIDATION_QUERIES),) - $(ROBOT) verify --catalog catalog-v001.xml -i $< --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR) + + $(ROBOT) verify -i $(EDIT_PREPROCESSED) --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR) endif # ---------------------------------------- @@ -177,6 +252,10 @@ $(REPORTDIR)/$(SRC)-obo-report.tsv: $(SRCMERGED) | $(REPORTDIR) $(REPORTDIR)/%-obo-report.tsv: % | $(REPORTDIR) $(ROBOT) report -i $< $(REPORT_LABEL) $(REPORT_PROFILE_OPTS) --fail-on $(REPORT_FAIL_ON) --base-iri http://purl.obolibrary.org/obo/XPO_ --print 5 -o $@ +check_for_robot_updates: + echo "You are not using a custom profile, so you are getting the joy of the latest ROBOT report!" 
+ + # ---------------------------------------- # Release assets # ---------------------------------------- @@ -186,25 +265,32 @@ ASSETS = \ $(MAIN_FILES) \ $(PATTERN_RELEASE_FILES) \ $(REPORT_FILES) \ - $(SUBSET_FILES) + $(SUBSET_FILES) \ + $(MAPPING_FILES) RELEASE_ASSETS = \ $(MAIN_FILES) \ $(SUBSET_FILES) .PHONY: all_assets -all_assets: $(ASSETS) +all_assets: $(ASSETS) check_rdfxml_assets .PHONY: show_assets show_assets: echo $(ASSETS) du -sh $(ASSETS) +check_rdfxml_%: % + @check-rdfxml $< + +.PHONY: check_rdfxml_assets +check_rdfxml_assets: $(foreach product,$(MAIN_PRODUCTS),check_rdfxml_$(product).owl) + # ---------------------------------------- # Release Management # ---------------------------------------- -CLEANFILES=$(MAIN_FILES) $(SRCMERGED) +CLEANFILES=$(MAIN_FILES) $(SRCMERGED) $(EDIT_PREPROCESSED) # This should be executed by the release manager whenever time comes to make a release. # It will ensure that all assets/files are fresh, and will copy to release folder @@ -227,6 +313,14 @@ prepare_initial_release: all_assets prepare_release_fast: $(MAKE) prepare_release IMP=false PAT=false MIR=false COMP=false +CURRENT_RELEASE=$(ONTBASE).owl + +$(TMPDIR)/current-release.owl: + wget $(CURRENT_RELEASE) -O $@ + +$(REPORTDIR)/release-diff.md: $(ONT).owl $(TMPDIR)/current-release.owl + $(ROBOT) diff --labels true --left $(TMPDIR)/current-release.owl --right $(ONT).owl -f markdown -o $@ + # ------------------------ # Imports: Seeding system # ------------------------ @@ -235,10 +329,13 @@ prepare_release_fast: IMPORTSEED=$(TMPDIR)/seed.txt PRESEED=$(TMPDIR)/pre_seed.txt -$(SRCMERGED): $(SRC) $(OTHER_SRC) +$(SRCMERGED): $(EDIT_PREPROCESSED) $(OTHER_SRC) $(ROBOT) remove --input $< --select imports --trim false \ merge $(patsubst %, -i %, $(OTHER_SRC)) -o $@ +$(EDIT_PREPROCESSED): $(SRC) + $(ROBOT) convert --input $< --format ofn --output $@ + $(PRESEED): $(SRCMERGED) $(ROBOT) query -f csv -i $< --query ../sparql/terms.sparql $@.tmp &&\ cat $@.tmp | sort | uniq > $@ 
@@ -278,7 +375,7 @@ $(IMPORTDIR)/%_import.owl: $(MIRRORDIR)/%.owl $(IMPORTDIR)/%_terms_combined.txt if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \ extract -T $(IMPORTDIR)/$*_terms_combined.txt --force true --copy-ontology-annotations true --individuals include --method BOT \ query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi + $(ANNOTATE_CONVERT_FILE); fi .PRECIOUS: $(IMPORTDIR)/%_import.owl @@ -287,7 +384,7 @@ $(IMPORTDIR)/%_import.owl: $(MIRRORDIR)/%.owl $(IMPORTDIR)/%_terms_combined.txt $(IMPORTDIR)/chebi_import.owl: $(MIRRORDIR)/chebi.owl $(IMPORTDIR)/chebi_terms_combined.txt if [ $(IMP) = true ] && [ $(IMP_LARGE) = true ]; then $(ROBOT) extract -i $< -T $(IMPORTDIR)/chebi_terms_combined.txt --force true --copy-ontology-annotations true --individuals include --method BOT \ query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \ - annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi + $(ANNOTATE_CONVERT_FILE); fi .PHONY: refresh-imports @@ -318,84 +415,86 @@ IMP=true # Global parameter to bypass import generation MIR=true # Global parameter to bypass mirror generation IMP_LARGE=true # Global parameter to bypass handling of large imports +ifeq ($(strip $(MIR)),true) ## ONTOLOGY: iao .PHONY: mirror-iao .PRECIOUS: $(MIRRORDIR)/iao.owl mirror-iao: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/iao.owl --create-dirs -o $(MIRRORDIR)/iao.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/iao.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/iao.owl --create-dirs -o 
$(TMPDIR)/iao-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/iao-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: go .PHONY: mirror-go .PRECIOUS: $(MIRRORDIR)/go.owl mirror-go: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/go.owl --create-dirs -o $(MIRRORDIR)/go.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/go.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/go.owl --create-dirs -o $(TMPDIR)/go-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/go-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: ro .PHONY: mirror-ro .PRECIOUS: $(MIRRORDIR)/ro.owl mirror-ro: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/ro.owl --create-dirs -o $(MIRRORDIR)/ro.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/ro.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/ro.owl --create-dirs -o $(TMPDIR)/ro-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/ro-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: chebi .PHONY: mirror-chebi .PRECIOUS: $(MIRRORDIR)/chebi.owl +ifeq ($(strip $(IMP_LARGE)),true) mirror-chebi: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ] && [ $(IMP_LARGE) = true ]; then curl -L $(URIBASE)/chebi.owl.gz --create-dirs -o $(MIRRORDIR)/chebi.owl.gz --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/chebi.owl.gz -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/chebi.owl.gz --create-dirs -o $(MIRRORDIR)/chebi.owl.gz --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(MIRRORDIR)/chebi.owl.gz -o $(TMPDIR)/$@.owl +else +mirror-chebi: + @echo "Not refreshing chebi because refreshing large imports is disabled (IMP_LARGE=$(IMP_LARGE))." 
+endif ## ONTOLOGY: pato .PHONY: mirror-pato .PRECIOUS: $(MIRRORDIR)/pato.owl mirror-pato: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/pato.owl --create-dirs -o $(MIRRORDIR)/pato.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/pato.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/pato.owl --create-dirs -o $(TMPDIR)/pato-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/pato-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: bfo .PHONY: mirror-bfo .PRECIOUS: $(MIRRORDIR)/bfo.owl mirror-bfo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/bfo.owl --create-dirs -o $(MIRRORDIR)/bfo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/bfo.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/bfo.owl --create-dirs -o $(TMPDIR)/bfo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/bfo-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: xao .PHONY: mirror-xao .PRECIOUS: $(MIRRORDIR)/xao.owl mirror-xao: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/xao.owl --create-dirs -o $(MIRRORDIR)/xao.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/xao.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/xao.owl --create-dirs -o $(TMPDIR)/xao-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/xao-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: nbo .PHONY: mirror-nbo .PRECIOUS: $(MIRRORDIR)/nbo.owl mirror-nbo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/nbo.owl --create-dirs -o $(MIRRORDIR)/nbo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/nbo.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/nbo.owl --create-dirs -o $(TMPDIR)/nbo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i 
$(TMPDIR)/nbo-download.owl -o $(TMPDIR)/$@.owl $(MIRRORDIR)/%.owl: mirror-% | $(MIRRORDIR) - if [ $(IMP) = true ] && [ $(MIR) = true ] && [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\ + if [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\ cp $(TMPDIR)/mirror-$*.owl $@; fi; fi +else # MIR=false +$(MIRRORDIR)/%.owl: + @echo "Not refreshing $@ because the mirroring pipeline is disabled (MIR=$(MIR))." +endif @@ -417,7 +516,7 @@ $(SUBSETDIR)/%.obo: $(SUBSETDIR)/%.owl $(SUBSETDIR)/%.json: $(SUBSETDIR)/%.owl $(ROBOT) convert --input $< --check false -f json -o $@.tmp.json &&\ - jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json + mv $@.tmp.json $@ # --------------------------------------------- @@ -428,7 +527,7 @@ SPARQL_EXPORTS_ARGS = $(foreach V,$(SPARQL_EXPORTS),-s $(SPARQLDIR)/$(V).sparql # This combines all into one single command .PHONY: custom_reports -custom_reports: $(SRC) | $(REPORTDIR) +custom_reports: $(EDIT_PREPROCESSED) | $(REPORTDIR) ifneq ($(SPARQL_EXPORTS_ARGS),) $(ROBOT) query -f tsv --use-graphs true -i $< $(SPARQL_EXPORTS_ARGS) endif @@ -467,7 +566,7 @@ pattern_schema_checks dosdp_validation: $(TMPDIR)/pattern_schema_checks .PHONY: update_patterns update_patterns: download_patterns - cp -r $(TMPDIR)/dosdp/*.yaml $(PATTERNDIR)/dosdp-patterns + if [ -n "$$(find $(TMPDIR) -type f -path '$(TMPDIR)/dosdp/*.yaml')" ]; then cp -r $(TMPDIR)/dosdp/*.yaml $(PATTERNDIR)/dosdp-patterns; fi # This command is a workaround for the absence of -N and -i in wget of alpine (the one ODK depend on now).
# It downloads all patterns specified in external.txt @@ -492,8 +591,8 @@ DOSDP_OWL_FILES_DEFAULT = $(patsubst %.tsv, $(PATTERNDIR)/data/default/%.ofn, $( DOSDP_TERM_FILES_DEFAULT = $(patsubst %.tsv, $(PATTERNDIR)/data/default/%.txt, $(notdir $(wildcard $(PATTERNDIR)/data/default/*.tsv))) DOSDP_PATTERN_NAMES_DEFAULT = $(strip $(patsubst %.tsv,%, $(notdir $(wildcard $(PATTERNDIR)/data/default/*.tsv)))) -$(DOSDP_OWL_FILES_DEFAULT): $(SRC) $(DOSDP_TSV_FILES_DEFAULT) $(ALL_PATTERN_FILES) - if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=catalog-v001.xml \ +$(DOSDP_OWL_FILES_DEFAULT): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_DEFAULT) $(ALL_PATTERN_FILES) + if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \ --infile=$(PATTERNDIR)/data/default/ --template=$(PATTERNDIR)/dosdp-patterns --batch-patterns="$(DOSDP_PATTERN_NAMES_DEFAULT)" \ --ontology=$< --obo-prefixes=true --outfile=$(PATTERNDIR)/data/default; fi @@ -506,8 +605,8 @@ DOSDP_TERM_FILES_MANUAL = $(patsubst %.tsv, $(PATTERNDIR)/data/manual/%.txt, $(n DOSDP_TSV_FILES_MANUAL = $(wildcard $(PATTERNDIR)/data/manual/*.tsv) DOSDP_PATTERN_NAMES_MANUAL = $(strip $(patsubst %.tsv,%, $(notdir $(wildcard $(PATTERNDIR)/data/manual/*.tsv)))) -$(DOSDP_OWL_FILES_MANUAL): $(SRC) $(DOSDP_TSV_FILES_MANUAL) $(ALL_PATTERN_FILES) - if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_MANUAL}" ]; then $(DOSDPT) generate --catalog=catalog-v001.xml \ +$(DOSDP_OWL_FILES_MANUAL): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_MANUAL) $(ALL_PATTERN_FILES) + if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_MANUAL}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \ --infile=$(PATTERNDIR)/data/manual --template=$(PATTERNDIR)/dosdp-patterns/ --batch-patterns="$(DOSDP_PATTERN_NAMES_MANUAL)" \ --ontology=$< --obo-prefixes=true --outfile=$(PATTERNDIR)/data/manual; fi @@ -518,8 +617,8 @@ DOSDP_TERM_FILES_ANATOMY = $(patsubst %.tsv, 
$(PATTERNDIR)/data/anatomy/%.txt, $ DOSDP_TSV_FILES_ANATOMY = $(wildcard $(PATTERNDIR)/data/anatomy/*.tsv) DOSDP_PATTERN_NAMES_ANATOMY = $(strip $(patsubst %.tsv,%, $(notdir $(wildcard $(PATTERNDIR)/data/anatomy/*.tsv)))) -$(DOSDP_OWL_FILES_ANATOMY): $(SRC) $(DOSDP_TSV_FILES_ANATOMY) $(ALL_PATTERN_FILES) - if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_ANATOMY}" ]; then $(DOSDPT) generate --catalog=catalog-v001.xml \ +$(DOSDP_OWL_FILES_ANATOMY): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_ANATOMY) $(ALL_PATTERN_FILES) + if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_ANATOMY}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \ --infile=$(PATTERNDIR)/data/anatomy --template=$(PATTERNDIR)/dosdp-patterns/ --batch-patterns="$(DOSDP_PATTERN_NAMES_ANATOMY)" \ --ontology=$< --obo-prefixes=true --outfile=$(PATTERNDIR)/data/anatomy; fi # Generate template file seeds @@ -567,19 +666,19 @@ $(ONT)-base.obo: $(ONT)-base.owl $(ONT)-base.json: $(ONT)-base.owl $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ convert --check false -f json -o $@.tmp.json &&\ - jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json + mv $@.tmp.json $@ $(ONT)-full.obo: $(ONT)-full.owl $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo $(ONT)-full.json: $(ONT)-full.owl $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ convert --check false -f json -o $@.tmp.json &&\ - jq -S 'walk(if type == "array" then sort else . 
end)' $@.tmp.json > $@ && rm $@.tmp.json + mv $@.tmp.json $@ $(ONT)-simple.obo: $(ONT)-simple.owl $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo $(ONT)-simple.json: $(ONT)-simple.owl $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ convert --check false -f json -o $@.tmp.json &&\ - jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json + mv $@.tmp.json $@ # ---------------------------------------- # Release artefacts: main release artefacts # ---------------------------------------- @@ -590,41 +689,48 @@ $(ONT).owl: $(ONT)-full.owl $(ONT).obo: $(ONT).owl $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo -$(ONT).json: $(ONT)-full.owl +$(ONT).json: $(ONT).owl $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ - convert --check false -f json -o $@.tmp.json && \ - jq -S 'walk(if type == "array" then sort else . 
end)' $@.tmp.json > $@ && rm $@.tmp.json + convert --check false -f json -o $@.tmp.json &&\ + mv $@.tmp.json $@ # ----------------------------------------------------- # Release artefacts: variants (base, full, simple, etc) # ----------------------------------------------------- SHARED_ROBOT_COMMANDS = -$(ONTOLOGYTERMS): $(SRC) $(OTHER_SRC) - touch $(ONTOLOGYTERMS) && \ - $(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/xpo_terms.sparql $@ +$(ONTOLOGYTERMS): $(SRCMERGED) + $(ROBOT) query -f csv -i $< --query ../sparql/xpo_terms.sparql $@ -# base: OTHER sources of interest, such as definitions owl -$(ONT)-base.owl: $(SRC) $(OTHER_SRC) - $(ROBOT) remove --input $< --select imports --trim false \ - merge $(patsubst %, -i %, $(OTHER_SRC)) \ - $(SHARED_ROBOT_COMMANDS) annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \ +# ROBOT pipeline that merges imports, including components. +ROBOT_RELEASE_IMPORT_MODE=$(ROBOT) merge --input $< + +# ROBOT pipeline that removes imports, then merges components. This is for release artefacts that start from "base" +ROBOT_RELEASE_IMPORT_MODE_BASE=$(ROBOT) remove --input $< --select imports --trim false merge $(patsubst %, -i %, $(OTHER_SRC)) + +# base: A version of the ontology that does not include any externally imported axioms. 
+$(ONT)-base.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES) + $(ROBOT_RELEASE_IMPORT_MODE) \ + reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural --annotate-inferred-axioms False \ + relax \ + reduce -r ELK \ + remove --base-iri http://purl.obolibrary.org/obo/XPO_ --axioms external --preserve-structure false --trim false \ + $(SHARED_ROBOT_COMMANDS) \ + annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \ --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ --output $@.tmp.owl && mv $@.tmp.owl $@ - -# Full: The full artefacts with imports merged, reasoned -$(ONT)-full.owl: $(SRC) $(OTHER_SRC) $(IMPORT_FILES) - $(ROBOT) merge --input $< \ +# Full: The full artefacts with imports merged, reasoned. +$(ONT)-full.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES) + $(ROBOT_RELEASE_IMPORT_MODE) \ reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \ relax \ reduce -r ELK \ $(SHARED_ROBOT_COMMANDS) annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@ - # foo-simple: (edit->reason,relax,reduce,drop imports, drop every axiom which contains an entity outside the "namespaces of interest") # drop every axiom: filter --term-file keep_terms.txt --trim true # remove --select imports --trim false -$(ONT)-simple.owl: $(SRC) $(OTHER_SRC) $(SIMPLESEED) $(IMPORT_FILES) - $(ROBOT) merge --input $< $(patsubst %, -i %, $(OTHER_SRC)) \ - reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \ +$(ONT)-simple.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(SIMPLESEED) $(IMPORT_FILES) + $(ROBOT_RELEASE_IMPORT_MODE) \ + reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural --annotate-inferred-axioms False \ relax \ remove --axioms equivalent \ relax \ @@ -636,10 +742,19 @@ $(ONT)-simple.owl: 
$(SRC) $(OTHER_SRC) $(SIMPLESEED) $(IMPORT_FILES) # Debugging Tools # ---------------------------------------- -explain_unsat: $(SRC) - $(ROBOT) explain -i $(SRC) -M unsatisfiability --unsatisfiable random:10 --explanation $(TMPDIR)/$@.md +explain_unsat: $(EDIT_PREPROCESSED) + $(ROBOT) explain -i $< -M unsatisfiability --unsatisfiable random:10 --explanation $(TMPDIR)/$@.md + +RELEASE_ASSETS_AFTER_RELEASE=$(foreach n,$(RELEASE_ASSETS), ../../$(n)) +GHVERSION=v$(VERSION) + +.PHONY: public_release +public_release: + @test $(GHVERSION) + ls -alt $(RELEASE_ASSETS_AFTER_RELEASE) + gh release create $(GHVERSION) --title "$(VERSION) Release" --draft $(RELEASE_ASSETS_AFTER_RELEASE) --generate-notes # ---------------------------------------- # General Validation @@ -676,13 +791,17 @@ update_repo: -# Note to future generations: prepending ./ is a safety measure to ensure that -# the environment does not malicously set `CLEANFILES` to `\`. +# Note to future generations: computing the real path relative to the +# current directory is a way to ensure we only clean up directories that +# are located below the current directory, regardless of the contents of +# the *DIR variables. .PHONY: clean clean: $(MAKE) pattern_clean - [ -n "$(MIRRORDIR)" ] && [ $(MIRRORDIR) != "." ] && [ $(MIRRORDIR) != "/" ] && [ $(MIRRORDIR) != ".." ] && [ -d ./$(MIRRORDIR) ] && rm -rf ./$(MIRRORDIR)/* - [ -n "$(TMPDIR)" ] && [ $(TMPDIR) != "." ] && [ $(TMPDIR) != "/" ] && [ $(TMPDIR) != ".." ] && [ -d ./$(TMPDIR) ] && rm -rf ./$(TMPDIR)/* + for dir in $(MIRRORDIR) $(TMPDIR) $(UPDATEREPODIR) ; do \ + reldir=$$(realpath --relative-to=$$(pwd) $$dir) ; \ + case $$reldir in .*|"") ;; *) rm -rf $$reldir/* ;; esac \ + done rm -f $(CLEANFILES) .PHONY: help @@ -701,14 +820,16 @@ Core commands: * prepare_release_fast: Run the entire release pipeline without refreshing imports, recreating components or recompiling patterns. 
* update_repo: Update the ODK repository setup using the config file xpo-odk.yaml * test: Running all validation tests +* test_fast: Runs the test suite, but without updating imports or components * odkversion: Show the current version of the ODK Makefile and ROBOT. * clean: Delete all temporary files * help: Print ODK Usage information +* public_release: Uploads the release file to a release management system, such as GitHub releases. Must be configured. Imports management: * refresh-imports: Refresh all imports and mirrors. -* refresh-components: Refresh all components. +* recreate-components: Recreate all components. * no-mirror-refresh-imports: Refresh all imports without downloading mirrors. * refresh-imports-excluding-large: Refresh all imports and mirrors, but skipping the ones labelled as 'is_large'. * refresh-%: Refresh a single import, i.e. refresh-go will refresh 'imports/go_import.owl'. @@ -726,10 +847,11 @@ DOSDP templates Editor utilities: * validate_idranges: Make sure your ID ranges file is formatted correctly -* normalize_src: Load and safe your xpo-edit file after you to make sure its serialised correctly +* normalize_src: Load and save your xpo-edit file after editing to make sure it is serialised correctly * explain_unsat: If you have unsatisfiable classes, this command will create a markdown file (tmp/explain_unsat.md) which will explain all your unsatisfiable classes * validate-all-tsv: Check all your tsv files for possible problems in syntax. Use ALL_TSV_FILES variable to list files * validate-tsv: Check a tsv file for syntactic problems with tsvalid. Use TSV variable to pass filepath, e.g. make TSV=../my.tsv validate-tsv. +* release_diff: Create a diff between the current release and the new release Additional build commands (advanced users) * all: Run the entire pipeline (like prepare_release), but without copying the release files to the release directory.
@@ -737,10 +859,12 @@ Additional build commands (advanced users) * custom_reports: Generate all custom sparql reports you have configured in your xpo-odk.yaml file. * all_assets: Build all assets * show_assets: Print a list of all assets that would be build by the release pipeline +* all_mappings: Update all SSSOM mapping sets Additional QC commands (advanced users) * robot_reports: Run all configured ROBOT reports * validate_profile_%: Run an OWL2 DL profile validation check, for example validate_profile_xpo-edit.owl. +* reason_test: Run a basic reasoning test Examples: * sh run.sh make IMP=false prepare_release diff --git a/src/ontology/run.sh b/src/ontology/run.sh old mode 100644 new mode 100755 index 9a498dd9..873e9bdf --- a/src/ontology/run.sh +++ b/src/ontology/run.sh @@ -8,22 +8,97 @@ # The assumption is that you are working in the src/ontology folder; # we therefore map the whole repo (../..) to a docker volume. # +# To use singularity instead of docker, please issue +# export USE_SINGULARITY= +# before running this script. +# # See README-editors.md for more details. -IMAGE=${IMAGE:-odkfull} -ODK_JAVA_OPTS=-Xmx8G +set -e + +if [ -f run.sh.conf ]; then + . ./run.sh.conf +fi + +# Look for a GitHub token +if [ -n "$GH_TOKEN" ]; then + : +elif [ -f ../../.github/token.txt ]; then + GH_TOKEN=$(cat ../../.github/token.txt) +elif [ -f $XDG_CONFIG_HOME/ontology-development-kit/github/token ]; then + GH_TOKEN=$(cat $XDG_CONFIG_HOME/ontology-development-kit/github/token) +elif [ -f "$HOME/Library/Application Support/ontology-development-kit/github/token" ]; then + GH_TOKEN=$(cat "$HOME/Library/Application Support/ontology-development-kit/github/token") +fi + +# SSH agent socket +# On macOS, we cannot use $SSH_AUTH_SOCK directly, +# we need to use a "magic" socket instead. 
+case "$(uname)" in +Darwin) + ODK_SSH_AUTH_SOCKET=/run/host-services/ssh-auth.sock + ;; +*) + ODK_SSH_AUTH_SOCKET=$SSH_AUTH_SOCK + ;; +esac +ODK_SSH_BIND= +if [ -n "$ODK_SSH_AUTH_SOCKET" ]; then + ODK_SSH_BIND=",$ODK_SSH_AUTH_SOCKET:/run/host-services/ssh-auth.sock" +fi + +ODK_IMAGE=${ODK_IMAGE:-odkfull} +TAG_IN_IMAGE=$(echo $ODK_IMAGE | awk -F':' '{ print $2 }') +if [ -n "$TAG_IN_IMAGE" ]; then + # Override ODK_TAG env var if IMAGE already includes a tag + ODK_TAG=$TAG_IN_IMAGE + ODK_IMAGE=$(echo $ODK_IMAGE | awk -F':' '{ print $1 }') +fi +ODK_TAG=${ODK_TAG:-latest} +ODK_JAVA_OPTS=${ODK_JAVA_OPTS:--Xmx8G} ODK_DEBUG=${ODK_DEBUG:-no} +ODK_USER_ID=${ODK_USER_ID:-$(id -u)} +ODK_GROUP_ID=${ODK_GROUP_ID:-$(id -g)} + +# Convert OWLAPI_* environment variables to the OWLAPI as Java options +# See http://owlcs.github.io/owlapi/apidocs_4/org/semanticweb/owlapi/model/parameters/ConfigurationOptions.html +# for a list of allowed options +OWLAPI_OPTIONS_NAMESPACE=org.semanticweb.owlapi.model.parameters.ConfigurationOptions +for owlapi_var in $(env | sed -n s/^OWLAPI_//p) ; do + ODK_JAVA_OPTS="$ODK_JAVA_OPTS -D$OWLAPI_OPTIONS_NAMESPACE.${owlapi_var%=*}=${owlapi_var#*=}" +done + TIMECMD= if [ x$ODK_DEBUG = xyes ]; then # If you wish to change the format string, take care of using # non-breaking spaces (U+00A0) instead of normal spaces, to # prevent the shell from tokenizing the format string. - echo "Running ${IMAGE} with ${ODK_JAVA_OPTS} of memory for ROBOT and Java-based pipeline steps." + echo "Running obolibrary/${ODK_IMAGE}:${ODK_TAG} with '${ODK_JAVA_OPTS}' as options for ROBOT and other Java-based pipeline steps." 
TIMECMD="/usr/bin/time -f ### DEBUG STATS ###\nElapsed time: %E\nPeak memory: %M kb" fi +rm -f tmp/debug.log -docker run -v $PWD/../../:/work -w /work/src/ontology -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" --rm -ti obolibrary/$IMAGE $TIMECMD "$@" +VOLUME_BIND=$PWD/../../:/work$ODK_SSH_BIND +WORK_DIR=/work/src/ontology + +if [ -n "$ODK_BINDS" ]; then + VOLUME_BIND="$VOLUME_BIND,$ODK_BINDS" +fi + +if [ -n "$USE_SINGULARITY" ]; then + + singularity exec --cleanenv $ODK_SINGULARITY_OPTIONS \ + --env "ROBOT_JAVA_ARGS=$ODK_JAVA_OPTS,JAVA_OPTS=$ODK_JAVA_OPTS,SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock,ODK_USER_ID=$ODK_USER_ID,ODK_GROUP_ID=$ODK_GROUP_ID,ODK_DEBUG=$ODK_DEBUG" \ + --bind $VOLUME_BIND \ + -W $WORK_DIR \ + docker://obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@" +else + BIND_OPTIONS="-v $(echo $VOLUME_BIND | sed 's/,/ -v /')" + docker run $ODK_DOCKER_OPTIONS $BIND_OPTIONS -w $WORK_DIR \ + -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" -e SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock -e ODK_USER_ID=$ODK_USER_ID -e ODK_GROUP_ID=$ODK_GROUP_ID -e ODK_DEBUG=$ODK_DEBUG \ + --rm -ti obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@" +fi case "$@" in *update_repo*|*release*) diff --git a/src/ontology/xpo-odk.yaml b/src/ontology/xpo-odk.yaml index b77120cc..3196762a 100644 --- a/src/ontology/xpo-odk.yaml +++ b/src/ontology/xpo-odk.yaml @@ -3,6 +3,7 @@ title: "Xenopus Phenotype Ontology" github_org: obophenotype repo: xenopus-phenotype-ontology report_fail_on: ERROR +git_main_branch: master use_dosdps: TRUE dosdp_tools_options: "--obo-prefixes=true" namespaces: diff --git a/src/scripts/run-command.sh b/src/scripts/run-command.sh new file mode 100755 index 00000000..45d431d1 --- /dev/null +++ b/src/scripts/run-command.sh @@ -0,0 +1,4 @@ +#!/bin/sh +ODK_DEBUG_FILE=${ODK_DEBUG_FILE:-debug.log} +echo "Command: sh $@" >> $ODK_DEBUG_FILE +/usr/bin/time -a -o $ODK_DEBUG_FILE -f "Elapsed time: %E\nPeak memory: %M kb" /bin/sh "$@" diff --git 
a/src/scripts/update_repo.sh b/src/scripts/update_repo.sh index debbffa6..300a46fa 100644 --- a/src/scripts/update_repo.sh +++ b/src/scripts/update_repo.sh @@ -21,10 +21,11 @@ cp target/$OID/src/ontology/run.sh $SRCDIR/ontology/ cp -r target/$OID/src/sparql/* $SRCDIR/sparql/ mkdir -p $ROOTDIR/.github mkdir -p $ROOTDIR/.github/workflows -cp -n target/$OID/.github/workflows/qc.yml $ROOTDIR/.github/workflows/qc.yml +cp target/$OID/.github/workflows/qc.yml $ROOTDIR/.github/workflows/qc.yml + echo "WARNING: These files should be manually migrated: mkdocs.yaml, .gitignore, src/ontology/catalog.xml (if you added a new import or component)" -echo "WARNING: Your QC workflows have not been updated automatically. Please update the ODK version number(s) in .github/workflows/qc.yml." + echo "Ontology repository update successfully completed." \ No newline at end of file diff --git a/src/sparql/dc-properties-violation.sparql b/src/sparql/dc-properties-violation.sparql new file mode 100644 index 00000000..f2e9078d --- /dev/null +++ b/src/sparql/dc-properties-violation.sparql @@ -0,0 +1,11 @@ +# The purpose of this violation is to make sure people update +# from using the deprecated DC Elements 1.1 namespace (http://purl.org/dc/elements/1.1/) +# to using the recommended DC Terms namespace (http://purl.org/dc/terms/) +# See also discussion on https://github.com/oborel/obo-relations/pull/692 + +SELECT ?term ?predicate WHERE { + ?term ?predicate ?value . 
+ FILTER(STRSTARTS(STR(?predicate), "http://purl.org/dc/elements/1.1/")) + FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) +} + diff --git a/src/sparql/edges.sparql b/src/sparql/edges.sparql index 5fec04ea..edf658bf 100644 --- a/src/sparql/edges.sparql +++ b/src/sparql/edges.sparql @@ -1,4 +1,3 @@ -prefix obo: prefix owl: prefix rdfs: prefix rdf: diff --git a/src/sparql/inject-subset-declaration.ru b/src/sparql/inject-subset-declaration.ru index 18a8430d..788313b7 100644 --- a/src/sparql/inject-subset-declaration.ru +++ b/src/sparql/inject-subset-declaration.ru @@ -7,5 +7,5 @@ INSERT { ?y rdfs:subPropertyOf ?y . FILTER(isIRI(?y)) - FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)")) + FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)") || regex(str(?y),"^(http://purl.obolibrary.org/obo)")) } \ No newline at end of file diff --git a/src/sparql/inject-synonymtype-declaration.ru b/src/sparql/inject-synonymtype-declaration.ru index 9906089f..cad57edb 100644 --- a/src/sparql/inject-synonymtype-declaration.ru +++ b/src/sparql/inject-synonymtype-declaration.ru @@ -7,5 +7,5 @@ INSERT { ?y rdfs:subPropertyOf ?y . 
FILTER(isIRI(?y)) - FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)")) + FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)") || regex(str(?y),"^(http://purl.obolibrary.org/obo)")) } \ No newline at end of file diff --git a/src/sparql/iri-range-violation.sparql b/src/sparql/iri-range-violation.sparql index f205aaa0..5c0318bb 100644 --- a/src/sparql/iri-range-violation.sparql +++ b/src/sparql/iri-range-violation.sparql @@ -13,7 +13,7 @@ WHERE { oboInOwl:inSubset dcterms:contributor } ?term ?property ?value . - FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_") || STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) + FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) FILTER (!isIRI(?value)) } diff --git a/src/sparql/label-with-iri-violation.sparql b/src/sparql/label-with-iri-violation.sparql index af3ce12e..58dd77de 100644 --- a/src/sparql/label-with-iri-violation.sparql +++ b/src/sparql/label-with-iri-violation.sparql @@ -4,6 +4,6 @@ SELECT ?term ?value WHERE { ?term rdfs:label ?value . FILTER (REGEX(?value, "http[s]?[:]")) - FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_") || STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) + FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) } diff --git a/src/sparql/multiple-replaced_by-violation.sparql b/src/sparql/multiple-replaced_by-violation.sparql new file mode 100644 index 00000000..65bb8de2 --- /dev/null +++ b/src/sparql/multiple-replaced_by-violation.sparql @@ -0,0 +1,12 @@ +PREFIX replaced_by: + +SELECT DISTINCT ?entity ?property ?value WHERE { + VALUES ?property { + replaced_by: + } + ?entity ?property ?value1 . + ?entity ?property ?value2 . 
+ FILTER(?value1!=?value2) + BIND(CONCAT(str(?value1), CONCAT("|", str(?value2))) as ?value) +} + diff --git a/src/sparql/owldef-self-reference-violation.sparql b/src/sparql/owldef-self-reference-violation.sparql index b3576175..eb577aa4 100644 --- a/src/sparql/owldef-self-reference-violation.sparql +++ b/src/sparql/owldef-self-reference-violation.sparql @@ -7,6 +7,6 @@ SELECT ?term WHERE { { ?term owl:equivalentClass [ owl:intersectionOf [ rdf:rest*/rdf:first ?term ] ] } UNION { ?term owl:equivalentClass [ owl:intersectionOf [ rdf:rest*/rdf:first [ owl:someValuesFrom ?term ] ] ] } - FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_") || STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) + FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) } diff --git a/src/sparql/simple-seed.sparql b/src/sparql/simple-seed.sparql index a96d2634..247fbde8 100644 --- a/src/sparql/simple-seed.sparql +++ b/src/sparql/simple-seed.sparql @@ -1,5 +1,4 @@ prefix owl: -prefix obo: SELECT DISTINCT ?cls WHERE { diff --git a/src/sparql/subsets-labeled.sparql b/src/sparql/subsets-labeled.sparql index 7bc992f1..5ca7e31c 100644 --- a/src/sparql/subsets-labeled.sparql +++ b/src/sparql/subsets-labeled.sparql @@ -1,5 +1,4 @@ prefix oio: -prefix def: prefix owl: prefix inSubset: prefix rdfs: diff --git a/src/sparql/xpo_terms.sparql b/src/sparql/xpo_terms.sparql index fe5c3fe8..e7a2c020 100644 --- a/src/sparql/xpo_terms.sparql +++ b/src/sparql/xpo_terms.sparql @@ -3,5 +3,5 @@ WHERE { { ?s1 ?p1 ?term . } UNION { ?term ?p2 ?o2 . } - FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_") || STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) + FILTER(isIRI(?term) && (STRSTARTS(str(?term), "http://purl.obolibrary.org/obo/XPO_"))) }