From fa66e99202e9097206c7c1affadc942b72e586e3 Mon Sep 17 00:00:00 2001
From: Alex Nelson <alexander.nelson@nist.gov>
Date: Tue, 14 Mar 2023 22:11:37 -0400
Subject: [PATCH] Integrate catalog construction into CI chain

The construction script now handles multiple input ontology files, with
the requirement that they all reside in the same directory.  Interfaces
have also been added to handle imported, possibly non-CDO ontology
references in two ways:

* With a TSV file mapping ontology IRIs or version IRIs to files.
* With optional references to (effectively imported) `catalog-v001.xml`
  files.

Another behavior change is implemented: the focus ontologies are now
also added to the `catalog-v001.xml` file, in part to support the case
where multiple graph files are in one directory, and in part to support
re-consumption of `catalog-v001.xml` by the script that generates it.

The rationales for how to handle ontology-to-file mappings outside the
scope of UCO (in both upstream and downstream directions) include:

* Symbolic links could have been used to pool all file references into
  the `/dependencies` directory.  However, Windows users who run
  `git clone` without symbolic links enabled on their system would
  encounter confusing errors.
  - This also would not interact well with consumers of the catalog
    script outside of UCO (e.g. CASE).
* A Makefile could have been written to normalize the dependent
  ontology files into the same Turtle style (or even away from RDF/XML,
  which the Collections Ontology currently uses as its sole format).
  However, this would again be a point of difficulty for Windows users,
  who would have to run `make` to create the files referenced in the
  catalog XML.
* Copying files into a Git repository introduces code-drift issues that
  are difficult to manage.  When the copied files are themselves
  tracked in Git, copying also runs counter to the purpose of Git
  submodules.

This patch assumes that Git submodules and recursive cloning are a
reasonable minimal requirement for full local-file ontology
interaction.

The catalog-generating script, as of this patch, has been tested
(offline) with CASE and CASE-Corpora as users, via a submodule chain
starting at CASE-Corpora.

The `CONTRIBUTE.md` file has also been updated to add usage
documentation, and to fix a copy-paste error from some time ago.

A follow-on patch will regenerate Make-managed files.

References:
* https://github.com/ucoProject/UCO/issues/449

Signed-off-by: Alex Nelson <alexander.nelson@nist.gov>
---
 CONTRIBUTE.md                                 |  13 +-
 Makefile                                      |   3 +-
 etc/dependency_files.tsv                      |   2 +
 .../uco/master => etc}/domain_directories.tsv |   0
 ontology/co/Makefile                          |  27 +-
 ontology/uco/master/Makefile                  |  38 ---
 src/create-catalog-v001.xml.py                | 323 +++++++++++++-----
 src/review.mk                                 |  25 +-
 8 files changed, 290 insertions(+), 141 deletions(-)
 create mode 100644 etc/dependency_files.tsv
 rename {ontology/uco/master => etc}/domain_directories.tsv (100%)
 delete mode 100644 ontology/uco/master/Makefile
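[Note kept below the scissors line, so `git am` discards it: the
following is an illustrative sketch, not part of the applied patch.  It
shows how a downstream repository such as CASE might chain onto an
already-generated UCO catalog via the new `--catalog-xml` flag.  The
downstream paths and the `case.ttl` basename are assumptions for
illustration only.]

    # Hypothetical downstream layout; adjust paths to the actual repository.
    rm -f catalog-v001.xml  # out_xml is opened in exclusive-create ("x") mode.
    source venv/bin/activate
    python3 src/create-catalog-v001.xml.py \
      --catalog-xml dependencies/UCO/ontology/uco/master/catalog-v001.xml \
      catalog-v001.xml \
      etc/domain_directories.tsv \
      etc/dependency_files.tsv \
      . \
      case.ttl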
diff --git a/CONTRIBUTE.md b/CONTRIBUTE.md
index 4bacd610..7bf87fd2 100644
--- a/CONTRIBUTE.md
+++ b/CONTRIBUTE.md
@@ -1,6 +1,17 @@
-# Contributing to the CASE ontology
+# Contributing to the UCO ontology
 
 ## Testing prerelease states
 
 Practices for users interested in testing prerelease states are documented on the [Cyber Domain Ontology website](https://cyberdomainontology.org/ontology/development/#testing-prereleases).
+
+
+## Using Protégé catalog files
+
+Interested users of `catalog-v001.xml` files, e.g. users of [Protégé](https://protege.stanford.edu/), can use these XML files to interact with UCO as local files.  To do so, UCO must be `git-clone`'d with Git submodules also cloned.  This can be done with the following commands:
+
+* `git clone --recursive https://github.com/ucoProject/UCO.git` (all users)
+* `git clone https://github.com/ucoProject/UCO.git ; make` (macOS or Linux users)
+  - The narrowest setup operation strictly for purposes of supporting the `catalog-v001.xml` files is to run `make .git_submodule_init.done.log` instead of the default `make all`.
+
+Protégé should not require network connectivity to load imported ontologies after the above commands are run.
diff --git a/Makefile b/Makefile
index 31b95064..de556a38 100644
--- a/Makefile
+++ b/Makefile
@@ -16,7 +16,8 @@ SHELL := /bin/bash
 PYTHON3 ?= $(shell which python3)
 
 all: \
-	.lib.done.log
+	.lib.done.log \
+	.venv.done.log
 	$(MAKE) \
 	  --directory ontology
 
diff --git a/etc/dependency_files.tsv b/etc/dependency_files.tsv
new file mode 100644
index 00000000..876633b6
--- /dev/null
+++ b/etc/dependency_files.tsv
@@ -0,0 +1,2 @@
+http://purl.org/co/	${top_srcdir}/dependencies/collections-ontology/collections.owl
+http://purl.org/spar/error	${top_srcdir}/dependencies/error/docs/current/error.ttl
diff --git a/ontology/uco/master/domain_directories.tsv b/etc/domain_directories.tsv
similarity index 100%
rename from ontology/uco/master/domain_directories.tsv
rename to etc/domain_directories.tsv
diff --git a/ontology/co/Makefile b/ontology/co/Makefile
index 82b6e784..f4ba8dea 100644
--- a/ontology/co/Makefile
+++ b/ontology/co/Makefile
@@ -30,7 +30,8 @@ check_reference_basenames := $(foreach ttl_basename,$(ttl_basenames),.check-$(tt
 check_targets := $(foreach ttl_basename,$(ttl_basenames),check-$(ttl_basename))
 
 all: \
-	$(check_reference_basenames)
+	$(check_reference_basenames) \
+	catalog-v001.xml
 
 .check-%.ttl: \
 	%.ttl \
@@ -43,8 +44,25 @@ all: \
 	  --target-format turtle
 	mv $@_ $@
 
+catalog-v001.xml: \
+	$(top_srcdir)/.venv.done.log \
+	$(top_srcdir)/etc/domain_directories.tsv \
+	$(top_srcdir)/etc/dependency_files.tsv \
+	$(top_srcdir)/src/create-catalog-v001.xml.py \
+	$(ttl_basenames)
+	rm -f _$@
+	source $(top_srcdir)/venv/bin/activate \
+	  && python3 $(top_srcdir)/src/create-catalog-v001.xml.py \
+	    _$@ \
+	    $(top_srcdir)/etc/domain_directories.tsv \
+	    $(top_srcdir)/etc/dependency_files.tsv \
+	    "$(top_srcdir)" \
+	    $(ttl_basenames)
+	mv _$@ $@
+
 check: \
-	$(check_targets)
+	$(check_targets) \
+	catalog-v001.xml
 
 # Reminder: diff exits non-0 on finding any differences.
 # Reminder: The $^ automatic Make variable is the name of all recipe prerequisites.
@@ -55,5 +73,6 @@ check-%.ttl: \
 	|| (echo "ERROR:ontology/co/Makefile:The local $< does not match the normalized version.  If the above reported changes look fine, run 'cp .check-$< $<' while in the sub-folder ontology/co/ to get a file ready to commit to Git." >&2 ; exit 1)
 
 clean:
-	@rm -f $(check_reference_basenames)
-
+	@rm -f \
+	  $(check_reference_basenames) \
+	  catalog-v001.xml
diff --git a/ontology/uco/master/Makefile b/ontology/uco/master/Makefile
deleted file mode 100644
index 8bef57c6..00000000
--- a/ontology/uco/master/Makefile
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/make -f
-
-# This software was developed at the National Institute of Standards
-# and Technology by employees of the Federal Government in the course
-# of their official duties.  Pursuant to title 17 Section 105 of the
-# United States Code this software is not subject to copyright
-# protection and is in the public domain.  NIST assumes no
-# responsibility whatsoever for its use by other parties, and makes
-# no guarantees, expressed or implied, about its quality,
-# reliability, or any other characteristic.
-#
-# We would appreciate acknowledgement if the software is used.
-
-SHELL := /bin/bash
-
-top_srcdir := $(shell cd ../../.. ; pwd)
-
-all: \
-	catalog-v001.xml
-
-catalog-v001.xml: \
-	$(top_srcdir)/src/create-catalog-v001.xml.py \
-	$(top_srcdir)/.venv.done.log
-	rm -f _$@
-	source $(top_srcdir)/venv/bin/activate \
-	  && python3 $(top_srcdir)/src/create-catalog-v001.xml.py \
-	    _$@ \
-	    domain_directories.tsv \
-	    "$(top_srcdir)" \
-	    uco.ttl
-	mv _$@ $@
-
-check: \
-	catalog-v001.xml
-
-clean:
-	@rm -f \
-	  catalog-v001.xml
diff --git a/src/create-catalog-v001.xml.py b/src/create-catalog-v001.xml.py
index aad15a79..a3b0be5b 100644
--- a/src/create-catalog-v001.xml.py
+++ b/src/create-catalog-v001.xml.py
@@ -14,7 +14,26 @@
 #
 # We would appreciate acknowledgement if the software is used.
 
-__version__ = "0.0.3"
+"""
+This program constructs a catalog-v001.xml file, initially implemented
+to satisfy local-file needs of the Protégé ontology editor.  The
+resulting catalog file lets a user of the XML file interact with their
+ontology, even spread across multiple files, without requesting network
+resources.  This is beneficial, for instance, in tracking local edits to
+resources otherwise stored online.  It is also beneficial when networked
+resources are not available, such as due to service interruptions or
+link rot.
+
+One catalog-v001.xml file will generally support only its housing
+directory in a source code hierarchy.  The catalog file is built to make
+relative path references to every OWL-imported IRI in the transitive
+import closure of all of the ontology graph files in the directory.
+Once the catalog file is generated, a user should be able to open a
+sibling graph file in the same directory and have Protégé load without
+making network requests for ontology graphs.
+"""
+
+__version__ = "0.1.0"
 
 import argparse
 import csv
@@ -31,23 +50,37 @@
 NS_RDF = RDF
 
-# XML prolog, as generated by Protege.
+# XML prolog, as generated by Protégé.
 XML_VERSION_INFO = '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'
 
 
 def main() -> None:
     parser = argparse.ArgumentParser()
     parser.add_argument("--debug", action="store_true")
+    parser.add_argument(
+        "--catalog-xml",
+        help="A generated catalog-v001.xml file for some dependent or imported ontology.  This should be supplied for ontologies not covered by the dependency_files_tsv argument, nor meant to be crawled upon using domain_directories_tsv.  (For instance, this could be used to import relative file references for ontologies tracked as Git submodules.)",
+        action="append",
+    )
     # "x" mode - exclusive creation.
     # https://docs.python.org/3/library/functions.html#open
     parser.add_argument("out_xml", type=argparse.FileType("x"))
     parser.add_argument(
-        "roots_tsv",
-        help="A two-column file, with column 1 being a string prefix in-common to ontology prefix IRIs, and column 2 being a file system directory relative to top_srcdir that is the root directory housing that ontology's files.",
+        "domain_directories_tsv",
+        help="A two-column file, with column 1 being a string prefix in-common to ontology prefix IRIs, and column 2 being a file system directory relative to top_srcdir that is the root directory housing that ontology's files.  Directories specified in this file will be recursively walked to discover ontology graph files.  This file may be empty, but it must exist.",
         type=argparse.FileType("r"),
     )
-    parser.add_argument("top_srcdir")
-    parser.add_argument("in_ttl")
+    parser.add_argument(
+        "dependency_files_tsv",
+        help="A two-column file, with column 1 being an ontology reference IRI (ontology IRI or version IRI), and column 2 being a local, version-controlled file relative to top_srcdir that houses the corresponding ontology data.  This file may be empty, but it must exist.",
+        type=argparse.FileType("r"),
+    )
+    parser.add_argument("top_srcdir", help="The root directory of the Git repository for this ontology.  In the two TSV arguments, the variable top_srcdir is substituted with this value.")
+    parser.add_argument(
+        "in_ttl",
+        help="Input graph files.  Due to the target use case of Protégé Catalog files, these are required to be in the same directory.",
+        nargs="+",
+    )
     args = parser.parse_args()
 
     logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
 
@@ -57,34 +90,49 @@ def main() -> None:
         raise FileNotFoundError(args.top_srcdir)
     if not top_srcdir_abspath.is_dir():
         raise NotADirectoryError(args.top_srcdir)
+    logging.debug("top_srcdir_abspath = %r.", top_srcdir_abspath)
 
-    focus_graph_abspath = Path(args.in_ttl).resolve()
     focus_graph = Graph()
-    focus_graph.parse(str(focus_graph_abspath))
-    focus_graph_srcdir_abspath = focus_graph_abspath.parent
-    focus_graph_relpath = focus_graph_abspath.relative_to(top_srcdir_abspath)
-    logging.debug(focus_graph_relpath)
-    top_srcdir_relpath = Path(os.path.relpath(top_srcdir_abspath, focus_graph_abspath))
-    logging.debug(top_srcdir_relpath)
+    focus_graph_srcdir_abspaths: Set[Path] = set()
+    for in_ttl in args.in_ttl:
+        focus_graph_abspath = Path(in_ttl).resolve()
+        focus_graph.parse(str(focus_graph_abspath))
+        focus_graph_srcdir_abspaths.add(focus_graph_abspath.parent)
+    if len(focus_graph_srcdir_abspaths) > 1:
+        for focus_graph_srcdir_abspath_no, focus_graph_srcdir_abspath in enumerate(
+            sorted(focus_graph_srcdir_abspaths)
+        ):
+            logging.error(
+                "%d: %s",
+                1 + focus_graph_srcdir_abspath_no,
+                str(focus_graph_srcdir_abspath),
+            )
+        raise ValueError(
+            "Input graphs are required to be in the same directory.  Found them in %d directories."
+            % len(focus_graph_srcdir_abspaths)
+        )
+    focus_graph_srcdir_abspath = sorted(focus_graph_srcdir_abspaths)[0]
 
-    logging.debug(os.path.commonpath([top_srcdir_abspath, focus_graph_abspath]))
+    top_srcdir_relpath = Path(
+        os.path.relpath(top_srcdir_abspath, focus_graph_srcdir_abspath)
+    )
+    logging.debug("top_srcdir_relpath = %r.", top_srcdir_relpath)
 
-    # Determine sole focus ontology IRI.  Fail if there is not exactly 1 found.
+    # Determine focus ontology IRIs.
     n_focus_ontologies: Set[URIRef] = set()
     for triple in focus_graph.triples((None, NS_RDF.type, NS_OWL.Ontology)):
         if isinstance(triple[0], URIRef):
            n_focus_ontologies.add(triple[0])
     if len(n_focus_ontologies) < 1:
         raise ValueError("Found no focus ontology IRI.")
-    if len(n_focus_ontologies) > 1:
-        # TODO - Add --focus-iri flag?
-        raise NotImplementedError("Found multiple ontology IRIs to use as focus.")
-    n_focus_ontology: URIRef = sorted(n_focus_ontologies)[0]
+
+    # Free focus-graph memory.
+    del focus_graph
 
     # Read TSV to get domain prefixes' housing directories.
     ontology_string_prefix_to_domain_directory: Dict[str, Path] = dict()
-    reader = csv.reader(args.roots_tsv, delimiter="\t")
+    reader = csv.reader(args.domain_directories_tsv, delimiter="\t")
     for row in reader:
         ontology_string_prefix = row[0]
         domain_directory_str = row[1].replace("${top_srcdir}", str(top_srcdir_abspath))
@@ -96,13 +144,34 @@ def main() -> None:
         ontology_string_prefix_to_domain_directory[
             ontology_string_prefix
         ] = domain_directory
-    logging.debug(ontology_string_prefix_to_domain_directory)
+    logging.debug(
+        "ontology_string_prefix_to_domain_directory = %r.",
+        ontology_string_prefix_to_domain_directory,
+    )
 
     # Walk domain directories to associate ontology reference IRIs with backing files, and to build imports graph.
+    #
     # Definition, possibly specialized to just this script:
-    # An ontology reference IRI is either an ontology IRI or a versionIRI of an ontology.
+    # An "ontology reference IRI" is either an ontology IRI or a versionIRI of an ontology.
     imports_graph = Graph()
     n_ontology_reference_to_backing_file: Dict[URIRef, Path] = dict()
+
+    def _load_graph(graph_file_path: Path) -> None:
+        tmp_graph = Graph()
+        logging.debug("graph_file_path = %r.", graph_file_path)
+        tmp_graph.parse(str(graph_file_path))
+        for triple in tmp_graph.triples((None, NS_RDF.type, NS_OWL.Ontology)):
+            assert isinstance(triple[0], URIRef)
+            n_ontology_reference_to_backing_file[triple[0]] = graph_file_path
+            imports_graph.add(triple)
+        for triple in tmp_graph.triples((None, NS_OWL.imports, None)):
+            imports_graph.add(triple)
+        for triple in tmp_graph.triples((None, NS_OWL.versionIRI, None)):
+            assert isinstance(triple[2], URIRef)
+            n_ontology_reference_to_backing_file[triple[2]] = graph_file_path
+            imports_graph.add(triple)
+
+    # Do deep walk for domain directories.
     for domain_directory in ontology_string_prefix_to_domain_directory.values():
         for dirpath, dirnames, filenames in os.walk(str(domain_directory)):
             for filename in filenames:
@@ -114,94 +183,160 @@ def main() -> None:
                     continue
                 dirpath_path = Path(dirpath)
                 graph_filepath = dirpath_path / filename
-                tmp_graph = Graph()
-                tmp_graph.parse(str(graph_filepath))
-                for triple in tmp_graph.triples((None, NS_RDF.type, NS_OWL.Ontology)):
-                    assert isinstance(triple[0], URIRef)
-                    n_ontology_reference_to_backing_file[triple[0]] = graph_filepath
-                    imports_graph.add(triple)
-                for triple in tmp_graph.triples((None, NS_OWL.imports, None)):
-                    imports_graph.add(triple)
-                for triple in tmp_graph.triples((None, NS_OWL.versionIRI, None)):
-                    assert isinstance(triple[2], URIRef)
-                    n_ontology_reference_to_backing_file[triple[2]] = graph_filepath
-                    imports_graph.add(triple)
-    logging.debug(len(imports_graph))
-    logging.debug(n_ontology_reference_to_backing_file)
-
-    unversioned_iri_imports_graph = Graph()
-    query = """\
-SELECT ?nImportingOntology ?nImportedOntology
-WHERE {
-  ?nImportingOntology
-    owl:imports ?nVersionIRI ;
-    .
-  ?nImportedOntology
-    owl:versionIRI ?nVersionIRI ;
-    .
-}
-"""
-    for versioned_iri_result in imports_graph.query(query):
-        unversioned_iri_imports_graph.add(
-            (versioned_iri_result[0], NS_OWL.imports, versioned_iri_result[1])
-        )
-    query = """\
-SELECT ?nImportingOntology ?nImportedOntology
-WHERE {
-  ?nImportingOntology
-    owl:imports ?nImportedOntology ;
-    .
-  ?nImportedOntology
-    a owl:Ontology ;
-    .
-}
-"""
-    for unversioned_iri_result in imports_graph.query(query):
-        unversioned_iri_imports_graph.add(
-            (unversioned_iri_result[0], NS_OWL.imports, unversioned_iri_result[1])
-        )
-    logging.debug(len(unversioned_iri_imports_graph))
+                _load_graph(graph_filepath)
+
+    # Do direct imports from dependency file map.
+    reader = csv.reader(args.dependency_files_tsv, delimiter="\t")
+    for row in reader:
+        logging.debug("row = %r.", row)
+        n_ontology_reference = URIRef(row[0])
+        graph_file_name = row[1].replace("${top_srcdir}", str(top_srcdir_abspath))
+        graph_file_path = Path(graph_file_name)
+        if not graph_file_path.exists():
+            raise FileNotFoundError(graph_file_path)
+        if graph_file_path.is_dir():
+            raise IsADirectoryError(graph_file_path)
+        n_ontology_reference_to_backing_file[n_ontology_reference] = graph_file_path
+        _load_graph(graph_file_path)
+
+    # Inherit prior catalog files.
+    catalog_paths: Set[Path] = set()
+    if args.catalog_xml:
+        for catalog_file_name in args.catalog_xml:
+            catalog_path = Path(catalog_file_name).resolve()
+            logging.debug("catalog_path = %r.", catalog_path)
+            if not catalog_path.exists():
+                raise FileNotFoundError(catalog_file_name)
+            if catalog_path.name != "catalog-v001.xml":
+                logging.error("catalog_file_name = %r.", catalog_file_name)
+                raise FileNotFoundError(
+                    'Expecting catalog file to be named "catalog-v001.xml".'
+                )
+            catalog_paths.add(catalog_path)
+
+    for catalog_path in sorted(catalog_paths):
+        logging.debug("catalog_path = %r.", catalog_path)
+        catalogued_directory_path = catalog_path.parent
+        # Load graph files accompanying this catalog-v001.xml.
+        for file_path in catalogued_directory_path.iterdir():
+            file_basename = file_path.name
+            # Skip build files (syntax normalization checks).
+            if file_basename.startswith("."):
+                continue
+            # Restrict to Turtle files.
+            if not file_basename.endswith(".ttl"):
+                continue
+            _load_graph(file_path)
+        # Use catalog-v001.xml to find further graph files.
+        tree = ETree.parse(catalog_path)
+        for child in tree.getroot():
+            logging.debug("child.attrib = %r.", child.attrib)
+            logging.debug("child.tag = %r.", child.tag)
+            if child.tag != "{urn:oasis:names:tc:entity:xmlns:xml:catalog}uri":
+                continue
+            if child.attrib["uri"].startswith("http:"):
+                continue
+            if child.attrib["uri"].startswith("https:"):
+                continue
+            if child.attrib["uri"].startswith("urn:"):
+                continue
+            n_ontology_reference = URIRef(child.attrib["name"])
+            backing_ontology_path = catalogued_directory_path / child.attrib["uri"]
+            logging.debug("backing_ontology_path = %r.", backing_ontology_path)
+            if not backing_ontology_path.exists():
+                logging.info(
+                    "catalogued_directory_path = %r.", catalogued_directory_path
+                )
+                logging.info('child.attrib["uri"] = %r.', child.attrib["uri"])
+                raise FileNotFoundError("Unable to find referenced ontology file.")
+            if not backing_ontology_path.is_file():
+                logging.info(
+                    "catalogued_directory_path = %r.", catalogued_directory_path
+                )
+                logging.info('child.attrib["uri"] = %r.', child.attrib["uri"])
+                raise ValueError("Referenced ontology file path is not regular file.")
+            n_ontology_reference_to_backing_file[
+                n_ontology_reference
+            ] = backing_ontology_path.resolve()
+            _load_graph(backing_ontology_path)
+
+    logging.debug("len(imports_graph) = %d.", len(imports_graph))
+    if args.debug:
+        logging.debug("n_ontology_reference_to_backing_file:")
+        for n_ontology_reference in sorted(n_ontology_reference_to_backing_file.keys()):
+            logging.debug(
+                "%r -> %r.",
+                n_ontology_reference,
+                n_ontology_reference_to_backing_file[n_ontology_reference],
+            )
 
     n_imported_iri_to_relative_backing_path: Dict[URIRef, Path] = dict()
 
     def _map_n_ontology_reference(n_ontology_reference: URIRef) -> None:
-        # Handle base case - node visited.
-        if n_ontology_reference in n_imported_iri_to_relative_backing_path:
-            return
+        logging.debug("n_ontology_reference = %r.", n_ontology_reference)
+        ontology_reference_backing_file_abspath = n_ontology_reference_to_backing_file[
+            n_ontology_reference
+        ]
+        ontology_reference_backing_file_relpath = Path(
+            os.path.relpath(
+                ontology_reference_backing_file_abspath, focus_graph_srcdir_abspath
+            )
+        )
+        n_imported_iri_to_relative_backing_path[
+            n_ontology_reference
+        ] = ontology_reference_backing_file_relpath
         n_imported_iris: Set[URIRef] = set()
-        for triple in imports_graph.triples(
-            (n_ontology_reference, NS_OWL.imports, None)
+        for result in imports_graph.query(
+            """\
+SELECT ?nImportIRI
+WHERE {
+  {
+    ?nOntologyReference
+      owl:imports+ ?nImportIRI ;
+      .
+  }
+  UNION
+  {
+    ?nOntology
+      owl:imports+ ?nImportIRI ;
+      owl:versionIRI ?nOntologyReference ;
+      .
+  }
+}
+""",
+            initBindings={"nOntologyReference": n_ontology_reference},
         ):
-            assert isinstance(triple[2], URIRef)
-            n_imported_iri = triple[2]
+            assert isinstance(result[0], URIRef)
+            n_imported_iri = result[0]
             n_imported_iris.add(n_imported_iri)
-            imported_iri_backing_file_abspath = n_ontology_reference_to_backing_file[
-                n_imported_iri
-            ]
-            imported_iri_backing_file_relpath = Path(
-                os.path.relpath(
-                    imported_iri_backing_file_abspath, focus_graph_srcdir_abspath
-                )
-            )
-            n_imported_iri_to_relative_backing_path[
-                n_imported_iri
-            ] = imported_iri_backing_file_relpath
-        # Recurse.
-        for n_imported_iri in n_imported_iris:
-            _map_n_ontology_reference(n_imported_iri)
+        # Handle base case - cut mapped nodes.
+        logging.debug("n_imported_iris = %r.", n_imported_iris)
+        n_imported_unvisited_iris = n_imported_iris - {
+            x for x in n_imported_iri_to_relative_backing_path.keys()
+        }
+        logging.debug("n_imported_unvisited_iris = %r.", n_imported_unvisited_iris)
+        # Recurse, because owl:imports could have a versionIRI as its object.
+        for n_imported_unvisited_iri in n_imported_unvisited_iris:
+            _map_n_ontology_reference(n_imported_unvisited_iri)
 
-    _map_n_ontology_reference(n_focus_ontology)
-    logging.debug(n_imported_iri_to_relative_backing_path)
+    for n_focus_ontology in n_focus_ontologies:
+        _map_n_ontology_reference(n_focus_ontology)
+
+    if args.debug:
+        logging.debug("n_imported_iri_to_relative_backing_path:")
+        for n_imported_iri in sorted(n_imported_iri_to_relative_backing_path.keys()):
+            logging.debug(
+                "* %r -> %r.",
+                n_imported_iri,
+                n_imported_iri_to_relative_backing_path[n_imported_iri],
+            )
 
     # Create catalog XML tree.
     xml_root = ETree.Element("catalog")
-    # Mimic attributes for the root node from exemplar generated by Protege.
+    # Mimic attributes for the root node from exemplar generated by Protégé.
     xml_root.attrib = {
         "prefer": "public",
         "xmlns": "urn:oasis:names:tc:entity:xmlns:xml:catalog",
     }
-    # Sort catalog entries by relative file path, again mimicing Protege behavior.
+    # Sort catalog entries by relative file path, again mimicking Protégé behavior.
     catalog_entries: List[Tuple[str, str]] = sorted(
         [
             (
diff --git a/src/review.mk b/src/review.mk
index f4176d55..999c7c26 100644
--- a/src/review.mk
+++ b/src/review.mk
@@ -26,7 +26,8 @@ check_reference_basenames := $(foreach ttl_basename,$(ttl_basenames),.check-$(tt
 check_targets := $(foreach ttl_basename,$(ttl_basenames),check-$(ttl_basename))
 
 all: \
-	$(check_reference_basenames)
+	$(check_reference_basenames) \
+	catalog-v001.xml
 
 .check-%.ttl: \
 	%.ttl \
@@ -39,8 +40,24 @@ all: \
 	  --target-format turtle
 	mv $@_ $@
 
+catalog-v001.xml: \
+	$(top_srcdir)/.venv.done.log \
+	$(top_srcdir)/etc/domain_directories.tsv \
+	$(top_srcdir)/etc/dependency_files.tsv \
+	$(top_srcdir)/src/create-catalog-v001.xml.py
+	rm -f _$@
+	source $(top_srcdir)/venv/bin/activate \
+	  && python3 $(top_srcdir)/src/create-catalog-v001.xml.py \
+	    _$@ \
+	    $(top_srcdir)/etc/domain_directories.tsv \
+	    $(top_srcdir)/etc/dependency_files.tsv \
+	    "$(top_srcdir)" \
+	    $(ttl_basenames)
+	mv _$@ $@
+
 check: \
-	$(check_targets)
+	$(check_targets) \
+	catalog-v001.xml
 
 # Reminder: diff exits non-0 on finding any differences.
 # Reminder: The $^ automatic Make variable is the name of all recipe prerequisites.
@@ -51,4 +68,6 @@ check-%.ttl: \
 	|| (echo "ERROR:src/review.mk:The local $< does not match the normalized version.  If the above reported changes look fine, run 'cp .check-$< $<' while in the sub-folder ontology/$$(basename $< .ttl)/ to get a file ready to commit to Git." >&2 ; exit 1)
 
 clean:
-	@rm -f $(check_reference_basenames)
+	@rm -f \
+	  $(check_reference_basenames) \
+	  catalog-v001.xml
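
[Post-diff note, also outside the applied hunks: a quick offline smoke
test of one regenerated target.  This is an illustrative sketch, not
part of the patch; the `.venv.done.log` bootstrap step and the `grep`
spot-check are assumptions about the default clone layout.]

    # Provision the virtual environment first (the catalog recipe sources it):
    make .venv.done.log
    cd ontology/co
    make catalog-v001.xml
    # One <uri> entry is expected per ontology reference in the transitive
    # import closure, each pointing at a relative local path:
    grep -c '<uri' catalog-v001.xml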